diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 56dae8189ad..cb713915504 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.7.0a1 +current_version = 1.9.0a1 parse = (?P<major>[\d]+) # major version number \.(?P<minor>[\d]+) # minor version number \.(?P<patch>[\d]+) # patch version number @@ -35,13 +35,3 @@ first_value = 1 [bumpversion:file:core/setup.py] [bumpversion:file:core/dbt/version.py] - -[bumpversion:file:plugins/postgres/setup.py] - -[bumpversion:file:plugins/postgres/dbt/adapters/postgres/__version__.py] - -[bumpversion:file:docker/Dockerfile] - -[bumpversion:file:tests/adapter/setup.py] - -[bumpversion:file:tests/adapter/dbt/tests/adapter/__version__.py] diff --git a/.changes/0.0.0.md b/.changes/0.0.0.md index 8ba726dbba3..a5f89338a7b 100644 --- a/.changes/0.0.0.md +++ b/.changes/0.0.0.md @@ -3,6 +3,7 @@ For information on prior major and minor releases, see their changelogs: +* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md) * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md) * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md) * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md) diff --git a/.changes/header.tpl.md b/.changes/header.tpl.md index e48cb3069f6..4d0055e6287 100755 --- a/.changes/header.tpl.md +++ b/.changes/header.tpl.md @@ -1,6 +1,6 @@ # dbt Core Changelog -- This file provides a full account of all changes to `dbt-core` and `dbt-postgres` +- This file provides a full account of all changes to `dbt-core` - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry) diff --git a/.changes/unreleased/Breaking Changes-20231206-192442.yaml b/.changes/unreleased/Breaking Changes-20231206-192442.yaml new file mode 100644 index 00000000000..c6aad99bce5 --- /dev/null +++ b/.changes/unreleased/Breaking Changes-20231206-192442.yaml @@ -0,0 +1,6 @@ +kind: Breaking Changes +body: Fix changing the current working directory when using dbt deps, clean and init.
+time: 2023-12-06T19:24:42.575372+09:00 +custom: + Author: rariyama + Issue: "8997" diff --git a/.changes/unreleased/Dependencies-20230621-005752.yaml b/.changes/unreleased/Dependencies-20230621-005752.yaml deleted file mode 100644 index badef6c30b4..00000000000 --- a/.changes/unreleased/Dependencies-20230621-005752.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump mypy from 1.3.0 to 1.4.0" -time: 2023-06-21T00:57:52.00000Z -custom: - Author: dependabot[bot] - PR: 7912 diff --git a/.changes/unreleased/Dependencies-20230726-201740.yaml b/.changes/unreleased/Dependencies-20230726-201740.yaml deleted file mode 100644 index d75ddf467d7..00000000000 --- a/.changes/unreleased/Dependencies-20230726-201740.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: "Dependencies" -body: "Bump mypy from 1.4.0 to 1.4.1" -time: 2023-07-26T20:17:40.00000Z -custom: - Author: dependabot[bot] - PR: 8219 diff --git a/.changes/unreleased/Dependencies-20230727-145703.yaml b/.changes/unreleased/Dependencies-20230727-145703.yaml deleted file mode 100644 index 6cc64bf5102..00000000000 --- a/.changes/unreleased/Dependencies-20230727-145703.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Update pin for click<9 -time: 2023-07-27T14:57:03.180458-05:00 -custom: - Author: emmyoop - PR: "8232" diff --git a/.changes/unreleased/Dependencies-20230727-145726.yaml b/.changes/unreleased/Dependencies-20230727-145726.yaml deleted file mode 100644 index febecde9db9..00000000000 --- a/.changes/unreleased/Dependencies-20230727-145726.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Add upper bound to sqlparse pin of <0.5 -time: 2023-07-27T14:57:26.40416-05:00 -custom: - Author: emmyoop - PR: "8236" diff --git a/.changes/unreleased/Dependencies-20230728-135227.yaml b/.changes/unreleased/Dependencies-20230728-135227.yaml deleted file mode 100644 index 3373d055c3e..00000000000 --- a/.changes/unreleased/Dependencies-20230728-135227.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Dependencies -body: Support dbt-semantic-interfaces 0.2.0 -time: 2023-07-28T13:52:27.207241-07:00 -custom: - Author: QMalcolm - PR: "8250" diff --git a/.changes/unreleased/Dependencies-20240509-093717.yaml b/.changes/unreleased/Dependencies-20240509-093717.yaml new file mode 100644 index 00000000000..82094a3e122 --- /dev/null +++ b/.changes/unreleased/Dependencies-20240509-093717.yaml @@ -0,0 +1,6 @@ +kind: Dependencies +body: Remove logbook dependency +time: 2024-05-09T09:37:17.745129-05:00 +custom: + Author: emmyoop + Issue: "8027" diff --git a/.changes/unreleased/Dependencies-20240820-131909.yaml b/.changes/unreleased/Dependencies-20240820-131909.yaml new file mode 100644 index 00000000000..c2392d917db --- /dev/null +++ b/.changes/unreleased/Dependencies-20240820-131909.yaml @@ -0,0 +1,7 @@ +kind: Dependencies +body: Increase supported version range for dbt-semantic-interfaces. Needed to support + custom calendar features. 
+time: 2024-08-20T13:19:09.015225-07:00 +custom: + Author: courtneyholcomb + Issue: "9265" diff --git a/.changes/unreleased/Docs-20230715-200907.yaml b/.changes/unreleased/Docs-20230715-200907.yaml deleted file mode 100644 index fe1792e18aa..00000000000 --- a/.changes/unreleased/Docs-20230715-200907.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Docs -body: Corrected spelling of "Partiton" -time: 2023-07-15T20:09:07.057361092+02:00 -custom: - Author: pgoslatara - Issue: "8100" diff --git a/.changes/unreleased/Docs-20230718-192422.yaml b/.changes/unreleased/Docs-20230718-192422.yaml deleted file mode 100644 index ab034ea5582..00000000000 --- a/.changes/unreleased/Docs-20230718-192422.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Docs -body: Remove static SQL codeblock for metrics -time: 2023-07-18T19:24:22.155323+02:00 -custom: - Author: marcodamore - Issue: "436" diff --git a/.changes/unreleased/Docs-20240311-140344.yaml b/.changes/unreleased/Docs-20240311-140344.yaml new file mode 100644 index 00000000000..6a78c95ef5a --- /dev/null +++ b/.changes/unreleased/Docs-20240311-140344.yaml @@ -0,0 +1,6 @@ +kind: Docs +body: Enable display of unit tests +time: 2024-03-11T14:03:44.490834-04:00 +custom: + Author: gshank + Issue: "501" diff --git a/.changes/unreleased/Docs-20240501-021050.yaml b/.changes/unreleased/Docs-20240501-021050.yaml new file mode 100644 index 00000000000..8799a69debf --- /dev/null +++ b/.changes/unreleased/Docs-20240501-021050.yaml @@ -0,0 +1,6 @@ +kind: Docs +body: Unit tests not rendering +time: 2024-05-01T02:10:50.987412+02:00 +custom: + Author: aranke + Issue: "506" diff --git a/.changes/unreleased/Docs-20240516-223036.yaml b/.changes/unreleased/Docs-20240516-223036.yaml new file mode 100644 index 00000000000..1eb76039621 --- /dev/null +++ b/.changes/unreleased/Docs-20240516-223036.yaml @@ -0,0 +1,6 @@ +kind: Docs +body: Add support for Saved Query node +time: 2024-05-16T22:30:36.206492-07:00 +custom: + Author: ChenyuLInx + Issue: "486" diff --git a/.changes/unreleased/Docs-20240613-151048.yaml b/.changes/unreleased/Docs-20240613-151048.yaml new file mode 100644 index 00000000000..f8554c0a995 --- /dev/null +++ b/.changes/unreleased/Docs-20240613-151048.yaml @@ -0,0 +1,6 @@ +kind: Docs +body: Fix npm security vulnerabilities as of June 2024 +time: 2024-06-13T15:10:48.301989+01:00 +custom: + Author: aranke + Issue: "513" diff --git a/.changes/unreleased/Features-20240506-175642.yaml b/.changes/unreleased/Features-20240506-175642.yaml new file mode 100644 index 00000000000..308be95b4d6 --- /dev/null +++ b/.changes/unreleased/Features-20240506-175642.yaml @@ -0,0 +1,6 @@ +kind: Features +body: serialize inferred primary key +time: 2024-05-06T17:56:42.757673-05:00 +custom: + Author: dave-connors-3 + Issue: "9824" diff --git a/.changes/unreleased/Features-20240507-162717.yaml b/.changes/unreleased/Features-20240507-162717.yaml new file mode 100644 index 00000000000..662902483ff --- /dev/null +++ b/.changes/unreleased/Features-20240507-162717.yaml @@ -0,0 +1,6 @@ +kind: Features +body: 'Add unit_test: selection method' +time: 2024-05-07T16:27:17.047585-04:00 +custom: + Author: michelleark + Issue: "10053" diff --git a/.changes/unreleased/Features-20240522-000309.yaml b/.changes/unreleased/Features-20240522-000309.yaml new file mode 100644 index 00000000000..d02d3be3170 --- /dev/null +++ b/.changes/unreleased/Features-20240522-000309.yaml @@ -0,0 +1,7 @@ +kind: Features +body: Maximally parallelize dbt clone + in clone command" +time: 2024-05-22T00:03:09.765977-04:00 +custom: + Author: 
michelleark + Issue: "7914" diff --git a/.changes/unreleased/Features-20240527-124405.yaml b/.changes/unreleased/Features-20240527-124405.yaml new file mode 100644 index 00000000000..5dd2850609d --- /dev/null +++ b/.changes/unreleased/Features-20240527-124405.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add --host flag to dbt docs serve, defaulting to '127.0.0.1' +time: 2024-05-27T12:44:05.040843-04:00 +custom: + Author: michelleark + Issue: "10229" diff --git a/.changes/unreleased/Features-20240531-150816.yaml b/.changes/unreleased/Features-20240531-150816.yaml new file mode 100644 index 00000000000..ebe69c0c5e3 --- /dev/null +++ b/.changes/unreleased/Features-20240531-150816.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Update data_test to accept arbitrary config options +time: 2024-05-31T15:08:16.431966-05:00 +custom: + Author: McKnight-42 + Issue: "10197" diff --git a/.changes/unreleased/Features-20240606-112334.yaml b/.changes/unreleased/Features-20240606-112334.yaml new file mode 100644 index 00000000000..4a325d6811f --- /dev/null +++ b/.changes/unreleased/Features-20240606-112334.yaml @@ -0,0 +1,6 @@ +kind: Features +body: add pre_model and post_model hook calls to data and unit tests to be able to provide extra config options +time: 2024-06-06T11:23:34.758675-05:00 +custom: + Author: McKnight-42 + Issue: "10198" diff --git a/.changes/unreleased/Features-20240617-103948.yaml b/.changes/unreleased/Features-20240617-103948.yaml new file mode 100644 index 00000000000..a64867d569a --- /dev/null +++ b/.changes/unreleased/Features-20240617-103948.yaml @@ -0,0 +1,6 @@ +kind: Features +body: add --empty value to jinja context as flags.EMPTY +time: 2024-06-17T10:39:48.275801-04:00 +custom: + Author: michelleark + Issue: "10317" diff --git a/.changes/unreleased/Features-20240621-141635.yaml b/.changes/unreleased/Features-20240621-141635.yaml new file mode 100644 index 00000000000..4e839edb32d --- /dev/null +++ b/.changes/unreleased/Features-20240621-141635.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Warning message for snapshot timestamp data types +time: 2024-06-21T14:16:35.717637-04:00 +custom: + Author: gshank + Issue: "10234" diff --git a/.changes/unreleased/Features-20240625-095107.yaml b/.changes/unreleased/Features-20240625-095107.yaml new file mode 100644 index 00000000000..ce7c3d6c803 --- /dev/null +++ b/.changes/unreleased/Features-20240625-095107.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Support cumulative_type_params & sub-daily granularities in semantic manifest. +time: 2024-06-25T09:51:07.983248-07:00 +custom: + Author: courtneyholcomb + Issue: "10360" diff --git a/.changes/unreleased/Features-20240627-162953.yaml b/.changes/unreleased/Features-20240627-162953.yaml new file mode 100644 index 00000000000..8e385aa6186 --- /dev/null +++ b/.changes/unreleased/Features-20240627-162953.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add time_granularity to metric spec. 
+time: 2024-06-27T16:29:53.500917-07:00 +custom: + Author: courtneyholcomb + Issue: "10376" diff --git a/.changes/unreleased/Features-20240712-214546.yaml b/.changes/unreleased/Features-20240712-214546.yaml new file mode 100644 index 00000000000..3c9c5b8dd26 --- /dev/null +++ b/.changes/unreleased/Features-20240712-214546.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Support standard schema/database fields for snapshots +time: 2024-07-12T21:45:46.06011-04:00 +custom: + Author: gshank + Issue: "10301" diff --git a/.changes/unreleased/Features-20240719-161841.yaml b/.changes/unreleased/Features-20240719-161841.yaml new file mode 100644 index 00000000000..a802faf6888 --- /dev/null +++ b/.changes/unreleased/Features-20240719-161841.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Support ref and source in foreign key constraint expressions, bump dbt-common minimum to 1.6 +time: 2024-07-19T16:18:41.434278-04:00 +custom: + Author: michelleark + Issue: "8062" diff --git a/.changes/unreleased/Features-20240722-202238.yaml b/.changes/unreleased/Features-20240722-202238.yaml new file mode 100644 index 00000000000..5fda200b919 --- /dev/null +++ b/.changes/unreleased/Features-20240722-202238.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Support new semantic layer time spine configs to enable sub-daily granularity. +time: 2024-07-22T20:22:38.258249-07:00 +custom: + Author: courtneyholcomb + Issue: "10475" diff --git a/.changes/unreleased/Features-20240829-135320.yaml b/.changes/unreleased/Features-20240829-135320.yaml new file mode 100644 index 00000000000..c7f5cf9d8b4 --- /dev/null +++ b/.changes/unreleased/Features-20240829-135320.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add support for behavior flags +time: 2024-08-29T13:53:20.16122-04:00 +custom: + Author: mikealfare + Issue: "10618" diff --git a/.changes/unreleased/Features-20240903-132428.yaml b/.changes/unreleased/Features-20240903-132428.yaml new file mode 100644 index 00000000000..08df6958990 --- /dev/null +++ b/.changes/unreleased/Features-20240903-132428.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Enable `--resource-type` and `--exclude-resource-type` CLI flags and environment variables for `dbt test` +time: 2024-09-03T13:24:28.592837+01:00 +custom: + Author: TowardOliver dbeatty10 + Issue: "10656" diff --git a/.changes/unreleased/Features-20240903-154133.yaml b/.changes/unreleased/Features-20240903-154133.yaml new file mode 100644 index 00000000000..fe45b8d4d10 --- /dev/null +++ b/.changes/unreleased/Features-20240903-154133.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Allow configuring snapshot column names +time: 2024-09-03T15:41:33.167097-04:00 +custom: + Author: gshank + Issue: "10185" diff --git a/.changes/unreleased/Features-20240904-182320.yaml b/.changes/unreleased/Features-20240904-182320.yaml new file mode 100644 index 00000000000..7d216ec749a --- /dev/null +++ b/.changes/unreleased/Features-20240904-182320.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add custom_granularities to YAML spec for time spines. 
+time: 2024-09-04T18:23:20.234952-07:00 +custom: + Author: courtneyholcomb + Issue: "9265" diff --git a/.changes/unreleased/Features-20240911-121029.yaml b/.changes/unreleased/Features-20240911-121029.yaml new file mode 100644 index 00000000000..365faf7fadd --- /dev/null +++ b/.changes/unreleased/Features-20240911-121029.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Add basic functionality for creating microbatch incremental models +time: 2024-09-11T12:10:29.822189-05:00 +custom: + Author: MichelleArk QMalcolm + Issue: 9490 10635 10637 10638 10636 10662 10639 diff --git a/.changes/unreleased/Features-20240913-232111.yaml b/.changes/unreleased/Features-20240913-232111.yaml new file mode 100644 index 00000000000..8f0fc74643e --- /dev/null +++ b/.changes/unreleased/Features-20240913-232111.yaml @@ -0,0 +1,6 @@ +kind: Features +body: Execute microbatch models in batches +time: 2024-09-13T23:21:11.935434-04:00 +custom: + Author: michelleark + Issue: "10700" diff --git a/.changes/unreleased/Fixes-20230601-204157.yaml b/.changes/unreleased/Fixes-20230601-204157.yaml new file mode 100644 index 00000000000..fb6ca636e1e --- /dev/null +++ b/.changes/unreleased/Fixes-20230601-204157.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Remove unused check_new method +time: 2023-06-01T20:41:57.556342+02:00 +custom: + Author: kevinneville + Issue: "7586" diff --git a/.changes/unreleased/Fixes-20230625-142731.yaml b/.changes/unreleased/Fixes-20230625-142731.yaml deleted file mode 100644 index 593fd8b9733..00000000000 --- a/.changes/unreleased/Fixes-20230625-142731.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fixed double-underline -time: 2023-06-25T14:27:31.231253719+08:00 -custom: - Author: lllong33 - Issue: "5301" diff --git a/.changes/unreleased/Fixes-20230717-160652.yaml b/.changes/unreleased/Fixes-20230717-160652.yaml deleted file mode 100644 index 8c63a584db9..00000000000 --- a/.changes/unreleased/Fixes-20230717-160652.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Copy target_schema from config into snapshot node -time: 2023-07-17T16:06:52.957724-04:00 -custom: - Author: gshank - Issue: "6745" diff --git a/.changes/unreleased/Fixes-20230718-125518.yaml b/.changes/unreleased/Fixes-20230718-125518.yaml deleted file mode 100644 index be3c7e9d8ed..00000000000 --- a/.changes/unreleased/Fixes-20230718-125518.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Enable converting deprecation warnings to errors -time: 2023-07-18T12:55:18.03914-04:00 -custom: - Author: michelleark - Issue: "8130" diff --git a/.changes/unreleased/Fixes-20230720-122723.yaml b/.changes/unreleased/Fixes-20230720-122723.yaml deleted file mode 100644 index 8c390f6fbe3..00000000000 --- a/.changes/unreleased/Fixes-20230720-122723.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Add status to Parse Inline Error -time: 2023-07-20T12:27:23.085084-07:00 -custom: - Author: ChenyuLInx - Issue: "8173" diff --git a/.changes/unreleased/Fixes-20230720-161513.yaml b/.changes/unreleased/Fixes-20230720-161513.yaml deleted file mode 100644 index 9dd9c291cf3..00000000000 --- a/.changes/unreleased/Fixes-20230720-161513.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Ensure `warn_error_options` get serialized in `invocation_args_dict` -time: 2023-07-20T16:15:13.761813-07:00 -custom: - Author: QMalcolm - Issue: "7694" diff --git a/.changes/unreleased/Fixes-20230720-170112.yaml b/.changes/unreleased/Fixes-20230720-170112.yaml deleted file mode 100644 index 9947afceafe..00000000000 --- a/.changes/unreleased/Fixes-20230720-170112.yaml 
+++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Stop detecting materialization macros based on macro name -time: 2023-07-20T17:01:12.496238-07:00 -custom: - Author: QMalcolm - Issue: "6231" diff --git a/.changes/unreleased/Fixes-20230720-172422.yaml b/.changes/unreleased/Fixes-20230720-172422.yaml deleted file mode 100644 index 9bf2307f48a..00000000000 --- a/.changes/unreleased/Fixes-20230720-172422.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Update `dbt deps` download retry logic to handle `EOFError` exceptions -time: 2023-07-20T17:24:22.969951-07:00 -custom: - Author: QMalcolm - Issue: "6653" diff --git a/.changes/unreleased/Fixes-20230726-104448.yaml b/.changes/unreleased/Fixes-20230726-104448.yaml deleted file mode 100644 index 0c8e55dd6b9..00000000000 --- a/.changes/unreleased/Fixes-20230726-104448.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Improve handling of CTE injection with ephemeral models -time: 2023-07-26T10:44:48.888451-04:00 -custom: - Author: gshank - Issue: "8213" diff --git a/.changes/unreleased/Fixes-20230727-125830.yaml b/.changes/unreleased/Fixes-20230727-125830.yaml deleted file mode 100644 index 0dc4bdad6e0..00000000000 --- a/.changes/unreleased/Fixes-20230727-125830.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Fixes -body: Fix unbound local variable error in `checked_agg_time_dimension_for_measure` -time: 2023-07-27T12:58:30.673803-07:00 -custom: - Author: QMalcolm - Issue: "8230" diff --git a/.changes/unreleased/Fixes-20230728-115620.yaml b/.changes/unreleased/Fixes-20230728-115620.yaml deleted file mode 100644 index 2c61ce6baa0..00000000000 --- a/.changes/unreleased/Fixes-20230728-115620.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Fixes -body: Ensure runtime errors are raised for graph runnable tasks (compile, show, run, - etc) -time: 2023-07-28T11:56:20.863718-04:00 -custom: - Author: michelleark - Issue: "8166" diff --git a/.changes/unreleased/Fixes-20240113-073615.yaml b/.changes/unreleased/Fixes-20240113-073615.yaml new file mode 100644 index 00000000000..3dd68508db8 --- /dev/null +++ b/.changes/unreleased/Fixes-20240113-073615.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Convert "Skipping model due to fail_fast" message to DEBUG level +time: 2024-01-13T07:36:15.836294-00:00 +custom: + Author: scottgigante,nevdelap + Issue: "8774" diff --git a/.changes/unreleased/Fixes-20240508-151127.yaml b/.changes/unreleased/Fixes-20240508-151127.yaml new file mode 100644 index 00000000000..4ccd18a6729 --- /dev/null +++ b/.changes/unreleased/Fixes-20240508-151127.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: 'Restore previous behavior for --favor-state: only favor defer_relation if not + selected in current command"' +time: 2024-05-08T15:11:27.510912+02:00 +custom: + Author: jtcohen6 + Issue: "10107" diff --git a/.changes/unreleased/Fixes-20240509-091411.yaml b/.changes/unreleased/Fixes-20240509-091411.yaml new file mode 100644 index 00000000000..a4c243779c5 --- /dev/null +++ b/.changes/unreleased/Fixes-20240509-091411.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Unit test fixture (csv) returns null for empty value +time: 2024-05-09T09:14:11.772709-04:00 +custom: + Author: michelleark + Issue: "9881" diff --git a/.changes/unreleased/Fixes-20240516-153913.yaml b/.changes/unreleased/Fixes-20240516-153913.yaml new file mode 100644 index 00000000000..b96f45e2c37 --- /dev/null +++ b/.changes/unreleased/Fixes-20240516-153913.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Fix json format log and --quiet for ls and jinja print by converting print call + to fire events +time: 
2024-05-16T15:39:13.896723-07:00 +custom: + Author: ChenyuLInx + Issue: "8756" diff --git a/.changes/unreleased/Fixes-20240516-223510.yaml b/.changes/unreleased/Fixes-20240516-223510.yaml new file mode 100644 index 00000000000..53a0e553e7f --- /dev/null +++ b/.changes/unreleased/Fixes-20240516-223510.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Add resource type to saved_query +time: 2024-05-16T22:35:10.287514-07:00 +custom: + Author: ChenyuLInx + Issue: "10168" diff --git a/.changes/unreleased/Fixes-20240522-182855.yaml b/.changes/unreleased/Fixes-20240522-182855.yaml new file mode 100644 index 00000000000..b0963b4a6cc --- /dev/null +++ b/.changes/unreleased/Fixes-20240522-182855.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: 'Fix: Order-insensitive unit test equality assertion for expected/actual with + multiple nulls' +time: 2024-05-22T18:28:55.91733-04:00 +custom: + Author: michelleark + Issue: "10167" diff --git a/.changes/unreleased/Fixes-20240523-204251.yaml b/.changes/unreleased/Fixes-20240523-204251.yaml new file mode 100644 index 00000000000..33abfd2ae34 --- /dev/null +++ b/.changes/unreleased/Fixes-20240523-204251.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Renaming or removing a contracted model should raise a BreakingChange warning/error +time: 2024-05-23T20:42:51.033946-04:00 +custom: + Author: michelleark + Issue: "10116" diff --git a/.changes/unreleased/Fixes-20240524-131135.yaml b/.changes/unreleased/Fixes-20240524-131135.yaml new file mode 100644 index 00000000000..7a15d9bf68d --- /dev/null +++ b/.changes/unreleased/Fixes-20240524-131135.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: prefer disabled project nodes to external node +time: 2024-05-24T13:11:35.440443-04:00 +custom: + Author: michelleark + Issue: "10224" diff --git a/.changes/unreleased/Fixes-20240605-111652.yaml b/.changes/unreleased/Fixes-20240605-111652.yaml new file mode 100644 index 00000000000..25c756db86b --- /dev/null +++ b/.changes/unreleased/Fixes-20240605-111652.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix issues with selectors and inline nodes +time: 2024-06-05T11:16:52.187667-04:00 +custom: + Author: gshank + Issue: 8943 9269 diff --git a/.changes/unreleased/Fixes-20240607-134648.yaml b/.changes/unreleased/Fixes-20240607-134648.yaml new file mode 100644 index 00000000000..f40b98678f9 --- /dev/null +++ b/.changes/unreleased/Fixes-20240607-134648.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix snapshot config to work in yaml files +time: 2024-06-07T13:46:48.383215-04:00 +custom: + Author: gshank + Issue: "4000" diff --git a/.changes/unreleased/Fixes-20240610-132130.yaml b/.changes/unreleased/Fixes-20240610-132130.yaml new file mode 100644 index 00000000000..45ea8d7dd8f --- /dev/null +++ b/.changes/unreleased/Fixes-20240610-132130.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Improve handling of error when loading schema file list +time: 2024-06-10T13:21:30.963371-04:00 +custom: + Author: gshank + Issue: "10284" diff --git a/.changes/unreleased/Fixes-20240610-200522.yaml b/.changes/unreleased/Fixes-20240610-200522.yaml new file mode 100644 index 00000000000..456575644ac --- /dev/null +++ b/.changes/unreleased/Fixes-20240610-200522.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Use model alias for the CTE identifier generated during ephemeral materialization +time: 2024-06-10T20:05:22.510814008Z +custom: + Author: jeancochrane + Issue: "5273" diff --git a/.changes/unreleased/Fixes-20240612-124256.yaml b/.changes/unreleased/Fixes-20240612-124256.yaml new file mode 100644 index 00000000000..2528ebe539a --- /dev/null +++ 
b/.changes/unreleased/Fixes-20240612-124256.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Saved Query node fail during skip +time: 2024-06-12T12:42:56.329073-07:00 +custom: + Author: ChenyuLInx + Issue: "10029" diff --git a/.changes/unreleased/Fixes-20240612-152139.yaml b/.changes/unreleased/Fixes-20240612-152139.yaml new file mode 100644 index 00000000000..8881e470780 --- /dev/null +++ b/.changes/unreleased/Fixes-20240612-152139.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Implement state:modified for saved queries +time: 2024-06-12T15:21:39.851426-04:00 +custom: + Author: gshank + Issue: "10294" diff --git a/.changes/unreleased/Fixes-20240613-183117.yaml b/.changes/unreleased/Fixes-20240613-183117.yaml new file mode 100644 index 00000000000..14b1ee2bf08 --- /dev/null +++ b/.changes/unreleased/Fixes-20240613-183117.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Don't warn on `unit_test` config paths that are properly used +time: 2024-06-13T18:31:17.486497-07:00 +custom: + Author: QMalcolm + Issue: "10311" diff --git a/.changes/unreleased/Fixes-20240624-171729.yaml b/.changes/unreleased/Fixes-20240624-171729.yaml new file mode 100644 index 00000000000..f121ac5aa8b --- /dev/null +++ b/.changes/unreleased/Fixes-20240624-171729.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix setting `silence` of `warn_error_options` via `dbt_project.yaml` flags +time: 2024-06-24T17:17:29.464865-07:00 +custom: + Author: QMalcolm + Issue: "10160" diff --git a/.changes/unreleased/Fixes-20240625-171737.yaml b/.changes/unreleased/Fixes-20240625-171737.yaml new file mode 100644 index 00000000000..6e839a1f81a --- /dev/null +++ b/.changes/unreleased/Fixes-20240625-171737.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Attempt to provide test fixture tables with all values to set types correctly + for comparison with source tables +time: 2024-06-25T17:17:37.514619-07:00 +custom: + Author: versusfacit + Issue: "10365" diff --git a/.changes/unreleased/Fixes-20240627-154448.yaml b/.changes/unreleased/Fixes-20240627-154448.yaml new file mode 100644 index 00000000000..f2ea7dd739c --- /dev/null +++ b/.changes/unreleased/Fixes-20240627-154448.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Limit data_tests deprecation to root_project +time: 2024-06-27T15:44:48.579869-04:00 +custom: + Author: gshank + Issue: "9835" diff --git a/.changes/unreleased/Fixes-20240709-172440.yaml b/.changes/unreleased/Fixes-20240709-172440.yaml new file mode 100644 index 00000000000..4931b2f80f0 --- /dev/null +++ b/.changes/unreleased/Fixes-20240709-172440.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: CLI flags should take precedence over env var flags +time: 2024-07-09T17:24:40.918977-04:00 +custom: + Author: gshank + Issue: "10304" diff --git a/.changes/unreleased/Fixes-20240714-100254.yaml b/.changes/unreleased/Fixes-20240714-100254.yaml new file mode 100644 index 00000000000..442abc03498 --- /dev/null +++ b/.changes/unreleased/Fixes-20240714-100254.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix typing for artifact schemas +time: 2024-07-14T10:02:54.452099+09:00 +custom: + Author: nakamichiworks + Issue: "10442" diff --git a/.changes/unreleased/Fixes-20240716-133703.yaml b/.changes/unreleased/Fixes-20240716-133703.yaml new file mode 100644 index 00000000000..e7063d696bf --- /dev/null +++ b/.changes/unreleased/Fixes-20240716-133703.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix over deletion of generated_metrics in partial parsing +time: 2024-07-16T13:37:03.49651-04:00 +custom: + Author: gshank + Issue: "10450" diff --git a/.changes/unreleased/Fixes-20240716-171427.yaml
b/.changes/unreleased/Fixes-20240716-171427.yaml new file mode 100644 index 00000000000..5d8db74a448 --- /dev/null +++ b/.changes/unreleased/Fixes-20240716-171427.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix error constructing warn_error_options +time: 2024-07-16T17:14:27.837171-04:00 +custom: + Author: gshank + Issue: "10452" diff --git a/.changes/unreleased/Fixes-20240728-221421.yaml b/.changes/unreleased/Fixes-20240728-221421.yaml new file mode 100644 index 00000000000..420414e3f52 --- /dev/null +++ b/.changes/unreleased/Fixes-20240728-221421.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Do not update varchar column definitions if a contract exists +time: 2024-07-28T22:14:21.67712-04:00 +custom: + Author: gshank + Issue: "10362" diff --git a/.changes/unreleased/Fixes-20240731-095152.yaml b/.changes/unreleased/Fixes-20240731-095152.yaml new file mode 100644 index 00000000000..c7899f6c30b --- /dev/null +++ b/.changes/unreleased/Fixes-20240731-095152.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: fix all_constraints access, disabled node parsing of non-uniquely named resources +time: 2024-07-31T09:51:52.751135-04:00 +custom: + Author: michelleark gshank + Issue: "10509" diff --git a/.changes/unreleased/Fixes-20240806-172110.yaml b/.changes/unreleased/Fixes-20240806-172110.yaml new file mode 100644 index 00000000000..716bf861639 --- /dev/null +++ b/.changes/unreleased/Fixes-20240806-172110.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Propagate measure label when using create_metrics +time: 2024-08-06T17:21:10.265494-07:00 +custom: + Author: aliceliu + Issue: "10536" diff --git a/.changes/unreleased/Fixes-20240806-194843.yaml b/.changes/unreleased/Fixes-20240806-194843.yaml new file mode 100644 index 00000000000..7eb5a4bd8d8 --- /dev/null +++ b/.changes/unreleased/Fixes-20240806-194843.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: respect --quiet and --warn-error-options for flag deprecations +time: 2024-08-06T19:48:43.399453-04:00 +custom: + Author: michelleark + Issue: "10105" diff --git a/.changes/unreleased/Fixes-20240813-154235.yaml b/.changes/unreleased/Fixes-20240813-154235.yaml new file mode 100644 index 00000000000..03c3a3c7cac --- /dev/null +++ b/.changes/unreleased/Fixes-20240813-154235.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix state:modified check for exports +time: 2024-08-13T15:42:35.471685-07:00 +custom: + Author: aliceliu + Issue: "10138" diff --git a/.changes/unreleased/Fixes-20240816-140807.yaml b/.changes/unreleased/Fixes-20240816-140807.yaml new file mode 100644 index 00000000000..1f6c60da6da --- /dev/null +++ b/.changes/unreleased/Fixes-20240816-140807.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Filter out empty nodes after graph selection to support consistent selection of nodes that depend on upstream public models +time: 2024-08-16T14:08:07.426235-07:00 +custom: + Author: jtcohen6 + Issue: "8987" diff --git a/.changes/unreleased/Fixes-20240824-210903.yaml b/.changes/unreleased/Fixes-20240824-210903.yaml new file mode 100644 index 00000000000..bf0dc9cb022 --- /dev/null +++ b/.changes/unreleased/Fixes-20240824-210903.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Late render pre- and post-hooks configs in properties / schema YAML files +time: 2024-08-24T21:09:03.252733-06:00 +custom: + Author: dbeatty10 + Issue: "10603" diff --git a/.changes/unreleased/Fixes-20240829-105701.yaml b/.changes/unreleased/Fixes-20240829-105701.yaml new file mode 100644 index 00000000000..170f2463fa2 --- /dev/null +++ b/.changes/unreleased/Fixes-20240829-105701.yaml @@ -0,0 +1,7 @@ +kind: Fixes +body: Allow the use of 
env_var function in certain macros in which it was previously + unavailable. +time: 2024-08-29T10:57:01.160613-04:00 +custom: + Author: peterallenwebb + Issue: "10609" diff --git a/.changes/unreleased/Fixes-20240905-180248.yaml b/.changes/unreleased/Fixes-20240905-180248.yaml new file mode 100644 index 00000000000..3d18c28a4f6 --- /dev/null +++ b/.changes/unreleased/Fixes-20240905-180248.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: 'Remove deprecation for tests: to data_tests: change' +time: 2024-09-05T18:02:48.086421-04:00 +custom: + Author: gshank + Issue: "10564" diff --git a/.changes/unreleased/Fixes-20240917-174446.yaml b/.changes/unreleased/Fixes-20240917-174446.yaml new file mode 100644 index 00000000000..89dd65e6f35 --- /dev/null +++ b/.changes/unreleased/Fixes-20240917-174446.yaml @@ -0,0 +1,6 @@ +kind: Fixes +body: Fix `--resource-type test` for `dbt list` and `dbt build` +time: 2024-09-17T17:44:46.121032-06:00 +custom: + Author: dbeatty10 + Issue: "10730" diff --git a/.changes/unreleased/Security-20240522-094540.yaml b/.changes/unreleased/Security-20240522-094540.yaml new file mode 100644 index 00000000000..b35f96dc084 --- /dev/null +++ b/.changes/unreleased/Security-20240522-094540.yaml @@ -0,0 +1,6 @@ +kind: Security +body: Explicitly bind to localhost in docs serve +time: 2024-05-22T09:45:40.748185-04:00 +custom: + Author: ChenyuLInx michelleark + Issue: "10209" diff --git a/.changes/unreleased/Under the Hood-20230719-124611.yaml b/.changes/unreleased/Under the Hood-20230719-124611.yaml deleted file mode 100644 index be381dc7618..00000000000 --- a/.changes/unreleased/Under the Hood-20230719-124611.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: Refactor flaky test pp_versioned_models -time: 2023-07-19T12:46:11.972481-04:00 -custom: - Author: gshank - Issue: "7781" diff --git a/.changes/unreleased/Under the Hood-20230719-163334.yaml b/.changes/unreleased/Under the Hood-20230719-163334.yaml deleted file mode 100644 index adc3634aa7f..00000000000 --- a/.changes/unreleased/Under the Hood-20230719-163334.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: format exception from dbtPlugin.initialize -time: 2023-07-19T16:33:34.586377-04:00 -custom: - Author: michelleark - Issue: "8152" diff --git a/.changes/unreleased/Under the Hood-20230724-150654.yaml b/.changes/unreleased/Under the Hood-20230724-150654.yaml deleted file mode 100644 index bb78d3b1f1a..00000000000 --- a/.changes/unreleased/Under the Hood-20230724-150654.yaml +++ /dev/null @@ -1,6 +0,0 @@ -kind: Under the Hood -body: A way to control maxBytes for a single dbt.log file -time: 2023-07-24T15:06:54.263822-07:00 -custom: - Author: ChenyuLInx - Issue: "8199" diff --git a/.changes/unreleased/Under the Hood-20230725-102609.yaml b/.changes/unreleased/Under the Hood-20230725-102609.yaml deleted file mode 100644 index f4cf4aec5cc..00000000000 --- a/.changes/unreleased/Under the Hood-20230725-102609.yaml +++ /dev/null @@ -1,7 +0,0 @@ -kind: Under the Hood -body: Ref expressions with version can now be processed by the latest version of the - high-performance dbt-extractor library. 
-time: 2023-07-25T10:26:09.902878-04:00 -custom: - Author: peterallenwebb - Issue: "7688" diff --git a/.changes/unreleased/Under the Hood-20240502-154430.yaml b/.changes/unreleased/Under the Hood-20240502-154430.yaml new file mode 100644 index 00000000000..6c17df3a6c6 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240502-154430.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Clear error message for Private package in dbt-core +time: 2024-05-02T15:44:30.713097-07:00 +custom: + Author: ChenyuLInx + Issue: "10083" diff --git a/.changes/unreleased/Under the Hood-20240506-145511.yaml b/.changes/unreleased/Under the Hood-20240506-145511.yaml new file mode 100644 index 00000000000..f5bad25d797 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240506-145511.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Enable use of context in serialization +time: 2024-05-06T14:55:11.1812-04:00 +custom: + Author: gshank + Issue: "10093" diff --git a/.changes/unreleased/Under the Hood-20240519-155946.yaml b/.changes/unreleased/Under the Hood-20240519-155946.yaml new file mode 100644 index 00000000000..920c7ff860d --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240519-155946.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Make RSS high water mark measurement more accurate on Linux +time: 2024-05-19T15:59:46.700842315-04:00 +custom: + Author: peterallenwebb + Issue: "10177" diff --git a/.changes/unreleased/Under the Hood-20240529-102814.yaml b/.changes/unreleased/Under the Hood-20240529-102814.yaml new file mode 100644 index 00000000000..7ec6750a765 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240529-102814.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Enable record filtering by type. +time: 2024-05-29T10:28:14.547624-05:00 +custom: + Author: emmyoop + Issue: "10240" diff --git a/.changes/unreleased/Under the Hood-20240618-140652.yaml b/.changes/unreleased/Under the Hood-20240618-140652.yaml new file mode 100644 index 00000000000..1941a5f3510 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240618-140652.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Remove IntermediateSnapshotNode +time: 2024-06-18T14:06:52.618602-04:00 +custom: + Author: gshank + Issue: "10326" diff --git a/.changes/unreleased/Under the Hood-20240701-131750.yaml b/.changes/unreleased/Under the Hood-20240701-131750.yaml new file mode 100644 index 00000000000..f15394cc583 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240701-131750.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Additional logging for skipped ephemeral models +time: 2024-07-01T13:17:50.827788-04:00 +custom: + Author: gshank + Issue: "10389" diff --git a/.changes/unreleased/Under the Hood-20240716-184859.yaml b/.changes/unreleased/Under the Hood-20240716-184859.yaml new file mode 100644 index 00000000000..7ee33b08dad --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240716-184859.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: bump black to 24.3.0 +time: 2024-07-16T18:48:59.651834-04:00 +custom: + Author: michelleark + Issue: "10454" diff --git a/.changes/unreleased/Under the Hood-20240716-205703.yaml b/.changes/unreleased/Under the Hood-20240716-205703.yaml new file mode 100644 index 00000000000..ebec452c865 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240716-205703.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: generate protos with protoc version 5.26.1 +time: 2024-07-16T20:57:03.332448-04:00 +custom: + Author: michelleark + Issue: "10457" diff --git a/.changes/unreleased/Under the 
Hood-20240806-155406.yaml b/.changes/unreleased/Under the Hood-20240806-155406.yaml new file mode 100644 index 00000000000..aafcd955ba9 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240806-155406.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Move from minimal-snowplow-tracker fork back to snowplow-tracker +time: 2024-08-06T15:54:06.422444-04:00 +custom: + Author: peterallenwebb + Issue: "8409" diff --git a/.changes/unreleased/Under the Hood-20240807-155652.yaml b/.changes/unreleased/Under the Hood-20240807-155652.yaml new file mode 100644 index 00000000000..3bd9bcbe273 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240807-155652.yaml @@ -0,0 +1,7 @@ +kind: Under the Hood +body: Add group info to RunResultError, RunResultFailure, RunResultWarning log lines +time: 2024-08-07T15:56:52.171199-05:00 +custom: + Author: aranke + Issue: "" + JiraID: "364" diff --git a/.changes/unreleased/Under the Hood-20240809-130234.yaml b/.changes/unreleased/Under the Hood-20240809-130234.yaml new file mode 100644 index 00000000000..964dd2fedf2 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240809-130234.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Improve speed of tree traversal when finding children, increasing build speed for some selectors +time: 2024-08-09T13:02:34.759905-07:00 +custom: + Author: ttusing + Issue: "10434" diff --git a/.changes/unreleased/Under the Hood-20240821-095516.yaml b/.changes/unreleased/Under the Hood-20240821-095516.yaml new file mode 100644 index 00000000000..b9335434bf3 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240821-095516.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Add test for sources tables with quotes +time: 2024-08-21T09:55:16.038101-04:00 +custom: + Author: gshank + Issue: "10582" diff --git a/.changes/unreleased/Under the Hood-20240827-105014.yaml b/.changes/unreleased/Under the Hood-20240827-105014.yaml new file mode 100644 index 00000000000..318f9ccb1b0 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240827-105014.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Additional type hints for `core/dbt/version.py` +time: 2024-08-27T10:50:14.047859-05:00 +custom: + Author: QMalcolm + Issue: "10612" diff --git a/.changes/unreleased/Under the Hood-20240827-113123.yaml b/.changes/unreleased/Under the Hood-20240827-113123.yaml new file mode 100644 index 00000000000..889c07239de --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240827-113123.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Fix typing issues in core/dbt/contracts/sql.py +time: 2024-08-27T11:31:23.749912-05:00 +custom: + Author: QMalcolm + Issue: "10614" diff --git a/.changes/unreleased/Under the Hood-20240827-114810.yaml b/.changes/unreleased/Under the Hood-20240827-114810.yaml new file mode 100644 index 00000000000..43d17ef4dc5 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240827-114810.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Fix type errors in `dbt/core/task/clean.py` +time: 2024-08-27T11:48:10.438173-05:00 +custom: + Author: QMalcolm + Issue: "10616" diff --git a/.changes/unreleased/Under the Hood-20240911-162730.yaml b/.changes/unreleased/Under the Hood-20240911-162730.yaml new file mode 100644 index 00000000000..0d35aeb5262 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240911-162730.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Add Snowplow tracking for behavior flag deprecations +time: 2024-09-11T16:27:30.293832-04:00 +custom: + Author: mikealfare + Issue: "10552" diff --git 
a/.changes/unreleased/Under the Hood-20240913-213312.yaml b/.changes/unreleased/Under the Hood-20240913-213312.yaml new file mode 100644 index 00000000000..495b6f8de53 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240913-213312.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Add test utility patch_microbatch_end_time for adapters testing +time: 2024-09-13T21:33:12.482336-04:00 +custom: + Author: michelleark + Issue: "10713" diff --git a/.changes/unreleased/Under the Hood-20240916-102201.yaml b/.changes/unreleased/Under the Hood-20240916-102201.yaml new file mode 100644 index 00000000000..48485d44f48 --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240916-102201.yaml @@ -0,0 +1,6 @@ +kind: Under the Hood +body: Replace `TestSelector` with `ResourceTypeSelector` +time: 2024-09-16T10:22:01.339462-06:00 +custom: + Author: dbeatty10 + Issue: "10718" diff --git a/.changes/unreleased/Under the Hood-20240918-170325.yaml b/.changes/unreleased/Under the Hood-20240918-170325.yaml new file mode 100644 index 00000000000..3f265a36eda --- /dev/null +++ b/.changes/unreleased/Under the Hood-20240918-170325.yaml @@ -0,0 +1,7 @@ +kind: Under the Hood +body: Standardize returning `ResourceTypeSelector` instances in `dbt list` and `dbt + build` +time: 2024-09-18T17:03:25.639516-06:00 +custom: + Author: dbeatty10 + Issue: "10739" diff --git a/.changie.yaml b/.changie.yaml index ba0590da8d3..23e802f190d 100644 --- a/.changie.yaml +++ b/.changie.yaml @@ -31,43 +31,7 @@ kinds: - {{.Body}} ({{ range $index, $element := $IssueList }}{{if $index}}, {{end}}{{$element}}{{end}}) - label: Under the Hood - label: Dependencies - changeFormat: |- - {{- $PRList := list }} - {{- $changes := splitList " " $.Custom.PR }} - {{- range $pullrequest := $changes }} - {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }} - {{- $PRList = append $PRList $changeLink }} - {{- end -}} - - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) - skipGlobalChoices: true - additionalChoices: - - key: Author - label: GitHub Username(s) (separated by a single space if multiple) - type: string - minLength: 3 - - key: PR - label: GitHub Pull Request Number (separated by a single space if multiple) - type: string - minLength: 1 - label: Security - changeFormat: |- - {{- $PRList := list }} - {{- $changes := splitList " " $.Custom.PR }} - {{- range $pullrequest := $changes }} - {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $pullrequest }} - {{- $PRList = append $PRList $changeLink }} - {{- end -}} - - {{.Body}} ({{ range $index, $element := $PRList }}{{if $index}}, {{end}}{{$element}}{{end}}) - skipGlobalChoices: true - additionalChoices: - - key: Author - label: GitHub Username(s) (separated by a single space if multiple) - type: string - minLength: 3 - - key: PR - label: GitHub Pull Request Number (separated by a single space if multiple) - type: string - minLength: 1 newlines: afterChangelogHeader: 1 @@ -106,18 +70,10 @@ footerFormat: | {{- $changeList := splitList " " $change.Custom.Author }} {{- $IssueList := list }} {{- $changeLink := $change.Kind }} - {{- if or (eq $change.Kind "Dependencies") (eq $change.Kind "Security") }} - {{- $changes := splitList " " $change.Custom.PR }} - {{- range $issueNbr := $changes }} - {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/pull/nbr)" | replace "nbr" $issueNbr }} - {{- $IssueList = append $IssueList $changeLink }} - {{- end -}} - {{- 
else }} - {{- $changes := splitList " " $change.Custom.Issue }} - {{- range $issueNbr := $changes }} - {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }} - {{- $IssueList = append $IssueList $changeLink }} - {{- end -}} + {{- $changes := splitList " " $change.Custom.Issue }} + {{- range $issueNbr := $changes }} + {{- $changeLink := "[#nbr](https://github.com/dbt-labs/dbt-core/issues/nbr)" | replace "nbr" $issueNbr }} + {{- $IssueList = append $IssueList $changeLink }} {{- end }} {{- /* check if this contributor has other changes associated with them already */}} {{- if hasKey $contributorDict $author }} diff --git a/.flake8 b/.flake8 index e39b2fa4646..084d3c0163a 100644 --- a/.flake8 +++ b/.flake8 @@ -7,6 +7,9 @@ ignore = W503 # makes Flake8 work like black W504 E203 # makes Flake8 work like black + E704 # makes Flake8 work like black E741 E501 # long line checking is done in black exclude = test/ +per-file-ignores = + */__init__.py: F401 diff --git a/.gitattributes b/.gitattributes index 2cc23be887c..b963dbdc1c5 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,4 +1,4 @@ -core/dbt/include/index.html binary +core/dbt/task/docs/index.html binary tests/functional/artifacts/data/state/*/manifest.json binary core/dbt/docs/build/html/searchindex.js binary core/dbt/docs/build/html/index.html binary diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 01da3492720..698a20b0539 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -13,48 +13,6 @@ # the core team as a whole will be assigned * @dbt-labs/core-team -### OSS Tooling Guild - -/.github/ @dbt-labs/guild-oss-tooling -.bumpversion.cfg @dbt-labs/guild-oss-tooling - -.changie.yaml @dbt-labs/guild-oss-tooling - -pre-commit-config.yaml @dbt-labs/guild-oss-tooling -pytest.ini @dbt-labs/guild-oss-tooling -tox.ini @dbt-labs/guild-oss-tooling - -pyproject.toml @dbt-labs/guild-oss-tooling -requirements.txt @dbt-labs/guild-oss-tooling -dev_requirements.txt @dbt-labs/guild-oss-tooling -/core/setup.py @dbt-labs/guild-oss-tooling -/core/MANIFEST.in @dbt-labs/guild-oss-tooling - -### ADAPTERS - -# Adapter interface ("base" + "sql" adapter defaults, cache) -/core/dbt/adapters @dbt-labs/core-adapters - -# Global project (default macros + materializations), starter project -/core/dbt/include @dbt-labs/core-adapters - -# Postgres plugin -/plugins/ @dbt-labs/core-adapters -/plugins/postgres/setup.py @dbt-labs/core-adapters @dbt-labs/guild-oss-tooling - -# Functional tests for adapter plugins -/tests/adapter @dbt-labs/core-adapters - -### TESTS - -# Overlapping ownership for vast majority of unit + functional tests - -# Perf regression testing framework -# This excludes the test project files itself since those aren't specific -# framework changes (excluded by not setting an owner next to it- no owner) -/performance @nathaniel-may -/performance/projects - ### ARTIFACTS /schemas/dbt @dbt-labs/cloud-artifacts diff --git a/.github/ISSUE_TEMPLATE/code-docs.yml b/.github/ISSUE_TEMPLATE/code-docs.yml new file mode 100644 index 00000000000..5cfb7b985bb --- /dev/null +++ b/.github/ISSUE_TEMPLATE/code-docs.yml @@ -0,0 +1,18 @@ +name: 📄 Code docs +description: Report an issue for markdown files within this repo, such as README, ARCHITECTURE, etc. +title: "[Code docs] " +labels: ["triage"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this code docs issue! + - type: textarea + attributes: + label: Please describe the issue and your proposals. 
+ description: | + Links? References? Anything that will give us more context about the issue you are encountering! + + Tip: You can attach images by clicking this area to highlight it and then dragging files in. + validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index ca13b07b887..14a1c050227 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,5 +1,8 @@ blank_issues_enabled: false contact_links: + - name: Documentation + url: https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose + about: Problems and issues with dbt product documentation hosted on docs.getdbt.com. Issues for markdown files within this repo, such as README, should be opened using the "Code docs" template. - name: Ask the community for help url: https://github.com/dbt-labs/docs.getdbt.com/discussions about: Need help troubleshooting? Check out our guide on how to ask diff --git a/.github/ISSUE_TEMPLATE/implementation-ticket.yml b/.github/ISSUE_TEMPLATE/implementation-ticket.yml index 439f94af74b..0b5729c6a47 100644 --- a/.github/ISSUE_TEMPLATE/implementation-ticket.yml +++ b/.github/ISSUE_TEMPLATE/implementation-ticket.yml @@ -1,7 +1,7 @@ name: 🛠️ Implementation description: This is an implementation ticket intended for use by the maintainers of dbt-core title: "[<project>] <title>" -labels: ["user_docs"] +labels: ["user docs"] body: - type: markdown attributes: @@ -11,7 +11,7 @@ body: label: Housekeeping description: > A couple friendly reminders: - 1. Remove the `user_docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes + 1. Remove the `user docs` label if the scope of this work does not require changes to https://docs.getdbt.com/docs: no end-user interface (e.g. yml spec, CLI, error messages, etc) or functional changes 2. Link any blocking issues in the "Blocked on" field under the "Core devs & maintainers" project. options: - label: I am a maintainer of dbt-core @@ -25,16 +25,43 @@ body: required: true - type: textarea attributes: - label: Acceptance critera + label: Acceptance criteria description: | What is the definition of done for this ticket? Include any relevant edge cases and/or test cases validations: required: true + - type: textarea + attributes: + label: Suggested Tests + description: | + Provide scenarios to test. Link to existing similar tests if appropriate. + placeholder: | + 1. Test with no version specified in the schema file and use selection logic on a versioned model for a specific version. Expect pass. + 2. Test with a version specified in the schema file that is not valid. Expect ParsingError. + validations: + required: true + - type: textarea + attributes: + label: Impact to Other Teams + description: | + Will this change impact other teams? Include details of the kinds of changes required (new tests, code changes, related tickets) and _add the relevant `Impact:[team]` label_. + placeholder: | + Example: This change impacts `dbt-redshift` because the tests will need to be modified. The `Impact:[Adapter]` label has been added. + validations: + required: true + - type: textarea + attributes: + label: Will backports be required? + description: | + Will this change need to be backported to previous versions?
Add details, possible blockers to backporting and _add the relevant backport labels `backport 1.x.latest`_ + placeholder: | + Example: Backport to 1.6.latest, 1.5.latest and 1.4.latest. Since 1.4 isn't using click, the backport may be complicated. The `backport 1.6.latest`, `backport 1.5.latest` and `backport 1.4.latest` labels have been added. + validations: + required: true - type: textarea attributes: label: Context description: | - Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes, Notion docs as appropriate - validations: + Provide the "why", motivation, and alternative approaches considered -- linking to previous refinement issues, spikes and documentation as appropriate validations: required: false diff --git a/.github/_README.md b/.github/_README.md index 2ba8e7357fa..cb8e276c1a1 100644 --- a/.github/_README.md +++ b/.github/_README.md @@ -47,7 +47,8 @@ ___ ### How to re-run jobs -- Some actions cannot be rerun in the GitHub UI. Namely the snyk checks and the cla check. Snyk checks are rerun by closing and reopening the PR. You can retrigger the cla check by commenting on the PR with `@cla-bot check` +- From the UI you can rerun from failure +- You can retrigger the cla check by commenting on the PR with `@cla-bot check` ___ diff --git a/.github/actions/latest-wrangler/action.yml b/.github/actions/latest-wrangler/action.yml index d712eecf64e..4ad97da473c 100644 --- a/.github/actions/latest-wrangler/action.yml +++ b/.github/actions/latest-wrangler/action.yml @@ -1,20 +1,21 @@ -name: "Github package 'latest' tag wrangler for containers" -description: "Determines wether or not a given dbt container should be given a bare 'latest' tag (I.E. dbt-core:latest)" +name: "GitHub package `latest` tag wrangler for containers" +description: "Determines if the published image should include `latest` tags" + inputs: package_name: - description: "Package to check (I.E. dbt-core, dbt-redshift, etc)" + description: "Package being published (i.e. `dbt-core`, `dbt-redshift`, etc.)" required: true new_version: - description: "Semver of the container being built (I.E. 1.0.4)" + description: "SemVer of the package being published (i.e. 
1.7.2, 1.8.0a1, etc.)" required: true - gh_token: - description: "Auth token for github (must have view packages scope)" + github_token: + description: "Auth token for GitHub (must have view packages scope)" required: true + outputs: - latest: - description: "Wether or not built container should be tagged latest (bool)" - minor_latest: - description: "Wether or not built container should be tagged minor.latest (bool)" + tags: + description: "A list of tags to associate with this version" + runs: using: "docker" image: "Dockerfile" diff --git a/.github/actions/latest-wrangler/main.py b/.github/actions/latest-wrangler/main.py index db91cf8354b..e001abec016 100644 --- a/.github/actions/latest-wrangler/main.py +++ b/.github/actions/latest-wrangler/main.py @@ -1,98 +1,71 @@ import os -import sys +from packaging.version import Version, parse import requests -from distutils.util import strtobool -from typing import Union -from packaging.version import parse, Version +import sys +from typing import List + + +def main(): + package_name: str = os.environ["INPUT_PACKAGE_NAME"] + new_version: Version = parse(os.environ["INPUT_NEW_VERSION"]) + github_token: str = os.environ["INPUT_GITHUB_TOKEN"] + + response = _package_metadata(package_name, github_token) + published_versions = _published_versions(response) + new_version_tags = _new_version_tags(new_version, published_versions) + _register_tags(new_version_tags, package_name) + + +def _package_metadata(package_name: str, github_token: str) -> requests.Response: + url = f"https://api.github.com/orgs/dbt-labs/packages/container/{package_name}/versions" + return requests.get(url, auth=("", github_token)) + + +def _published_versions(response: requests.Response) -> List[Version]: + package_metadata = response.json() + return [ + parse(tag) + for version in package_metadata + for tag in version["metadata"]["container"]["tags"] + if "latest" not in tag + ] -if __name__ == "__main__": - # get inputs - package = os.environ["INPUT_PACKAGE"] - new_version = parse(os.environ["INPUT_NEW_VERSION"]) - gh_token = os.environ["INPUT_GH_TOKEN"] - halt_on_missing = strtobool(os.environ.get("INPUT_HALT_ON_MISSING", "False")) - - # get package metadata from github - package_request = requests.get( - f"https://api.github.com/orgs/dbt-labs/packages/container/{package}/versions", - auth=("", gh_token), - ) - package_meta = package_request.json() - - # Log info if we don't get a 200 - if package_request.status_code != 200: - print(f"Call to GH API failed: {package_request.status_code} {package_meta['message']}") - - # Make an early exit if there is no matching package in github - if package_request.status_code == 404: - if halt_on_missing: - sys.exit(1) - # everything is the latest if the package doesn't exist - github_output = os.environ.get("GITHUB_OUTPUT") - with open(github_output, "at", encoding="utf-8") as gh_output: - gh_output.write("latest=True") - gh_output.write("minor_latest=True") - sys.exit(0) - - # TODO: verify package meta is "correct" - # https://github.com/dbt-labs/dbt-core/issues/4640 - - # map versions and tags - version_tag_map = { - version["id"]: version["metadata"]["container"]["tags"] for version in package_meta - } - - # is pre-release - pre_rel = True if any(x in str(new_version) for x in ["a", "b", "rc"]) else False - - # semver of current latest - for version, tags in version_tag_map.items(): - if "latest" in tags: - # N.B. 
This seems counterintuitive, but we expect any version tagged - # 'latest' to have exactly three associated tags: - # latest, major.minor.latest, and major.minor.patch. - # Subtracting everything that contains the string 'latest' gets us - # the major.minor.patch which is what's needed for comparison. - current_latest = parse([tag for tag in tags if "latest" not in tag][0]) - else: - current_latest = False - - # semver of current_minor_latest - for version, tags in version_tag_map.items(): - if f"{new_version.major}.{new_version.minor}.latest" in tags: - # Similar to above, only now we expect exactly two tags: - # major.minor.patch and major.minor.latest - current_minor_latest = parse([tag for tag in tags if "latest" not in tag][0]) - else: - current_minor_latest = False - - def is_latest( - pre_rel: bool, new_version: Version, remote_latest: Union[bool, Version] - ) -> bool: - """Determine if a given contaier should be tagged 'latest' based on: - - it's pre-release status - - it's version - - the version of a previously identified container tagged 'latest' - - :param pre_rel: Wether or not the version of the new container is a pre-release - :param new_version: The version of the new container - :param remote_latest: The version of the previously identified container that's - already tagged latest or False - """ - # is a pre-release = not latest - if pre_rel: - return False - # + no latest tag found = is latest - if not remote_latest: - return True - # + if remote version is lower than current = is latest, else not latest - return True if remote_latest <= new_version else False - - latest = is_latest(pre_rel, new_version, current_latest) - minor_latest = is_latest(pre_rel, new_version, current_minor_latest) +def _new_version_tags(new_version: Version, published_versions: List[Version]) -> List[str]: + # the package version is always a tag + tags = [str(new_version)] + # pre-releases don't get tagged with `latest` + if new_version.is_prerelease: + return tags + + if new_version > max(published_versions): + tags.append("latest") + + published_patches = [ + version + for version in published_versions + if version.major == new_version.major and version.minor == new_version.minor + ] + if new_version > max(published_patches): + tags.append(f"{new_version.major}.{new_version.minor}.latest") + + return tags + + +def _register_tags(tags: List[str], package_name: str) -> None: + fully_qualified_tags = ",".join([f"ghcr.io/dbt-labs/{package_name}:{tag}" for tag in tags]) github_output = os.environ.get("GITHUB_OUTPUT") with open(github_output, "at", encoding="utf-8") as gh_output: - gh_output.write(f"latest={latest}") - gh_output.write(f"minor_latest={minor_latest}") + gh_output.write(f"fully_qualified_tags={fully_qualified_tags}") + + +def _validate_response(response: requests.Response) -> None: + message = response["message"] + if response.status_code != 200: + print(f"Call to GitHub API failed: {response.status_code} - {message}") + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/.github/actions/setup-postgres-linux/action.yml b/.github/actions/setup-postgres-linux/action.yml index 1c8fc772a8a..30050b1a38d 100644 --- a/.github/actions/setup-postgres-linux/action.yml +++ b/.github/actions/setup-postgres-linux/action.yml @@ -5,6 +5,15 @@ runs: steps: - shell: bash run: | - sudo systemctl start postgresql.service + sudo apt-get --purge remove postgresql postgresql-* + sudo apt update -y + sudo apt install gnupg2 wget vim -y + sudo sh -c 'echo "deb 
https://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list' + curl -fsSL https://www.postgresql.org/media/keys/ACCC4CF8.asc|sudo gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg + sudo apt update -y + sudo apt install postgresql-16 + sudo apt-get -y install postgresql postgresql-contrib + sudo systemctl start postgresql + sudo systemctl enable postgresql pg_isready sudo -u postgres bash ${{ github.action_path }}/setup_db.sh diff --git a/.github/actions/setup-postgres-macos/action.yml b/.github/actions/setup-postgres-macos/action.yml index af9a9fe1657..3ce1cd7144e 100644 --- a/.github/actions/setup-postgres-macos/action.yml +++ b/.github/actions/setup-postgres-macos/action.yml @@ -5,7 +5,8 @@ runs: steps: - shell: bash run: | - brew services start postgresql + brew install postgresql@16 + brew services start postgresql@16 echo "Check PostgreSQL service is running" i=10 COMMAND='pg_isready' diff --git a/.github/actions/setup-postgres-windows/action.yml b/.github/actions/setup-postgres-windows/action.yml index 419b5e267cb..2b6217d360f 100644 --- a/.github/actions/setup-postgres-windows/action.yml +++ b/.github/actions/setup-postgres-windows/action.yml @@ -5,8 +5,22 @@ runs: steps: - shell: pwsh run: | - $pgService = Get-Service -Name postgresql* + Write-Host -Object "Installing PostgreSQL 16 as windows service..." + $installerArgs = @("--install_runtimes 0", "--superpassword root", "--enable_acledit 1", "--unattendedmodeui none", "--mode unattended") + $filePath = Invoke-DownloadWithRetry -Url "https://get.enterprisedb.com/postgresql/postgresql-16.1-1-windows-x64.exe" -Path "$env:PGROOT/postgresql-16.1-1-windows-x64.exe" + Start-Process -FilePath $filePath -ArgumentList $installerArgs -Wait -PassThru + + Write-Host -Object "Validating PostgreSQL 16 Install..." + Get-Service -Name postgresql* + $pgReady = Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru + $exitCode = $pgReady.ExitCode + if ($exitCode -ne 0) { + Write-Host -Object "PostgreSQL is not ready. Exitcode: $exitCode" + exit $exitCode + } + + Write-Host -Object "Starting PostgreSQL 16 Service..." + $pgService = Get-Service -Name postgresql-x64-16 Set-Service -InputObject $pgService -Status running -StartupType automatic - Start-Process -FilePath "$env:PGBIN\pg_isready" -Wait -PassThru $env:Path += ";$env:PGBIN" bash ${{ github.action_path }}/setup_db.sh diff --git a/.github/dependabot.yml b/.github/dependabot.yml index e3158a0bae5..ae39691c7e0 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -11,11 +11,6 @@ updates: schedule: interval: "daily" rebase-strategy: "disabled" - - package-ecosystem: "pip" - directory: "/plugins/postgres" - schedule: - interval: "daily" - rebase-strategy: "disabled" # docker dependencies - package-ecosystem: "docker" @@ -28,3 +23,10 @@ updates: schedule: interval: "weekly" rebase-strategy: "disabled" + + # github dependencies + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "weekly" + rebase-strategy: "disabled" diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index f3fe5ac83d5..94c53a02ef1 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,15 +1,12 @@ -resolves # -[docs](https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose) dbt-labs/docs.getdbt.com/# +Resolves # <!--- - Include the number of the issue addressed by this PR above if applicable. 
+ Include the number of the issue addressed by this PR above, if applicable. PRs for code changes without an associated issue *will not be merged*. See CONTRIBUTING.md for more information. - Include the number of the docs issue that was opened for this PR. If - this change has no user-facing implications, "N/A" suffices instead. New - docs tickets can be created by clicking the link above or by going to - https://github.com/dbt-labs/docs.getdbt.com/issues/new/choose. + Add the `user docs` label to this PR if it will need docs changes. An + issue will get opened in docs.getdbt.com upon successful merge of this PR. --> ### Problem @@ -29,7 +26,8 @@ resolves # ### Checklist -- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me -- [ ] I have run this code in development and it appears to resolve the stated issue -- [ ] This PR includes tests, or tests are not required/relevant for this PR -- [ ] This PR has no interface changes (e.g. macros, cli, logs, json artifacts, config files, adapter interface, etc) or this PR has already received feedback and approval from Product or DX +- [ ] I have read [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md) and understand what's expected of me. +- [ ] I have run this code in development, and it appears to resolve the stated issue. +- [ ] This PR includes tests, or tests are not required or relevant for this PR. +- [ ] This PR has no interface changes (e.g., macros, CLI, logs, JSON artifacts, config files, adapter interface, etc.) or this PR has already received feedback and approval from Product or DX. +- [ ] This PR includes [type annotations](https://docs.python.org/3/library/typing.html) for new and modified functions. diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml index 1cbfb7a66a0..b20069df9b6 100644 --- a/.github/workflows/backport.yml +++ b/.github/workflows/backport.yml @@ -35,6 +35,6 @@ jobs: github.event.pull_request.merged && contains(github.event.label.name, 'backport') steps: - - uses: tibdex/backport@v2.0.3 + - uses: tibdex/backport@v2.0.4 with: github_token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/bot-changelog.yml b/.github/workflows/bot-changelog.yml index 07e978d6229..c85343ea333 100644 --- a/.github/workflows/bot-changelog.yml +++ b/.github/workflows/bot-changelog.yml @@ -41,8 +41,6 @@ jobs: include: - label: "dependencies" changie_kind: "Dependencies" - - label: "snyk" - changie_kind: "Security" runs-on: ubuntu-latest steps: @@ -58,4 +56,4 @@ jobs: commit_message: "Add automated changelog yaml from template for bot PR" changie_kind: ${{ matrix.changie_kind }} label: ${{ matrix.label }} - custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n PR: ${{ github.event.pull_request.number }}" + custom_changelog_string: "custom:\n Author: ${{ github.event.pull_request.user.login }}\n Issue: ${{ github.event.pull_request.number }}" diff --git a/.github/workflows/changelog-existence.yml b/.github/workflows/changelog-existence.yml index 7be8c1accb5..e5127073cfe 100644 --- a/.github/workflows/changelog-existence.yml +++ b/.github/workflows/changelog-existence.yml @@ -2,10 +2,8 @@ # Checks that a file has been committed under the /.changes directory # as a new CHANGELOG entry. Cannot check for a specific filename as # it is dynamically generated by change type and timestamp. 
-# This workflow should not require any secrets since it runs for PRs -# from forked repos. -# By default, secrets are not passed to workflows running from -# a forked repo. +# This workflow runs on pull_request_target because it requires +# secrets to post comments. # **why?** # Ensure code change gets reflected in the CHANGELOG. @@ -19,8 +17,10 @@ name: Check Changelog Entry on: - pull_request: + pull_request_target: types: [opened, reopened, labeled, unlabeled, synchronize] + paths-ignore: ['.changes/**', '.github/**', 'tests/**', '**.md', '**.yml'] + workflow_dispatch: defaults: diff --git a/.github/workflows/check-artifact-changes.yml b/.github/workflows/check-artifact-changes.yml new file mode 100644 index 00000000000..cce6d8f799d --- /dev/null +++ b/.github/workflows/check-artifact-changes.yml @@ -0,0 +1,41 @@ +name: Check Artifact Changes + +on: + pull_request: + types: [ opened, reopened, labeled, unlabeled, synchronize ] + paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ] + + workflow_dispatch: + +jobs: + check-artifact-changes: + runs-on: ubuntu-latest + if: ${{ !contains(github.event.pull_request.labels.*.name, 'artifact_minor_upgrade') }} + steps: + - name: Checkout code + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Check for changes in core/dbt/artifacts + # https://github.com/marketplace/actions/paths-changes-filter + uses: dorny/paths-filter@v3 + id: check_artifact_changes + with: + filters: | + artifacts_changed: + - 'core/dbt/artifacts/**' + list-files: shell + + - name: Fail CI if artifacts have changed + if: steps.check_artifact_changes.outputs.artifacts_changed == 'true' + run: | + echo "CI failure: Artifact changes checked in core/dbt/artifacts directory." + echo "Files changed: ${{ steps.check_artifact_changes.outputs.artifacts_changed_files }}" + echo "To bypass this check, confirm that the change is not breaking (https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/artifacts/README.md#breaking-changes) and add the 'artifact_minor_upgrade' label to the PR. Modifications and additions to all fields require updates to https://github.com/dbt-labs/dbt-jsonschema." + exit 1 + + - name: CI check passed + if: steps.check_artifact_changes.outputs.artifacts_changed == 'false' + run: | + echo "No prohibited artifact changes found in core/dbt/artifacts. CI check passed." 
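For reference, the gate that the new `check-artifact-changes.yml` workflow applies can be approximated locally before opening a PR. The sketch below is illustrative only: the `core/dbt/artifacts/` path and `artifact_minor_upgrade` label come from the workflow above, but the helper names and the `origin/main` base ref are assumptions, not part of dbt-core. It lists the files changed relative to the base branch and reports a failure when anything under `core/dbt/artifacts/` changed without the bypass label.

```python
# Rough local approximation of check-artifact-changes.yml (illustrative only).
# Assumes git is available and that `origin/main` is the PR's base branch.
import subprocess
import sys
from typing import List

ARTIFACTS_PREFIX = "core/dbt/artifacts/"
BYPASS_LABEL = "artifact_minor_upgrade"


def changed_files(base_ref: str = "origin/main") -> List[str]:
    # `git diff --name-only base...HEAD` lists files changed on this branch,
    # similar to what the dorny/paths-filter step evaluates for the pull request.
    result = subprocess.run(
        ["git", "diff", "--name-only", f"{base_ref}...HEAD"],
        capture_output=True,
        text=True,
        check=True,
    )
    return [line for line in result.stdout.splitlines() if line]


def check_artifacts(labels: List[str], base_ref: str = "origin/main") -> int:
    artifact_changes = [f for f in changed_files(base_ref) if f.startswith(ARTIFACTS_PREFIX)]
    if not artifact_changes:
        print("No artifact changes found in core/dbt/artifacts. Check would pass.")
        return 0
    if BYPASS_LABEL in labels:
        print(f"Artifact changes found, but '{BYPASS_LABEL}' label is applied. Check would pass.")
        return 0
    print("Artifact changes detected; confirm they are non-breaking or add the bypass label:")
    print("\n".join(f"  {f}" for f in artifact_changes))
    return 1


if __name__ == "__main__":
    # Any labels on the PR can be passed as arguments, e.g.:
    #   python check_artifacts.py artifact_minor_upgrade
    sys.exit(check_artifacts(labels=sys.argv[1:]))
```

In CI the equivalent decision is made by the `dorny/paths-filter` step plus the label condition in the workflow above; this script is only a convenience sketch for contributors who want to anticipate the check before pushing.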
diff --git a/.github/workflows/community-label.yml b/.github/workflows/community-label.yml new file mode 100644 index 00000000000..3b75a3c14e7 --- /dev/null +++ b/.github/workflows/community-label.yml @@ -0,0 +1,39 @@ +# **what?** +# Label a PR with a `community` label when a PR is opened by a user outside core/adapters + +# **why?** +# To streamline triage and ensure that community contributions are recognized and prioritized + +# **when?** +# When a PR is opened, not in draft or moved from draft to ready for review + + +name: Label community PRs + +on: + # have to use pull_request_target since community PRs come from forks + pull_request_target: + types: [opened, ready_for_review] + +defaults: + run: + shell: bash + +permissions: + pull-requests: write # labels PRs + contents: read # reads team membership + +jobs: + open_issues: + # If this PR already has the community label, no need to relabel it + # If this PR is opened and not draft, determine if it needs to be labeled + # if the PR is converted out of draft, determine if it needs to be labeled + if: | + (!contains(github.event.pull_request.labels.*.name, 'community') && + (github.event.action == 'opened' && github.event.pull_request.draft == false ) || + github.event.action == 'ready_for_review' ) + uses: dbt-labs/actions/.github/workflows/label-community.yml@main + with: + github_team: 'core-group' + label: 'community' + secrets: inherit diff --git a/.github/workflows/docs-issue.yml b/.github/workflows/docs-issue.yml new file mode 100644 index 00000000000..085e68d4d3b --- /dev/null +++ b/.github/workflows/docs-issue.yml @@ -0,0 +1,41 @@ +# **what?** +# Open an issue in docs.getdbt.com when an issue is labeled `user docs` and closed as completed + +# **why?** +# To reduce barriers for keeping docs up to date + +# **when?** +# When an issue is labeled `user docs` and is closed as completed. Can be labeled before or after the issue is closed. + + +name: Open issues in docs.getdbt.com repo when an issue is labeled +run-name: "Open an issue in docs.getdbt.com for issue #${{ github.event.issue.number }}" + +on: + issues: + types: [labeled, closed] + +defaults: + run: + shell: bash + +permissions: + issues: write # comments on issues + +jobs: + open_issues: + # we only want to run this when the issue is closed as completed and the label `user docs` has been assigned. + # If this logic does not exist in this workflow, it runs the + # risk of duplicaton of issues being created due to merge and label both triggering this workflow to run and neither having + # generating the comment before the other runs. This lives here instead of the shared workflow because this is where we + # decide if it should run or not. 
+ if: | + (github.event.issue.state == 'closed' && + github.event.issue.state_reason == 'completed' && + contains( github.event.issue.labels.*.name, 'user docs')) + uses: dbt-labs/actions/.github/workflows/open-issue-in-repo.yml@main + with: + issue_repository: "dbt-labs/docs.getdbt.com" + issue_title: "[Core] Docs Changes Needed from ${{ github.event.repository.name }} Issue #${{ github.event.issue.number }}" + issue_body: "At a minimum, update body to include a link to the page on docs.getdbt.com requiring updates and what part(s) of the page you would like to see updated.\n Originating from this issue: https://github.com/dbt-labs/dbt-core/issues/${{ github.event.issue.number }}" + secrets: inherit diff --git a/.github/workflows/jira-creation.yml b/.github/workflows/jira-creation.yml deleted file mode 100644 index 79936a93a85..00000000000 --- a/.github/workflows/jira-creation.yml +++ /dev/null @@ -1,26 +0,0 @@ -# **what?** -# Mirrors issues into Jira. Includes the information: title, -# GitHub Issue ID and URL - -# **why?** -# Jira is our tool for tracking and we need to see these issues in there - -# **when?** -# On issue creation or when an issue is labeled `Jira` - -name: Jira Issue Creation - -on: - issues: - types: [opened, labeled] - -permissions: - issues: write - -jobs: - call-creation-action: - uses: dbt-labs/actions/.github/workflows/jira-creation-actions.yml@main - secrets: - JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} - JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} - JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} diff --git a/.github/workflows/jira-label.yml b/.github/workflows/jira-label.yml deleted file mode 100644 index b39349170b4..00000000000 --- a/.github/workflows/jira-label.yml +++ /dev/null @@ -1,26 +0,0 @@ -# **what?** -# Calls mirroring Jira label Action. 
Includes adding a new label -# to an existing issue or removing a label as well - -# **why?** -# Jira is our tool for tracking and we need to see these labels in there - -# **when?** -# On labels being added or removed from issues - -name: Jira Label Mirroring - -on: - issues: - types: [labeled, unlabeled] - -permissions: - issues: read - -jobs: - call-label-action: - uses: dbt-labs/actions/.github/workflows/jira-label-actions.yml@main - secrets: - JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} - JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} - JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} diff --git a/.github/workflows/jira-transition.yml b/.github/workflows/jira-transition.yml deleted file mode 100644 index 563d8197a8a..00000000000 --- a/.github/workflows/jira-transition.yml +++ /dev/null @@ -1,27 +0,0 @@ -# **what?** -# Transition a Jira issue to a new state -# Only supports these GitHub Issue transitions: -# closed, deleted, reopened - -# **why?** -# Jira needs to be kept up-to-date - -# **when?** -# On issue closing, deletion, reopened - -name: Jira Issue Transition - -on: - issues: - types: [closed, deleted, reopened] - -# no special access is needed -permissions: read-all - -jobs: - call-transition-action: - uses: dbt-labs/actions/.github/workflows/jira-transition-actions.yml@main - secrets: - JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} - JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} - JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 8c41ac5221c..b596bf7293f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -47,10 +47,10 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.8' @@ -74,17 +74,17 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12" ] env: TOXENV: "unit" steps: - name: Check out the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -95,8 +95,12 @@ jobs: python -m pip install tox tox --version - - name: Run tox - run: tox + - name: Run unit tests + uses: nick-fields/retry@v3 + with: + timeout_minutes: 10 + max_attempts: 3 + command: tox -e unit - name: Get current date if: always() @@ -107,9 +111,10 @@ jobs: - name: Upload Unit Test Coverage to Codecov if: ${{ matrix.python-version == '3.11' }} - uses: codecov/codecov-action@v3 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + flags: unit integration-metadata: name: integration test metadata generation @@ -134,7 +139,7 @@ jobs: - name: generate include id: generate-include run: | - INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-latest"' ) + INCLUDE=('"python-version":"3.8","os":"windows-latest"' '"python-version":"3.8","os":"macos-12"' ) INCLUDE_GROUPS="[" for include in ${INCLUDE[@]}; do for group in $(seq 1 ${{ env.PYTHON_INTEGRATION_TEST_WORKERS }}); do @@ -156,7 +161,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: [ "3.8", "3.9", "3.10", "3.11", "3.12" ] os: [ubuntu-20.04] split-group: ${{ 
fromJson(needs.integration-metadata.outputs.split-groups) }} include: ${{ fromJson(needs.integration-metadata.outputs.include) }} @@ -174,10 +179,10 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -200,8 +205,12 @@ jobs: python -m pip install tox tox --version - - name: Run tests - run: tox -- --ddtrace + - name: Run integration tests + uses: nick-fields/retry@v3 + with: + timeout_minutes: 30 + max_attempts: 3 + command: tox -- --ddtrace env: PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }} @@ -212,26 +221,35 @@ jobs: CURRENT_DATE=$(date +'%Y-%m-%dT%H_%M_%S') # no colons allowed for artifacts echo "date=$CURRENT_DATE" >> $GITHUB_OUTPUT - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 if: always() with: - name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ steps.date.outputs.date }} + name: logs_${{ matrix.python-version }}_${{ matrix.os }}_${{ matrix.split-group }}_${{ steps.date.outputs.date }} path: ./logs - name: Upload Integration Test Coverage to Codecov if: ${{ matrix.python-version == '3.11' }} - uses: codecov/codecov-action@v3 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + uses: codecov/codecov-action@v4 + with: + token: ${{ secrets.CODECOV_TOKEN }} + flags: integration integration-report: - name: integration test suite + if: ${{ always() }} + name: Integration Test Suite runs-on: ubuntu-latest needs: integration steps: - - name: "[Notification] Integration test suite passes" + - name: "Integration Tests Failed" + if: ${{ contains(needs.integration.result, 'failure') || contains(needs.integration.result, 'cancelled') }} + # when this is true the next step won't execute + run: | + echo "::notice title='Integration test suite failed'" + exit 1 + + - name: "Integration Tests Passed" run: | - echo "::notice title="Integration test suite passes"" + echo "::notice title='Integration test suite passed'" build: name: build packages @@ -240,10 +258,10 @@ jobs: steps: - name: Check out the repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: '3.8' @@ -278,7 +296,7 @@ jobs: - name: Install source distributions # ignore dbt-1.0.0, which intentionally raises an error when installed from source run: | - find ./dist/dbt-[a-z]*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ + find ./dist/*.gz -maxdepth 1 -type f | xargs python -m pip install --force-reinstall --find-links=dist/ - name: Check source distributions run: | diff --git a/.github/workflows/model_performance.yml b/.github/workflows/model_performance.yml index 869fa0b2fc6..8d238ac574e 100644 --- a/.github/workflows/model_performance.yml +++ b/.github/workflows/model_performance.yml @@ -48,7 +48,7 @@ jobs: # explicitly checkout the performance runner from main regardless of which # version we are modeling. - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: main @@ -87,12 +87,12 @@ jobs: # explicitly checkout the performance runner from main regardless of which # version we are modeling. 
- name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: main # attempts to access a previously cached runner - - uses: actions/cache@v3 + - uses: actions/cache@v4 id: cache with: path: ${{ env.RUNNER_CACHE_PATH }} @@ -148,7 +148,7 @@ jobs: echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}" - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" @@ -160,13 +160,13 @@ jobs: # explicitly checkout main to get the latest project definitions - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: main # this was built in the previous job so it will be there. - name: Fetch Runner - uses: actions/cache@v3 + uses: actions/cache@v4 id: cache with: path: ${{ env.RUNNER_CACHE_PATH }} @@ -195,7 +195,7 @@ jobs: - name: '[DEBUG] ls baseline directory after run' run: ls -R performance/baselines/ - - uses: actions/upload-artifact@v3 + - uses: actions/upload-artifact@v4 with: name: baseline path: performance/baselines/${{ needs.set-variables.outputs.release_id }}/ @@ -225,7 +225,7 @@ jobs: echo "release_branch: ${{ needs.set-variables.outputs.release_branch }}" - name: Checkout - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ matrix.base-branch }} @@ -235,7 +235,7 @@ jobs: git push origin ${{ matrix.target-branch }} git branch --set-upstream-to=origin/${{ matrix.target-branch }} ${{ matrix.target-branch }} - - uses: actions/download-artifact@v3 + - uses: actions/download-artifact@v4 with: name: baseline path: performance/baselines/${{ needs.set-variables.outputs.release_id }} @@ -253,7 +253,7 @@ jobs: push: 'origin origin/${{ matrix.target-branch }}' - name: Create Pull Request - uses: peter-evans/create-pull-request@v5 + uses: peter-evans/create-pull-request@v6 with: author: 'Github Build Bot <buildbot@fishtownanalytics.com>' base: ${{ matrix.base-branch }} diff --git a/.github/workflows/nightly-release.yml b/.github/workflows/nightly-release.yml index f839c051c6c..d4f2e5bab15 100644 --- a/.github/workflows/nightly-release.yml +++ b/.github/workflows/nightly-release.yml @@ -20,6 +20,7 @@ on: permissions: contents: write # this is the permission that allows creating a new release + packages: write # this is the permission that allows Docker release defaults: run: @@ -33,22 +34,15 @@ jobs: runs-on: ubuntu-latest outputs: - commit_sha: ${{ steps.resolve-commit-sha.outputs.release_commit }} version_number: ${{ steps.nightly-release-version.outputs.number }} release_branch: ${{ steps.release-branch.outputs.name }} steps: - name: "Checkout ${{ github.repository }} Branch ${{ env.RELEASE_BRANCH }}" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ env.RELEASE_BRANCH }} - - name: "Resolve Commit To Release" - id: resolve-commit-sha - run: | - commit_sha=$(git rev-parse HEAD) - echo "release_commit=$commit_sha" >> $GITHUB_OUTPUT - - name: "Get Current Version Number" id: version-number-sources run: | @@ -88,7 +82,6 @@ jobs: steps: - name: "[DEBUG] Log Outputs" run: | - echo commit_sha : ${{ needs.aggregate-release-data.outputs.commit_sha }} echo version_number: ${{ needs.aggregate-release-data.outputs.version_number }} echo release_branch: ${{ needs.aggregate-release-data.outputs.release_branch }} @@ -97,13 +90,8 @@ jobs: uses: ./.github/workflows/release.yml with: - sha: ${{ needs.aggregate-release-data.outputs.commit_sha }} target_branch: ${{ needs.aggregate-release-data.outputs.release_branch }} version_number: ${{ 
needs.aggregate-release-data.outputs.version_number }} - build_script_path: "scripts/build-dist.sh" - env_setup_script_path: "scripts/env-setup.sh" - s3_bucket_name: "core-team-artifacts" - package_test_command: "dbt --version" test_run: true nightly_release: true secrets: inherit diff --git a/.github/workflows/release-docker.yml b/.github/workflows/release-docker.yml deleted file mode 100644 index ae83eb5f8a3..00000000000 --- a/.github/workflows/release-docker.yml +++ /dev/null @@ -1,118 +0,0 @@ -# **what?** -# This workflow will generate a series of docker images for dbt and push them to the github container registry - -# **why?** -# Docker images for dbt are used in a number of important places throughout the dbt ecosystem. This is how we keep those images up-to-date. - -# **when?** -# This is triggered manually - -# **next steps** -# - build this into the release workflow (or conversly, break out the different release methods into their own workflow files) - -name: Docker release - -permissions: - packages: write - -on: - workflow_dispatch: - inputs: - package: - description: The package to release. _One_ of [dbt-core, dbt-redshift, dbt-bigquery, dbt-snowflake, dbt-spark, dbt-postgres] - required: true - version_number: - description: The release version number (i.e. 1.0.0b1). Do not include `latest` tags or a leading `v`! - required: true - -jobs: - get_version_meta: - name: Get version meta - runs-on: ubuntu-latest - outputs: - major: ${{ steps.version.outputs.major }} - minor: ${{ steps.version.outputs.minor }} - patch: ${{ steps.version.outputs.patch }} - latest: ${{ steps.latest.outputs.latest }} - minor_latest: ${{ steps.latest.outputs.minor_latest }} - steps: - - uses: actions/checkout@v3 - - name: Split version - id: version - run: | - IFS="." 
read -r MAJOR MINOR PATCH <<< ${{ github.event.inputs.version_number }} - echo "major=$MAJOR" >> $GITHUB_OUTPUT - echo "minor=$MINOR" >> $GITHUB_OUTPUT - echo "patch=$PATCH" >> $GITHUB_OUTPUT - - - name: Is pkg 'latest' - id: latest - uses: ./.github/actions/latest-wrangler - with: - package: ${{ github.event.inputs.package }} - new_version: ${{ github.event.inputs.version_number }} - gh_token: ${{ secrets.GITHUB_TOKEN }} - halt_on_missing: False - - setup_image_builder: - name: Set up docker image builder - runs-on: ubuntu-latest - needs: [get_version_meta] - steps: - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 - - build_and_push: - name: Build images and push to GHCR - runs-on: ubuntu-latest - needs: [setup_image_builder, get_version_meta] - steps: - - name: Get docker build arg - id: build_arg - run: | - BUILD_ARG_NAME=$(echo ${{ github.event.inputs.package }} | sed 's/\-/_/g') - BUILD_ARG_VALUE=$(echo ${{ github.event.inputs.package }} | sed 's/postgres/core/g') - echo "build_arg_name=$BUILD_ARG_NAME" >> $GITHUB_OUTPUT - echo "build_arg_value=$BUILD_ARG_VALUE" >> $GITHUB_OUTPUT - - - name: Log in to the GHCR - uses: docker/login-action@v2 - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - - - name: Build and push MAJOR.MINOR.PATCH tag - uses: docker/build-push-action@v4 - with: - file: docker/Dockerfile - push: True - target: ${{ github.event.inputs.package }} - build-args: | - ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }} - tags: | - ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ github.event.inputs.version_number }} - - - name: Build and push MINOR.latest tag - uses: docker/build-push-action@v4 - if: ${{ needs.get_version_meta.outputs.minor_latest == 'True' }} - with: - file: docker/Dockerfile - push: True - target: ${{ github.event.inputs.package }} - build-args: | - ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }} - tags: | - ghcr.io/dbt-labs/${{ github.event.inputs.package }}:${{ needs.get_version_meta.outputs.major }}.${{ needs.get_version_meta.outputs.minor }}.latest - - - name: Build and push latest tag - uses: docker/build-push-action@v4 - if: ${{ needs.get_version_meta.outputs.latest == 'True' }} - with: - file: docker/Dockerfile - push: True - target: ${{ github.event.inputs.package }} - build-args: | - ${{ steps.build_arg.outputs.build_arg_name }}_ref=${{ steps.build_arg.outputs.build_arg_value }}@v${{ github.event.inputs.version_number }} - tags: | - ghcr.io/dbt-labs/${{ github.event.inputs.package }}:latest diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e3722ea19cd..63d963f0551 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,6 +7,7 @@ # - run unit and integration tests against given commit; # - build and package that SHA; # - release it to GitHub and PyPI with that specific build; +# - release it to Docker # # **why?** # Ensure an automated and tested release process @@ -14,15 +15,12 @@ # **when?** # This workflow can be run manually on demand or can be called by other workflows -name: Release to GitHub and PyPI +name: "Release to GitHub, PyPI & Docker" +run-name: "Release ${{ inputs.version_number }} to GitHub, PyPI & Docker" on: workflow_dispatch: inputs: - sha: - description: "The last commit sha in the release" - type: string - 
required: true target_branch: description: "The branch to release from" type: string @@ -31,26 +29,6 @@ on: description: "The release version number (i.e. 1.0.0b1)" type: string required: true - build_script_path: - description: "Build script path" - type: string - default: "scripts/build-dist.sh" - required: true - env_setup_script_path: - description: "Environment setup script path" - type: string - default: "scripts/env-setup.sh" - required: false - s3_bucket_name: - description: "AWS S3 bucket name" - type: string - default: "core-team-artifacts" - required: true - package_test_command: - description: "Package test command" - type: string - default: "dbt --version" - required: true test_run: description: "Test run (Publish release as draft)" type: boolean @@ -61,12 +39,13 @@ on: type: boolean default: false required: false + only_docker: + description: "Only release Docker image, skip GitHub & PyPI" + type: boolean + default: false + required: false workflow_call: inputs: - sha: - description: "The last commit sha in the release" - type: string - required: true target_branch: description: "The branch to release from" type: string @@ -75,26 +54,6 @@ on: description: "The release version number (i.e. 1.0.0b1)" type: string required: true - build_script_path: - description: "Build script path" - type: string - default: "scripts/build-dist.sh" - required: true - env_setup_script_path: - description: "Environment setup script path" - type: string - default: "scripts/env-setup.sh" - required: false - s3_bucket_name: - description: "AWS S3 bucket name" - type: string - default: "core-team-artifacts" - required: true - package_test_command: - description: "Package test command" - type: string - default: "dbt --version" - required: true test_run: description: "Test run (Publish release as draft)" type: boolean @@ -114,32 +73,47 @@ defaults: shell: bash jobs: - log-inputs: + job-setup: name: Log Inputs runs-on: ubuntu-latest + outputs: + starting_sha: ${{ steps.set_sha.outputs.starting_sha }} steps: - name: "[DEBUG] Print Variables" run: | - echo The last commit sha in the release: ${{ inputs.sha }} + echo Inputs echo The branch to release from: ${{ inputs.target_branch }} echo The release version number: ${{ inputs.version_number }} - echo Build script path: ${{ inputs.build_script_path }} - echo Environment setup script path: ${{ inputs.env_setup_script_path }} - echo AWS S3 bucket name: ${{ inputs.s3_bucket_name }} - echo Package test command: ${{ inputs.package_test_command }} echo Test run: ${{ inputs.test_run }} echo Nightly release: ${{ inputs.nightly_release }} + echo Only Docker: ${{ inputs.only_docker }} + + - name: "Checkout target branch" + uses: actions/checkout@v4 + with: + ref: ${{ inputs.target_branch }} + + # release-prep.yml really shouldn't take in the sha but since core + all adapters + # depend on it now this workaround lets us not input it manually with risk of error. + # The changes always get merged into the head so we can't use a specific commit for + # releases anyways. 
+ - name: "Capture sha" + id: set_sha + run: | + echo "starting_sha=$(git rev-parse HEAD)" >> $GITHUB_OUTPUT bump-version-generate-changelog: name: Bump package version, Generate changelog + needs: [job-setup] + if: ${{ !inputs.only_docker }} uses: dbt-labs/dbt-release/.github/workflows/release-prep.yml@main with: - sha: ${{ inputs.sha }} + sha: ${{ needs.job-setup.outputs.starting_sha }} version_number: ${{ inputs.version_number }} target_branch: ${{ inputs.target_branch }} - env_setup_script_path: ${{ inputs.env_setup_script_path }} + env_setup_script_path: "scripts/env-setup.sh" test_run: ${{ inputs.test_run }} nightly_release: ${{ inputs.nightly_release }} @@ -147,7 +121,7 @@ jobs: log-outputs-bump-version-generate-changelog: name: "[Log output] Bump package version, Generate changelog" - if: ${{ !failure() && !cancelled() }} + if: ${{ !failure() && !cancelled() && !inputs.only_docker }} needs: [bump-version-generate-changelog] @@ -161,8 +135,8 @@ jobs: build-test-package: name: Build, Test, Package - if: ${{ !failure() && !cancelled() }} - needs: [bump-version-generate-changelog] + if: ${{ !failure() && !cancelled() && !inputs.only_docker }} + needs: [job-setup, bump-version-generate-changelog] uses: dbt-labs/dbt-release/.github/workflows/build.yml@main @@ -170,9 +144,9 @@ jobs: sha: ${{ needs.bump-version-generate-changelog.outputs.final_sha }} version_number: ${{ inputs.version_number }} changelog_path: ${{ needs.bump-version-generate-changelog.outputs.changelog_path }} - build_script_path: ${{ inputs.build_script_path }} - s3_bucket_name: ${{ inputs.s3_bucket_name }} - package_test_command: ${{ inputs.package_test_command }} + build_script_path: "scripts/build-dist.sh" + s3_bucket_name: "core-team-artifacts" + package_test_command: "dbt --version" test_run: ${{ inputs.test_run }} nightly_release: ${{ inputs.nightly_release }} @@ -182,7 +156,7 @@ jobs: github-release: name: GitHub Release - if: ${{ !failure() && !cancelled() }} + if: ${{ !failure() && !cancelled() && !inputs.only_docker }} needs: [bump-version-generate-changelog, build-test-package] @@ -209,6 +183,51 @@ jobs: PYPI_API_TOKEN: ${{ secrets.PYPI_API_TOKEN }} TEST_PYPI_API_TOKEN: ${{ secrets.TEST_PYPI_API_TOKEN }} + determine-docker-package: + # dbt-postgres exists within dbt-core for versions 1.7 and earlier but is a separate package for 1.8 and later. 
+ # determine if we need to release dbt-core or both dbt-core and dbt-postgres + name: Determine Docker Package + if: ${{ !failure() && !cancelled() }} + runs-on: ubuntu-latest + needs: [pypi-release] + outputs: + matrix: ${{ steps.determine-docker-package.outputs.matrix }} + steps: + - name: "Audit Version And Parse Into Parts" + id: semver + uses: dbt-labs/actions/parse-semver@v1.1.0 + with: + version: ${{ inputs.version_number }} + + - name: "Determine Packages to Release" + id: determine-docker-package + run: | + if [ ${{ steps.semver.outputs.minor }} -ge 8 ]; then + json_output={\"package\":[\"dbt-core\"]} + else + json_output={\"package\":[\"dbt-core\",\"dbt-postgres\"]} + fi + echo "matrix=$json_output" >> $GITHUB_OUTPUT + + docker-release: + name: "Docker Release for ${{ matrix.package }}" + needs: [determine-docker-package] + # We cannot release to docker on a test run because it uses the tag in GitHub as + # what we need to release but draft releases don't actually tag the commit so it + # finds nothing to release + if: ${{ !failure() && !cancelled() && (!inputs.test_run || inputs.only_docker) }} + strategy: + matrix: ${{fromJson(needs.determine-docker-package.outputs.matrix)}} + + permissions: + packages: write + + uses: dbt-labs/dbt-release/.github/workflows/release-docker.yml@main + with: + package: ${{ matrix.package }} + version_number: ${{ inputs.version_number }} + test_run: ${{ inputs.test_run }} + slack-notification: name: Slack Notification if: ${{ failure() && (!inputs.test_run || inputs.nightly_release) }} @@ -219,6 +238,7 @@ jobs: build-test-package, github-release, pypi-release, + docker-release, ] uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main @@ -227,3 +247,24 @@ jobs: secrets: SLACK_WEBHOOK_URL: ${{ secrets.SLACK_DEV_CORE_ALERTS }} + + testing-slack-notification: + # sends notifications to #slackbot-test + name: Testing - Slack Notification + if: ${{ failure() && inputs.test_run && !inputs.nightly_release }} + + needs: + [ + bump-version-generate-changelog, + build-test-package, + github-release, + pypi-release, + docker-release, + ] + + uses: dbt-labs/dbt-release/.github/workflows/slack-post-notification.yml@main + with: + status: "failure" + + secrets: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_TESTING_WEBHOOK_URL }} diff --git a/.github/workflows/repository-cleanup.yml b/.github/workflows/repository-cleanup.yml new file mode 100644 index 00000000000..c1d78028102 --- /dev/null +++ b/.github/workflows/repository-cleanup.yml @@ -0,0 +1,30 @@ +# **what?** +# Cleanup branches left over from automation and testing. Also cleanup +# draft releases from release testing. + +# **why?** +# The automations are leaving behind branches and releases that clutter +# the repository. Sometimes we need them to debug processes so we don't +# want them immediately deleted. Running on Saturday to avoid running +# at the same time as an actual release to prevent breaking a release +# mid-release. + +# **when?** +# Mainly on a schedule of 12:00 Saturday. 
+# Manual trigger can also run on demand + +name: Repository Cleanup + +on: + schedule: + - cron: '0 12 * * SAT' # At 12:00 on Saturday - details in `why` above + + workflow_dispatch: # for manual triggering + +permissions: + contents: write + +jobs: + cleanup-repo: + uses: dbt-labs/actions/.github/workflows/repository-cleanup.yml@main + secrets: inherit diff --git a/.github/workflows/schema-check.yml b/.github/workflows/schema-check.yml index 9577401e3e6..4cb8fce50c8 100644 --- a/.github/workflows/schema-check.yml +++ b/.github/workflows/schema-check.yml @@ -13,48 +13,63 @@ name: Artifact Schema Check on: + pull_request: + types: [ opened, reopened, labeled, unlabeled, synchronize ] + paths-ignore: [ '.changes/**', '.github/**', 'tests/**', '**.md', '**.yml' ] + workflow_dispatch: - pull_request: #TODO: remove before merging - push: - branches: - - "develop" - - "*.latest" - - "releases/*" # no special access is needed permissions: read-all env: LATEST_SCHEMA_PATH: ${{ github.workspace }}/new_schemas - SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}//schema_schanges.txt + SCHEMA_DIFF_ARTIFACT: ${{ github.workspace }}/schema_changes.txt DBT_REPO_DIRECTORY: ${{ github.workspace }}/dbt SCHEMA_REPO_DIRECTORY: ${{ github.workspace }}/schemas.getdbt.com jobs: checking-schemas: - name: "Checking schemas" + name: "Post-merge schema changes required" runs-on: ubuntu-latest steps: - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.8 - name: Checkout dbt repo - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: path: ${{ env.DBT_REPO_DIRECTORY }} + - name: Check for changes in core/dbt/artifacts + # https://github.com/marketplace/actions/paths-changes-filter + uses: dorny/paths-filter@v3 + id: check_artifact_changes + with: + filters: | + artifacts_changed: + - 'core/dbt/artifacts/**' + list-files: shell + working-directory: ${{ env.DBT_REPO_DIRECTORY }} + + - name: Succeed if no artifacts have changed + if: steps.check_artifact_changes.outputs.artifacts_changed == 'false' + run: | + echo "No artifact changes found in core/dbt/artifacts. CI check passed." + - name: Checkout schemas.getdbt.com repo - uses: actions/checkout@v3 + if: steps.check_artifact_changes.outputs.artifacts_changed == 'true' + uses: actions/checkout@v4 with: repository: dbt-labs/schemas.getdbt.com ref: 'main' - ssh-key: ${{ secrets.SCHEMA_SSH_PRIVATE_KEY }} path: ${{ env.SCHEMA_REPO_DIRECTORY }} - name: Generate current schema + if: steps.check_artifact_changes.outputs.artifacts_changed == 'true' run: | cd ${{ env.DBT_REPO_DIRECTORY }} python3 -m venv env @@ -65,26 +80,17 @@ jobs: # Copy generated schema files into the schemas.getdbt.com repo # Do a git diff to find any changes - # Ignore any date or version changes though + # Ignore any lines with date-like (yyyy-mm-dd) or version-like (x.y.z) changes - name: Compare schemas + if: steps.check_artifact_changes.outputs.artifacts_changed == 'true' run: | cp -r ${{ env.LATEST_SCHEMA_PATH }}/dbt ${{ env.SCHEMA_REPO_DIRECTORY }} cd ${{ env.SCHEMA_REPO_DIRECTORY }} - diff_results=$(git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \ - -I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' --compact-summary) - if [[ $(echo diff_results) ]]; then - echo $diff_results - echo "Schema changes detected!" 
- git diff -I='*[0-9]{4}-(0[1-9]|1[0-2])-(0[1-9]|[1-2][0-9]|3[0-1])T' \ - -I='*[0-9]{1}.[0-9]{2}.[0-9]{1}(rc[0-9]|b[0-9]| )' > ${{ env.SCHEMA_DIFF_ARTIFACT }} - exit 1 - else - echo "No schema changes detected" - fi + git diff -I='*[0-9]{4}-[0-9]{2}-[0-9]{2}' -I='*[0-9]+\.[0-9]+\.[0-9]+' --exit-code > ${{ env.SCHEMA_DIFF_ARTIFACT }} - name: Upload schema diff - uses: actions/upload-artifact@v3 - if: ${{ failure() }} + uses: actions/upload-artifact@v4 + if: ${{ failure() && steps.check_artifact_changes.outputs.artifacts_changed == 'true' }} with: - name: 'schema_schanges.txt' + name: 'schema_changes.txt' path: '${{ env.SCHEMA_DIFF_ARTIFACT }}' diff --git a/.github/workflows/structured-logging-schema-check.yml b/.github/workflows/structured-logging-schema-check.yml index 745d0a1619f..4934bffcaeb 100644 --- a/.github/workflows/structured-logging-schema-check.yml +++ b/.github/workflows/structured-logging-schema-check.yml @@ -69,12 +69,12 @@ jobs: steps: - name: checkout dev - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: persist-credentials: false - name: Setup Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "3.8" @@ -94,7 +94,11 @@ jobs: # integration tests generate a ton of logs in different files. the next step will find them all. # we actually care if these pass, because the normal test run doesn't usually include many json log outputs - name: Run integration tests - run: tox -e integration -- -nauto + uses: nick-fields/retry@v3 + with: + timeout_minutes: 30 + max_attempts: 3 + command: tox -e integration -- -nauto env: PYTEST_ADDOPTS: ${{ format('--splits {0} --group {1}', env.PYTHON_INTEGRATION_TEST_WORKERS, matrix.split-group) }} diff --git a/.github/workflows/test-repeater.yml b/.github/workflows/test-repeater.yml index e414792140c..315133336e8 100644 --- a/.github/workflows/test-repeater.yml +++ b/.github/workflows/test-repeater.yml @@ -36,7 +36,7 @@ on: type: choice options: - 'ubuntu-latest' - - 'macos-latest' + - 'macos-12' - 'windows-latest' num_runs_per_batch: description: 'Max number of times to run the test per batch. We always run 10 batches.' 
@@ -83,12 +83,12 @@ jobs: steps: - name: "Checkout code" - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ inputs.branch }} - name: "Setup Python" - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: "${{ inputs.python_version }}" @@ -101,7 +101,7 @@ jobs: # mac and windows don't use make due to limitations with docker with those runners in GitHub - name: "Set up postgres (macos)" - if: inputs.os == 'macos-latest' + if: inputs.os == 'macos-12' uses: ./.github/actions/setup-postgres-macos - name: "Set up postgres (windows)" diff --git a/.github/workflows/test/.actrc b/.github/workflows/test/.actrc deleted file mode 100644 index 027d95f14ff..00000000000 --- a/.github/workflows/test/.actrc +++ /dev/null @@ -1 +0,0 @@ --P ubuntu-latest=ghcr.io/catthehacker/ubuntu:act-latest diff --git a/.github/workflows/test/.gitignore b/.github/workflows/test/.gitignore deleted file mode 100644 index 1233aaed111..00000000000 --- a/.github/workflows/test/.gitignore +++ /dev/null @@ -1 +0,0 @@ -.secrets diff --git a/.github/workflows/test/.secrets.EXAMPLE b/.github/workflows/test/.secrets.EXAMPLE deleted file mode 100644 index 9b3e0acc9c1..00000000000 --- a/.github/workflows/test/.secrets.EXAMPLE +++ /dev/null @@ -1 +0,0 @@ -GITHUB_TOKEN=GH_PERSONAL_ACCESS_TOKEN_GOES_HERE diff --git a/.github/workflows/test/inputs/release_docker.json b/.github/workflows/test/inputs/release_docker.json deleted file mode 100644 index f5bbcb176ba..00000000000 --- a/.github/workflows/test/inputs/release_docker.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "inputs": { - "version_number": "1.0.1", - "package": "dbt-postgres" - } -} diff --git a/.github/workflows/version-bump.yml b/.github/workflows/version-bump.yml deleted file mode 100644 index bde34d68321..00000000000 --- a/.github/workflows/version-bump.yml +++ /dev/null @@ -1,28 +0,0 @@ -# **what?** -# This workflow will take the new version number to bump to. With that -# it will run versionbump to update the version number everywhere in the -# code base and then run changie to create the corresponding changelog. -# A PR will be created with the changes that can be reviewed before committing. - -# **why?** -# This is to aid in releasing dbt and making sure we have updated -# the version in all places and generated the changelog. - -# **when?** -# This is triggered manually - -name: Version Bump - -on: - workflow_dispatch: - inputs: - version_number: - description: 'The version number to bump to (ex. 1.2.0, 1.3.0b1)' - required: true - -jobs: - version_bump_and_changie: - uses: dbt-labs/actions/.github/workflows/version-bump.yml@main - with: - version_number: ${{ inputs.version_number }} - secrets: inherit # ok since what we are calling is internally maintained diff --git a/.isort.cfg b/.isort.cfg new file mode 100644 index 00000000000..a260577deaa --- /dev/null +++ b/.isort.cfg @@ -0,0 +1,4 @@ +[settings] +profile=black +extend_skip_glob=.github/*,third-party-stubs/*,scripts/* +known_first_party=dbt,dbt_adapters,dbt_common,dbt_extractor,dbt_semantic_interfaces diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b41cac4ae5b..a30d2f5be4d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ # Configuration for pre-commit hooks (see https://pre-commit.com/). # Eventually the hooks described here will be run as tests before merging each PR. 
-exclude: ^(core/dbt/docs/build/|core/dbt/events/types_pb2.py) +exclude: ^(core/dbt/docs/build/|core/dbt/common/events/types_pb2.py|core/dbt/events/core_types_pb2.py|core/dbt/adapters/events/adapter_types_pb2.py) # Force all unspecified python hooks to run python 3.8 default_language_version: @@ -15,12 +15,19 @@ repos: args: [--unsafe] - id: check-json - id: end-of-file-fixer + exclude: schemas/dbt/manifest/ - id: trailing-whitespace exclude_types: - "markdown" - id: check-case-conflict +- repo: https://github.com/pycqa/isort + # rev must match what's in dev-requirements.txt + rev: 5.13.2 + hooks: + - id: isort - repo: https://github.com/psf/black - rev: 22.3.0 + # rev must match what's in dev-requirements.txt + rev: 24.3.0 hooks: - id: black - id: black @@ -30,6 +37,7 @@ repos: - "--check" - "--diff" - repo: https://github.com/pycqa/flake8 + # rev must match what's in dev-requirements.txt rev: 4.0.1 hooks: - id: flake8 @@ -37,6 +45,7 @@ repos: alias: flake8-check stages: [manual] - repo: https://github.com/pre-commit/mirrors-mypy + # rev must match what's in dev-requirements.txt rev: v1.4.1 hooks: - id: mypy diff --git a/ARCHITECTURE.md b/ARCHITECTURE.md index e689d32f2ee..f24e573e850 100644 --- a/ARCHITECTURE.md +++ b/ARCHITECTURE.md @@ -26,12 +26,13 @@ Legacy tests are found in the 'test' directory: The "tasks" map to top-level dbt commands. So `dbt run` => task.run.RunTask, etc. Some are more like abstract base classes (GraphRunnableTask, for example) but all the concrete types outside of task should map to tasks. Currently one executes at a time. The tasks kick off their “Runners” and those do execute in parallel. The parallelism is managed via a thread pool, in GraphRunnableTask. -core/dbt/include/index.html +core/dbt/task/docs/index.html This is the docs website code. It comes from the dbt-docs repository, and is generated when a release is packaged. ## Adapters -dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc. For testing and development purposes, the dbt-postgres plugin lives alongside the dbt-core codebase, in the [`plugins`](plugins) subdirectory. Like other adapter plugins, it is a self-contained codebase and package that builds on top of dbt-core. +dbt uses an adapter-plugin pattern to extend support to different databases, warehouses, query engines, etc. +Note: dbt-postgres used to exist in dbt-core but is now in [its own repo](https://github.com/dbt-labs/dbt-postgres) Each adapter is a mix of python, Jinja2, and SQL. The adapter code also makes heavy use of Jinja2 to wrap modular chunks of SQL functionality, define default implementations, and allow plugins to override it. diff --git a/CHANGELOG.md b/CHANGELOG.md index 5331600fca4..900460abb2e 100755 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,6 @@ # dbt Core Changelog -- This file provides a full account of all changes to `dbt-core` and `dbt-postgres` +- This file provides a full account of all changes to `dbt-core` - Changes are listed under the (pre)release in which they first appear. Subsequent releases include changes from previous releases. - "Breaking changes" listed under a version may require action from end users or external maintainers when upgrading to that version. - Do not edit this file directly. This file is auto-generated using [changie](https://github.com/miniscruff/changie). 
For details on how to document a change, see [the contributing guide](https://github.com/dbt-labs/dbt-core/blob/main/CONTRIBUTING.md#adding-changelog-entry) @@ -10,6 +10,8 @@ For information on prior major and minor releases, see their changelogs: +* [1.8](https://github.com/dbt-labs/dbt-core/blob/1.8.latest/CHANGELOG.md) +* [1.7](https://github.com/dbt-labs/dbt-core/blob/1.7.latest/CHANGELOG.md) * [1.6](https://github.com/dbt-labs/dbt-core/blob/1.6.latest/CHANGELOG.md) * [1.5](https://github.com/dbt-labs/dbt-core/blob/1.5.latest/CHANGELOG.md) * [1.4](https://github.com/dbt-labs/dbt-core/blob/1.4.latest/CHANGELOG.md) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 7b4c215db15..eafd4606b9f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -10,6 +10,7 @@ 6. [Debugging](#debugging) 7. [Adding or modifying a changelog entry](#adding-or-modifying-a-changelog-entry) 8. [Submitting a Pull Request](#submitting-a-pull-request) +9. [Troubleshooting Tips](#troubleshooting-tips) ## About this document @@ -21,10 +22,10 @@ If you get stuck, we're happy to help! Drop us a line in the `#dbt-core-developm ### Notes -- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. The sole exception is Postgres; the `dbt-postgres` plugin lives in this repository (`dbt-core`). +- **Adapters:** Is your issue or proposed code change related to a specific [database adapter](https://docs.getdbt.com/docs/available-adapters)? If so, please open issues, PRs, and discussions in that adapter's repository instead. - **CLA:** Please note that anyone contributing code to `dbt-core` must sign the [Contributor License Agreement](https://docs.getdbt.com/docs/contributor-license-agreements). If you are unable to sign the CLA, the `dbt-core` maintainers will unfortunately be unable to merge any of your Pull Requests. We welcome you to participate in discussions, open issues, and comment on existing ones. - **Branches:** All pull requests from community contributors should target the `main` branch (default). If the change is needed as a patch for a minor version of dbt that has already been released (or is already a release candidate), a maintainer will backport the changes in your PR to the relevant "latest" release branch (`1.0.latest`, `1.1.latest`, ...). If an issue fix applies to a release branch, that fix should be first committed to the development branch and then to the release branch (rarely release-branch fixes may not apply to `main`). -- **Releases**: Before releasing a new minor version of Core, we prepare a series of alphas and release candidates to allow users (especially employees of dbt Labs!) to test the new version in live environments. This is an important quality assurance step, as it exposes the new code to a wide variety of complicated deployments and can surface bugs before official release. Releases are accessible via pip, homebrew, and dbt Cloud. +- **Releases**: Before releasing a new minor version of Core, we prepare a series of alphas and release candidates to allow users (especially employees of dbt Labs!) to test the new version in live environments. This is an important quality assurance step, as it exposes the new code to a wide variety of complicated deployments and can surface bugs before official release. 
Releases are accessible via our [supported installation methods](https://docs.getdbt.com/docs/core/installation-overview#install-dbt-core). ## Getting the code @@ -44,9 +45,7 @@ If you are not a member of the `dbt-labs` GitHub organization, you can contribut ### dbt Labs contributors -If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-core` repo. Rather than forking `dbt-core` to make your changes, just clone the repository, check out a new branch, and push directly to that branch. Branch names should be fixed by `CT-XXX/` where: -* CT stands for 'core team' -* XXX stands for a JIRA ticket number +If you are a member of the `dbt-labs` GitHub organization, you will have push access to the `dbt-core` repo. Rather than forking `dbt-core` to make your changes, just clone the repository, check out a new branch, and push directly to that branch. ## Setting up an environment @@ -171,9 +170,9 @@ Finally, you can also run a specific test or group of tests using [`pytest`](htt ```sh # run all unit tests in a file -python3 -m pytest tests/unit/test_graph.py +python3 -m pytest tests/unit/test_invocation_id.py # run a specific unit test -python3 -m pytest tests/unit/test_graph.py::GraphTest::test__dependency_list +python3 -m pytest tests/unit/test_invocation_id.py::TestInvocationId::test_invocation_id # run specific Postgres functional tests python3 -m pytest tests/functional/sources ``` @@ -221,10 +220,12 @@ You don't need to worry about which `dbt-core` version your change will go into. ## Submitting a Pull Request -Code can be merged into the current development branch `main` by opening a pull request. A `dbt-core` maintainer will review your PR. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. +Code can be merged into the current development branch `main` by opening a pull request. If the proposal looks like it's on the right track, then a `dbt-core` maintainer will triage the PR and label it as `ready_for_review`. From this point, two code reviewers will be assigned with the aim of responding to any updates to the PR within about one week. They may suggest code revision for style or clarity, or request that you add unit or integration test(s). These are good things! We believe that, with a little bit of help, anyone can contribute high-quality code. Once merged, your contribution will be available for the next release of `dbt-core`. Automated tests run via GitHub Actions. If you're a first-time contributor, all tests (including code checks and unit tests) will require a maintainer to approve. Changes in the `dbt-core` repository trigger integration tests against Postgres. dbt Labs also provides CI environments in which to test changes to other adapters, triggered by PRs in those adapters' repositories, as well as periodic maintenance checks of each adapter in concert with the latest `dbt-core` code changes. Once all tests are passing and your PR has been approved, a `dbt-core` maintainer will merge your changes into the active development branch. And that's it! Happy developing :tada: +## Troubleshooting Tips + Sometimes, the content license agreement auto-check bot doesn't find a user's entry in its roster. If you need to force a rerun, add `@cla-bot check` in a comment on the pull request. 
diff --git a/Makefile b/Makefile index 9c276902194..eac08719ed6 100644 --- a/Makefile +++ b/Makefile @@ -30,17 +30,22 @@ CI_FLAGS =\ .PHONY: dev_req dev_req: ## Installs dbt-* packages in develop mode along with only development dependencies. @\ - pip install -r dev-requirements.txt - pip install -r editable-requirements.txt + pip install -r dev-requirements.txt -r editable-requirements.txt .PHONY: dev dev: dev_req ## Installs dbt-* packages in develop mode along with development dependencies and pre-commit. @\ pre-commit install -.PHONY: proto_types -proto_types: ## generates google protobuf python file from types.proto - protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/types.proto +.PHONY: dev-uninstall +dev-uninstall: ## Uninstall all packages in venv except for build tools + @\ + pip freeze | grep -v "^-e" | cut -d "@" -f1 | xargs pip uninstall -y; \ + pip uninstall -y dbt-core + +.PHONY: core_proto_types +core_proto_types: ## generates google protobuf python file from core_types.proto + protoc -I=./core/dbt/events --python_out=./core/dbt/events ./core/dbt/events/core_types.proto .PHONY: mypy mypy: .env ## Runs mypy against staged changes for static type checking. @@ -77,12 +82,12 @@ test: .env ## Runs unit tests with py and code checks against staged changes. $(DOCKER_CMD) pre-commit run mypy-check --hook-stage manual | grep -v "INFO" .PHONY: integration -integration: .env ## Runs postgres integration tests with py-integration +integration: .env ## Runs core integration tests using postgres with py-integration @\ $(CI_FLAGS) $(DOCKER_CMD) tox -e py-integration -- -nauto .PHONY: integration-fail-fast -integration-fail-fast: .env ## Runs postgres integration tests with py-integration in "fail fast" mode. +integration-fail-fast: .env ## Runs core integration tests using postgres with py-integration in "fail fast" mode. @\ $(DOCKER_CMD) tox -e py-integration -- -x -nauto @@ -139,3 +144,7 @@ help: ## Show this help message. @echo @echo 'options:' @echo 'use USE_DOCKER=true to run target in a docker container' + +.PHONY: json_schema +json_schema: ## Update generated JSON schema using code changes. + scripts/collect-artifact-schema.py --path schemas diff --git a/README.md b/README.md index cd56702b4a7..a664b38c767 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ These select statements, or "models", form a dbt project. Models frequently buil ## Getting started -- [Install dbt](https://docs.getdbt.com/docs/get-started/installation) +- [Install dbt Core](https://docs.getdbt.com/docs/get-started/installation) or explore the [dbt Cloud CLI](https://docs.getdbt.com/docs/cloud/cloud-cli-installation), a command-line interface powered by [dbt Cloud](https://docs.getdbt.com/docs/cloud/about-cloud/dbt-cloud-features) that enhances collaboration. - Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/) ## Join the dbt Community @@ -31,7 +31,7 @@ These select statements, or "models", form a dbt project. Models frequently buil ## Reporting bugs and contributing code -- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-core/issues/new) +- Want to report a bug or request a feature? Let us know and open [an issue](https://github.com/dbt-labs/dbt-core/issues/new/choose) - Want to help us build dbt? 
Check out the [Contributing Guide](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md) ## Code of Conduct diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000000..b856030a1f6 --- /dev/null +++ b/SECURITY.md @@ -0,0 +1 @@ +[About dbt Core versions](https://docs.getdbt.com/docs/dbt-versions/core) diff --git a/codecov.yml b/codecov.yml index e69de29bb2d..47ed944cf21 100644 --- a/codecov.yml +++ b/codecov.yml @@ -0,0 +1,39 @@ +ignore: + - ".github" + - ".changes" + +coverage: + status: + project: + default: + target: auto + threshold: 0.1% # Reduce noise by ignoring rounding errors in coverage drops + patch: + default: + target: auto + threshold: 80% + +comment: + layout: "header, diff, flags, components" # show component info in the PR comment + +component_management: + default_rules: # default rules that will be inherited by all components + statuses: + - type: project # in this case every component that doesn't have a status defined will have a project type one + target: auto + threshold: 0.1% + - type: patch + target: 80% + individual_components: + - component_id: unittests + name: "Unit Tests" + flag_regexes: + - "unit" + statuses: + - type: patch + target: 80% + threshold: 5% + - component_id: integrationtests + name: "Integration Tests" + flag_regexes: + - "integration" diff --git a/core/MANIFEST.in b/core/MANIFEST.in index ba02c346851..2fe0583d285 100644 --- a/core/MANIFEST.in +++ b/core/MANIFEST.in @@ -1,2 +1,3 @@ recursive-include dbt/include *.py *.sql *.yml *.html *.md .gitkeep .gitignore include dbt/py.typed +recursive-include dbt/task/docs *.html diff --git a/core/dbt/README.md b/core/dbt/README.md index 79123a95f47..6b545ceb888 100644 --- a/core/dbt/README.md +++ b/core/dbt/README.md @@ -22,8 +22,6 @@ ### links.py -### logger.py - ### main.py ### node_types.py diff --git a/core/dbt/adapters/README.md b/core/dbt/adapters/README.md deleted file mode 100644 index 20ae9e7a56a..00000000000 --- a/core/dbt/adapters/README.md +++ /dev/null @@ -1,30 +0,0 @@ -# Adapters README - -The Adapters module is responsible for defining database connection methods, caching information from databases, how relations are defined, and the two major connection types we have - base and sql. - -# Directories - -## `base` - -Defines the base implementation Adapters can use to build out full functionality. - -## `sql` - -Defines a sql implementation for adapters that initially inherits the above base implementation and comes with some premade methods and macros that can be overwritten as needed per adapter. (most common type of adapter.) - -# Files - -## `cache.py` - -Cached information from the database. - -## `factory.py` -Defines how we generate adapter objects - -## `protocol.py` - -Defines various interfaces for various adapter objects. Helps mypy correctly resolve methods. - -## `reference_keys.py` - -Configures naming scheme for cache elements to be universal. diff --git a/core/dbt/adapters/__init__.py b/core/dbt/adapters/__init__.py deleted file mode 100644 index e52cc72d2cd..00000000000 --- a/core/dbt/adapters/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# N.B.
-# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters) -# The matching statement is in plugins/postgres/dbt/adapters/__init__.py - -from pkgutil import extend_path - -__path__ = extend_path(__path__, __name__) diff --git a/core/dbt/adapters/base/README.md b/core/dbt/adapters/base/README.md deleted file mode 100644 index d3fa85d71a8..00000000000 --- a/core/dbt/adapters/base/README.md +++ /dev/null @@ -1,10 +0,0 @@ - -## Base adapters - -### impl.py - -The class `SQLAdapter` in [base/imply.py](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/adapters/base/impl.py) is a (mostly) abstract object that adapter objects inherit from. The base class scaffolds out methods that every adapter project usually should implement for smooth communication between dbt and database. - -Some target databases require more or fewer methods--it all depends on what the warehouse's featureset is. - -Look into the class for function-level comments. diff --git a/core/dbt/adapters/base/__init__.py b/core/dbt/adapters/base/__init__.py deleted file mode 100644 index 07f5303992e..00000000000 --- a/core/dbt/adapters/base/__init__.py +++ /dev/null @@ -1,19 +0,0 @@ -# these are all just exports, #noqa them so flake8 will be happy - -# TODO: Should we still include this in the `adapters` namespace? -from dbt.contracts.connection import Credentials # noqa: F401 -from dbt.adapters.base.meta import available # noqa: F401 -from dbt.adapters.base.connections import BaseConnectionManager # noqa: F401 -from dbt.adapters.base.relation import ( # noqa: F401 - BaseRelation, - RelationType, - SchemaSearchMap, -) -from dbt.adapters.base.column import Column # noqa: F401 -from dbt.adapters.base.impl import ( # noqa: F401 - AdapterConfig, - BaseAdapter, - PythonJobHelper, - ConstraintSupport, -) -from dbt.adapters.base.plugin import AdapterPlugin # noqa: F401 diff --git a/core/dbt/adapters/base/column.py b/core/dbt/adapters/base/column.py deleted file mode 100644 index aa0ce7dc63f..00000000000 --- a/core/dbt/adapters/base/column.py +++ /dev/null @@ -1,161 +0,0 @@ -from dataclasses import dataclass -import re -from typing import Dict, ClassVar, Any, Optional - -from dbt.exceptions import DbtRuntimeError - - -@dataclass -class Column: - TYPE_LABELS: ClassVar[Dict[str, str]] = { - "STRING": "TEXT", - "TIMESTAMP": "TIMESTAMP", - "FLOAT": "FLOAT", - "INTEGER": "INT", - "BOOLEAN": "BOOLEAN", - } - column: str - dtype: str - char_size: Optional[int] = None - numeric_precision: Optional[Any] = None - numeric_scale: Optional[Any] = None - - @classmethod - def translate_type(cls, dtype: str) -> str: - return cls.TYPE_LABELS.get(dtype.upper(), dtype) - - @classmethod - def create(cls, name, label_or_dtype: str) -> "Column": - column_type = cls.translate_type(label_or_dtype) - return cls(name, column_type) - - @property - def name(self) -> str: - return self.column - - @property - def quoted(self) -> str: - return '"{}"'.format(self.column) - - @property - def data_type(self) -> str: - if self.is_string(): - return self.string_type(self.string_size()) - elif self.is_numeric(): - return self.numeric_type(self.dtype, self.numeric_precision, self.numeric_scale) - else: - return self.dtype - - def is_string(self) -> bool: - return self.dtype.lower() in ["text", "character varying", "character", "varchar"] - - def is_number(self): - return any([self.is_integer(), self.is_numeric(), self.is_float()]) - - def 
is_float(self): - return self.dtype.lower() in [ - # floats - "real", - "float4", - "float", - "double precision", - "float8", - "double", - ] - - def is_integer(self) -> bool: - return self.dtype.lower() in [ - # real types - "smallint", - "integer", - "bigint", - "smallserial", - "serial", - "bigserial", - # aliases - "int2", - "int4", - "int8", - "serial2", - "serial4", - "serial8", - ] - - def is_numeric(self) -> bool: - return self.dtype.lower() in ["numeric", "decimal"] - - def string_size(self) -> int: - if not self.is_string(): - raise DbtRuntimeError("Called string_size() on non-string field!") - - if self.dtype == "text" or self.char_size is None: - # char_size should never be None. Handle it reasonably just in case - return 256 - else: - return int(self.char_size) - - def can_expand_to(self, other_column: "Column") -> bool: - """returns True if this column can be expanded to the size of the - other column""" - if not self.is_string() or not other_column.is_string(): - return False - - return other_column.string_size() > self.string_size() - - def literal(self, value: Any) -> str: - return "{}::{}".format(value, self.data_type) - - @classmethod - def string_type(cls, size: int) -> str: - return "character varying({})".format(size) - - @classmethod - def numeric_type(cls, dtype: str, precision: Any, scale: Any) -> str: - # This could be decimal(...), numeric(...), number(...) - # Just use whatever was fed in here -- don't try to get too clever - if precision is None or scale is None: - return dtype - else: - return "{}({},{})".format(dtype, precision, scale) - - def __repr__(self) -> str: - return "<Column {} ({})>".format(self.name, self.data_type) - - @classmethod - def from_description(cls, name: str, raw_data_type: str) -> "Column": - match = re.match(r"([^(]+)(\([^)]+\))?", raw_data_type) - if match is None: - raise DbtRuntimeError(f'Could not interpret data type "{raw_data_type}"') - data_type, size_info = match.groups() - char_size = None - numeric_precision = None - numeric_scale = None - if size_info is not None: - # strip out the parentheses - size_info = size_info[1:-1] - parts = size_info.split(",") - if len(parts) == 1: - try: - char_size = int(parts[0]) - except ValueError: - raise DbtRuntimeError( - f'Could not interpret data_type "{raw_data_type}": ' - f'could not convert "{parts[0]}" to an integer' - ) - elif len(parts) == 2: - try: - numeric_precision = int(parts[0]) - except ValueError: - raise DbtRuntimeError( - f'Could not interpret data_type "{raw_data_type}": ' - f'could not convert "{parts[0]}" to an integer' - ) - try: - numeric_scale = int(parts[1]) - except ValueError: - raise DbtRuntimeError( - f'Could not interpret data_type "{raw_data_type}": ' - f'could not convert "{parts[1]}" to an integer' - ) - - return cls(name, data_type, char_size, numeric_precision, numeric_scale) diff --git a/core/dbt/adapters/base/connections.py b/core/dbt/adapters/base/connections.py deleted file mode 100644 index d449b27e5e6..00000000000 --- a/core/dbt/adapters/base/connections.py +++ /dev/null @@ -1,414 +0,0 @@ -import abc -import os -from time import sleep -import sys -import traceback - -# multiprocessing.RLock is a function returning this type -from multiprocessing.synchronize import RLock -from threading import get_ident -from typing import ( - Any, - Dict, - Tuple, - Hashable, - Optional, - ContextManager, - List, - Type, - Union, - Iterable, - Callable, -) - -import agate - -import dbt.exceptions -from dbt.contracts.connection import ( - Connection, - Identifier, - 
ConnectionState, - AdapterRequiredConfig, - LazyHandle, - AdapterResponse, -) -from dbt.contracts.graph.manifest import Manifest -from dbt.adapters.base.query_headers import ( - MacroQueryStringSetter, -) -from dbt.events import AdapterLogger -from dbt.events.functions import fire_event -from dbt.events.types import ( - NewConnection, - ConnectionReused, - ConnectionLeftOpenInCleanup, - ConnectionLeftOpen, - ConnectionClosedInCleanup, - ConnectionClosed, - Rollback, - RollbackFailed, -) -from dbt.events.contextvars import get_node_info -from dbt import flags -from dbt.utils import cast_to_str - -SleepTime = Union[int, float] # As taken by time.sleep. -AdapterHandle = Any # Adapter connection handle objects can be any class. - - -class BaseConnectionManager(metaclass=abc.ABCMeta): - """Methods to implement: - - exception_handler - - cancel_open - - open - - begin - - commit - - clear_transaction - - execute - - You must also set the 'TYPE' class attribute with a class-unique constant - string. - """ - - TYPE: str = NotImplemented - - def __init__(self, profile: AdapterRequiredConfig): - self.profile = profile - self.thread_connections: Dict[Hashable, Connection] = {} - self.lock: RLock = flags.MP_CONTEXT.RLock() - self.query_header: Optional[MacroQueryStringSetter] = None - - def set_query_header(self, manifest: Manifest) -> None: - self.query_header = MacroQueryStringSetter(self.profile, manifest) - - @staticmethod - def get_thread_identifier() -> Hashable: - # note that get_ident() may be re-used, but we should never experience - # that within a single process - return (os.getpid(), get_ident()) - - def get_thread_connection(self) -> Connection: - key = self.get_thread_identifier() - with self.lock: - if key not in self.thread_connections: - raise dbt.exceptions.InvalidConnectionError(key, list(self.thread_connections)) - return self.thread_connections[key] - - def set_thread_connection(self, conn: Connection) -> None: - key = self.get_thread_identifier() - if key in self.thread_connections: - raise dbt.exceptions.DbtInternalError( - "In set_thread_connection, existing connection exists for {}" - ) - self.thread_connections[key] = conn - - def get_if_exists(self) -> Optional[Connection]: - key = self.get_thread_identifier() - with self.lock: - return self.thread_connections.get(key) - - def clear_thread_connection(self) -> None: - key = self.get_thread_identifier() - with self.lock: - if key in self.thread_connections: - del self.thread_connections[key] - - def clear_transaction(self) -> None: - """Clear any existing transactions.""" - conn = self.get_thread_connection() - if conn is not None: - if conn.transaction_open: - self._rollback(conn) - self.begin() - self.commit() - - def rollback_if_open(self) -> None: - conn = self.get_if_exists() - if conn is not None and conn.handle and conn.transaction_open: - self._rollback(conn) - - @abc.abstractmethod - def exception_handler(self, sql: str) -> ContextManager: - """Create a context manager that handles exceptions caused by database - interactions. - - :param str sql: The SQL string that the block inside the context - manager is executing. - :return: A context manager that handles exceptions raised by the - underlying database. - """ - raise dbt.exceptions.NotImplementedError( - "`exception_handler` is not implemented for this adapter!" 
- ) - - def set_connection_name(self, name: Optional[str] = None) -> Connection: - """Called by 'acquire_connection' in BaseAdapter, which is called by - 'connection_named', called by 'connection_for(node)'. - Creates a connection for this thread if one doesn't already - exist, and will rename an existing connection.""" - - conn_name: str = "master" if name is None else name - - # Get a connection for this thread - conn = self.get_if_exists() - - if conn and conn.name == conn_name and conn.state == "open": - # Found a connection and nothing to do, so just return it - return conn - - if conn is None: - # Create a new connection - conn = Connection( - type=Identifier(self.TYPE), - name=conn_name, - state=ConnectionState.INIT, - transaction_open=False, - handle=None, - credentials=self.profile.credentials, - ) - conn.handle = LazyHandle(self.open) - # Add the connection to thread_connections for this thread - self.set_thread_connection(conn) - fire_event( - NewConnection(conn_name=conn_name, conn_type=self.TYPE, node_info=get_node_info()) - ) - else: # existing connection either wasn't open or didn't have the right name - if conn.state != "open": - conn.handle = LazyHandle(self.open) - if conn.name != conn_name: - orig_conn_name: str = conn.name or "" - conn.name = conn_name - fire_event(ConnectionReused(orig_conn_name=orig_conn_name, conn_name=conn_name)) - - return conn - - @classmethod - def retry_connection( - cls, - connection: Connection, - connect: Callable[[], AdapterHandle], - logger: AdapterLogger, - retryable_exceptions: Iterable[Type[Exception]], - retry_limit: int = 1, - retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1, - _attempts: int = 0, - ) -> Connection: - """Given a Connection, set its handle by calling connect. - - The calls to connect will be retried up to retry_limit times to deal with transient - connection errors. By default, one retry will be attempted if retryable_exceptions is set. - - :param Connection connection: An instance of a Connection that needs a handle to be set, - usually when attempting to open it. - :param connect: A callable that returns the appropiate connection handle for a - given adapter. This callable will be retried retry_limit times if a subclass of any - Exception in retryable_exceptions is raised by connect. - :type connect: Callable[[], AdapterHandle] - :param AdapterLogger logger: A logger to emit messages on retry attempts or errors. When - handling expected errors, we call debug, and call warning on unexpected errors or when - all retry attempts have been exhausted. - :param retryable_exceptions: An iterable of exception classes that if raised by - connect should trigger a retry. - :type retryable_exceptions: Iterable[Type[Exception]] - :param int retry_limit: How many times to retry the call to connect. If this limit - is exceeded before a successful call, a FailedToConnectError will be raised. - Must be non-negative. - :param retry_timeout: Time to wait between attempts to connect. Can also take a - Callable that takes the number of attempts so far, beginning at 0, and returns an int - or float to be passed to time.sleep. - :type retry_timeout: Union[Callable[[int], SleepTime], SleepTime] = 1 - :param int _attempts: Parameter used to keep track of the number of attempts in calling the - connect function across recursive calls. Passed as an argument to retry_timeout if it - is a Callable. This parameter should not be set by the initial caller. 
- :raises dbt.exceptions.FailedToConnectError: Upon exhausting all retry attempts without - successfully acquiring a handle. - :return: The given connection with its appropriate state and handle attributes set - depending on whether we successfully acquired a handle or not. - """ - timeout = retry_timeout(_attempts) if callable(retry_timeout) else retry_timeout - if timeout < 0: - raise dbt.exceptions.FailedToConnectError( - "retry_timeout cannot be negative or return a negative time." - ) - - if retry_limit < 0 or retry_limit > sys.getrecursionlimit(): - # This guard is not perfect others may add to the recursion limit (e.g. built-ins). - connection.handle = None - connection.state = ConnectionState.FAIL - raise dbt.exceptions.FailedToConnectError("retry_limit cannot be negative") - - try: - connection.handle = connect() - connection.state = ConnectionState.OPEN - return connection - - except tuple(retryable_exceptions) as e: - if retry_limit <= 0: - connection.handle = None - connection.state = ConnectionState.FAIL - raise dbt.exceptions.FailedToConnectError(str(e)) - - logger.debug( - f"Got a retryable error when attempting to open a {cls.TYPE} connection.\n" - f"{retry_limit} attempts remaining. Retrying in {timeout} seconds.\n" - f"Error:\n{e}" - ) - - sleep(timeout) - return cls.retry_connection( - connection=connection, - connect=connect, - logger=logger, - retry_limit=retry_limit - 1, - retry_timeout=retry_timeout, - retryable_exceptions=retryable_exceptions, - _attempts=_attempts + 1, - ) - - except Exception as e: - connection.handle = None - connection.state = ConnectionState.FAIL - raise dbt.exceptions.FailedToConnectError(str(e)) - - @abc.abstractmethod - def cancel_open(self) -> Optional[List[str]]: - """Cancel all open connections on the adapter. (passable)""" - raise dbt.exceptions.NotImplementedError( - "`cancel_open` is not implemented for this adapter!" - ) - - @classmethod - @abc.abstractmethod - def open(cls, connection: Connection) -> Connection: - """Open the given connection on the adapter and return it. - - This may mutate the given connection (in particular, its state and its - handle). - - This should be thread-safe, or hold the lock if necessary. The given - connection should not be in either in_use or available. - """ - raise dbt.exceptions.NotImplementedError("`open` is not implemented for this adapter!") - - def release(self) -> None: - with self.lock: - conn = self.get_if_exists() - if conn is None: - return - - try: - # always close the connection. close() calls _rollback() if there - # is an open transaction - self.close(conn) - except Exception: - # if rollback or close failed, remove our busted connection - self.clear_thread_connection() - raise - - def cleanup_all(self) -> None: - with self.lock: - for connection in self.thread_connections.values(): - if connection.state not in {"closed", "init"}: - fire_event(ConnectionLeftOpenInCleanup(conn_name=cast_to_str(connection.name))) - else: - fire_event(ConnectionClosedInCleanup(conn_name=cast_to_str(connection.name))) - self.close(connection) - - # garbage collect these connections - self.thread_connections.clear() - - @abc.abstractmethod - def begin(self) -> None: - """Begin a transaction. (passable)""" - raise dbt.exceptions.NotImplementedError("`begin` is not implemented for this adapter!") - - @abc.abstractmethod - def commit(self) -> None: - """Commit a transaction. 
(passable)""" - raise dbt.exceptions.NotImplementedError("`commit` is not implemented for this adapter!") - - @classmethod - def _rollback_handle(cls, connection: Connection) -> None: - """Perform the actual rollback operation.""" - try: - connection.handle.rollback() - except Exception: - fire_event( - RollbackFailed( - conn_name=cast_to_str(connection.name), - exc_info=traceback.format_exc(), - node_info=get_node_info(), - ) - ) - - @classmethod - def _close_handle(cls, connection: Connection) -> None: - """Perform the actual close operation.""" - # On windows, sometimes connection handles don't have a close() attr. - if hasattr(connection.handle, "close"): - fire_event( - ConnectionClosed(conn_name=cast_to_str(connection.name), node_info=get_node_info()) - ) - connection.handle.close() - else: - fire_event( - ConnectionLeftOpen( - conn_name=cast_to_str(connection.name), node_info=get_node_info() - ) - ) - - @classmethod - def _rollback(cls, connection: Connection) -> None: - """Roll back the given connection.""" - if connection.transaction_open is False: - raise dbt.exceptions.DbtInternalError( - f"Tried to rollback transaction on connection " - f'"{connection.name}", but it does not have one open!' - ) - - fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info())) - cls._rollback_handle(connection) - - connection.transaction_open = False - - @classmethod - def close(cls, connection: Connection) -> Connection: - # if the connection is in closed or init, there's nothing to do - if connection.state in {ConnectionState.CLOSED, ConnectionState.INIT}: - return connection - - if connection.transaction_open and connection.handle: - fire_event(Rollback(conn_name=cast_to_str(connection.name), node_info=get_node_info())) - cls._rollback_handle(connection) - connection.transaction_open = False - - cls._close_handle(connection) - connection.state = ConnectionState.CLOSED - - return connection - - def commit_if_has_connection(self) -> None: - """If the named connection exists, commit the current transaction.""" - connection = self.get_if_exists() - if connection: - self.commit() - - def _add_query_comment(self, sql: str) -> str: - if self.query_header is None: - return sql - return self.query_header.add(sql) - - @abc.abstractmethod - def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False - ) -> Tuple[AdapterResponse, agate.Table]: - """Execute the given SQL. - - :param str sql: The sql to execute. - :param bool auto_begin: If set, and dbt is not currently inside a - transaction, automatically begin one. - :param bool fetch: If set, fetch results. - :return: A tuple of the query status and results (empty if fetch=False). 
- :rtype: Tuple[AdapterResponse, agate.Table] - """ - raise dbt.exceptions.NotImplementedError("`execute` is not implemented for this adapter!") diff --git a/core/dbt/adapters/base/impl.py b/core/dbt/adapters/base/impl.py deleted file mode 100644 index d18c9af7f50..00000000000 --- a/core/dbt/adapters/base/impl.py +++ /dev/null @@ -1,1481 +0,0 @@ -import abc -from concurrent.futures import as_completed, Future -from contextlib import contextmanager -from datetime import datetime -from enum import Enum -import time -from itertools import chain -from typing import ( - Any, - Callable, - Dict, - Iterable, - Iterator, - List, - Mapping, - Optional, - Set, - Tuple, - Type, - Union, -) - -from dbt.contracts.graph.nodes import ColumnLevelConstraint, ConstraintType, ModelLevelConstraint - -import agate -import pytz - -from dbt.exceptions import ( - DbtInternalError, - DbtRuntimeError, - DbtValidationError, - MacroArgTypeError, - MacroResultError, - NotImplementedError, - NullRelationCacheAttemptedError, - NullRelationDropAttemptedError, - QuoteConfigTypeError, - RelationReturnedMultipleResultsError, - RenameToNoneAttemptedError, - SnapshotTargetIncompleteError, - SnapshotTargetNotSnapshotTableError, - UnexpectedNonTimestampError, - UnexpectedNullError, -) - -from dbt.adapters.protocol import AdapterConfig, ConnectionManagerProtocol -from dbt.clients.agate_helper import empty_table, merge_tables, table_from_rows -from dbt.clients.jinja import MacroGenerator -from dbt.contracts.graph.manifest import Manifest, MacroManifest -from dbt.contracts.graph.nodes import ResultNode -from dbt.events.functions import fire_event, warn_or_error -from dbt.events.types import ( - CacheMiss, - ListRelations, - CodeExecution, - CodeExecutionStatus, - CatalogGenerationError, - ConstraintNotSupported, - ConstraintNotEnforced, -) -from dbt.utils import filter_null_values, executor, cast_to_str, AttrDict - -from dbt.adapters.base.connections import Connection, AdapterResponse -from dbt.adapters.base.meta import AdapterMeta, available -from dbt.adapters.base.relation import ( - ComponentName, - BaseRelation, - InformationSchema, - SchemaSearchMap, -) -from dbt.adapters.base import Column as BaseColumn -from dbt.adapters.base import Credentials -from dbt.adapters.cache import RelationsCache, _make_ref_key_dict -from dbt import deprecations - -GET_CATALOG_MACRO_NAME = "get_catalog" -FRESHNESS_MACRO_NAME = "collect_freshness" - - -class ConstraintSupport(str, Enum): - ENFORCED = "enforced" - NOT_ENFORCED = "not_enforced" - NOT_SUPPORTED = "not_supported" - - -def _expect_row_value(key: str, row: agate.Row): - if key not in row.keys(): - raise DbtInternalError( - 'Got a row without "{}" column, columns: {}'.format(key, row.keys()) - ) - return row[key] - - -def _catalog_filter_schemas(manifest: Manifest) -> Callable[[agate.Row], bool]: - """Return a function that takes a row and decides if the row should be - included in the catalog output. 
- """ - schemas = frozenset((d.lower(), s.lower()) for d, s in manifest.get_used_schemas()) - - def test(row: agate.Row) -> bool: - table_database = _expect_row_value("table_database", row) - table_schema = _expect_row_value("table_schema", row) - # the schema may be present but None, which is not an error and should - # be filtered out - if table_schema is None: - return False - return (table_database.lower(), table_schema.lower()) in schemas - - return test - - -def _utc(dt: Optional[datetime], source: BaseRelation, field_name: str) -> datetime: - """If dt has a timezone, return a new datetime that's in UTC. Otherwise, - assume the datetime is already for UTC and add the timezone. - """ - if dt is None: - raise UnexpectedNullError(field_name, source) - - elif not hasattr(dt, "tzinfo"): - raise UnexpectedNonTimestampError(field_name, source, dt) - - elif dt.tzinfo: - return dt.astimezone(pytz.UTC) - else: - return dt.replace(tzinfo=pytz.UTC) - - -def _relation_name(rel: Optional[BaseRelation]) -> str: - if rel is None: - return "null relation" - else: - return str(rel) - - -def log_code_execution(code_execution_function): - # decorator to log code and execution time - if code_execution_function.__name__ != "submit_python_job": - raise ValueError("this should be only used to log submit_python_job now") - - def execution_with_log(*args): - self = args[0] - connection_name = self.connections.get_thread_connection().name - fire_event(CodeExecution(conn_name=connection_name, code_content=args[2])) - start_time = time.time() - response = code_execution_function(*args) - fire_event( - CodeExecutionStatus( - status=response._message, elapsed=round((time.time() - start_time), 2) - ) - ) - return response - - return execution_with_log - - -class PythonJobHelper: - def __init__(self, parsed_model: Dict, credential: Credentials) -> None: - raise NotImplementedError("PythonJobHelper is not implemented yet") - - def submit(self, compiled_code: str) -> Any: - raise NotImplementedError("PythonJobHelper submit function is not implemented yet") - - -class BaseAdapter(metaclass=AdapterMeta): - """The BaseAdapter provides an abstract base class for adapters. - - Adapters must implement the following methods and macros. Some of the - methods can be safely overridden as a noop, where it makes sense - (transactions on databases that don't support them, for instance). Those - methods are marked with a (passable) in their docstrings. Check docstrings - for type information, etc. - - To implement a macro, implement "${adapter_type}__${macro_name}" in the - adapter's internal project. - - To invoke a method in an adapter macro, call it on the 'adapter' Jinja - object using dot syntax. - - To invoke a method in model code, add the @available decorator atop a method - declaration. Methods are invoked as macros. 
- - Methods: - - exception_handler - - date_function - - list_schemas - - drop_relation - - truncate_relation - - rename_relation - - get_columns_in_relation - - get_column_schema_from_query - - expand_column_types - - list_relations_without_caching - - is_cancelable - - create_schema - - drop_schema - - quote - - convert_text_type - - convert_number_type - - convert_boolean_type - - convert_datetime_type - - convert_date_type - - convert_time_type - - standardize_grants_dict - - Macros: - - get_catalog - """ - - Relation: Type[BaseRelation] = BaseRelation - Column: Type[BaseColumn] = BaseColumn - ConnectionManager: Type[ConnectionManagerProtocol] - - # A set of clobber config fields accepted by this adapter - # for use in materializations - AdapterSpecificConfigs: Type[AdapterConfig] = AdapterConfig - - CONSTRAINT_SUPPORT = { - ConstraintType.check: ConstraintSupport.NOT_SUPPORTED, - ConstraintType.not_null: ConstraintSupport.ENFORCED, - ConstraintType.unique: ConstraintSupport.NOT_ENFORCED, - ConstraintType.primary_key: ConstraintSupport.NOT_ENFORCED, - ConstraintType.foreign_key: ConstraintSupport.ENFORCED, - } - - def __init__(self, config): - self.config = config - self.cache = RelationsCache() - self.connections = self.ConnectionManager(config) - self._macro_manifest_lazy: Optional[MacroManifest] = None - - ### - # Methods that pass through to the connection manager - ### - def acquire_connection(self, name=None) -> Connection: - return self.connections.set_connection_name(name) - - def release_connection(self) -> None: - self.connections.release() - - def cleanup_connections(self) -> None: - self.connections.cleanup_all() - - def clear_transaction(self) -> None: - self.connections.clear_transaction() - - def commit_if_has_connection(self) -> None: - self.connections.commit_if_has_connection() - - def debug_query(self) -> None: - self.execute("select 1 as id") - - def nice_connection_name(self) -> str: - conn = self.connections.get_if_exists() - if conn is None or conn.name is None: - return "<None>" - return conn.name - - @contextmanager - def connection_named(self, name: str, node: Optional[ResultNode] = None) -> Iterator[None]: - try: - if self.connections.query_header is not None: - self.connections.query_header.set(name, node) - self.acquire_connection(name) - yield - finally: - self.release_connection() - if self.connections.query_header is not None: - self.connections.query_header.reset() - - @contextmanager - def connection_for(self, node: ResultNode) -> Iterator[None]: - with self.connection_named(node.unique_id, node): - yield - - @available.parse(lambda *a, **k: ("", empty_table())) - def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None - ) -> Tuple[AdapterResponse, agate.Table]: - """Execute the given SQL. This is a thin wrapper around - ConnectionManager.execute. - - :param str sql: The sql to execute. - :param bool auto_begin: If set, and dbt is not currently inside a - transaction, automatically begin one. - :param bool fetch: If set, fetch results. - :param Optional[int] limit: If set, only fetch n number of rows - :return: A tuple of the query status and results (empty if fetch=False). - :rtype: Tuple[AdapterResponse, agate.Table] - """ - return self.connections.execute(sql=sql, auto_begin=auto_begin, fetch=fetch, limit=limit) - - def validate_sql(self, sql: str) -> AdapterResponse: - """Submit the given SQL to the engine for validation, but not execution. 
- - This should throw an appropriate exception if the input SQL is invalid, although - in practice that will generally be handled by delegating to an existing method - for execution and allowing the error handler to take care of the rest. - - :param str sql: The sql to validate - """ - raise NotImplementedError("`validate_sql` is not implemented for this adapter!") - - @available.parse(lambda *a, **k: []) - def get_column_schema_from_query(self, sql: str) -> List[BaseColumn]: - """Get a list of the Columns with names and data types from the given sql.""" - _, cursor = self.connections.add_select_query(sql) - columns = [ - self.Column.create( - column_name, self.connections.data_type_code_to_name(column_type_code) - ) - # https://peps.python.org/pep-0249/#description - for column_name, column_type_code, *_ in cursor.description - ] - return columns - - @available.parse(lambda *a, **k: ("", empty_table())) - def get_partitions_metadata(self, table: str) -> Tuple[agate.Table]: - """Obtain partitions metadata for a BigQuery partitioned table. - - :param str table_id: a partitioned table id, in standard SQL format. - :return: a partition metadata tuple, as described in - https://cloud.google.com/bigquery/docs/creating-partitioned-tables#getting_partition_metadata_using_meta_tables. - :rtype: agate.Table - """ - return self.connections.get_partitions_metadata(table=table) - - ### - # Methods that should never be overridden - ### - @classmethod - def type(cls) -> str: - """Get the type of this adapter. Types must be class-unique and - consistent. - - :return: The type name - :rtype: str - """ - return cls.ConnectionManager.TYPE - - @property - def _macro_manifest(self) -> MacroManifest: - if self._macro_manifest_lazy is None: - return self.load_macro_manifest() - return self._macro_manifest_lazy - - def check_macro_manifest(self) -> Optional[MacroManifest]: - """Return the internal manifest (used for executing macros) if it's - been initialized, otherwise return None. - """ - return self._macro_manifest_lazy - - def load_macro_manifest(self, base_macros_only=False) -> MacroManifest: - # base_macros_only is for the test framework - if self._macro_manifest_lazy is None: - # avoid a circular import - from dbt.parser.manifest import ManifestLoader - - manifest = ManifestLoader.load_macros( - self.config, - self.connections.set_query_header, - base_macros_only=base_macros_only, - ) - # TODO CT-211 - self._macro_manifest_lazy = manifest # type: ignore[assignment] - # TODO CT-211 - return self._macro_manifest_lazy # type: ignore[return-value] - - def clear_macro_manifest(self): - if self._macro_manifest_lazy is not None: - self._macro_manifest_lazy = None - - ### - # Caching methods - ### - def _schema_is_cached(self, database: Optional[str], schema: str) -> bool: - """Check if the schema is cached, and by default logs if it is not.""" - - if (database, schema) not in self.cache: - fire_event( - CacheMiss( - conn_name=self.nice_connection_name(), - database=cast_to_str(database), - schema=schema, - ) - ) - return False - else: - return True - - def _get_cache_schemas(self, manifest: Manifest) -> Set[BaseRelation]: - """Get the set of schema relations that the cache logic needs to - populate. This means only executable nodes are included. 
- """ - # the cache only cares about executable nodes - return { - self.Relation.create_from(self.config, node).without_identifier() - for node in manifest.nodes.values() - if (node.is_relational and not node.is_ephemeral_model and not node.is_external_node) - } - - def _get_catalog_schemas(self, manifest: Manifest) -> SchemaSearchMap: - """Get a mapping of each node's "information_schema" relations to a - set of all schemas expected in that information_schema. - - There may be keys that are technically duplicates on the database side, - for example all of '"foo", 'foo', '"FOO"' and 'FOO' could coexist as - databases, and values could overlap as appropriate. All values are - lowercase strings. - """ - info_schema_name_map = SchemaSearchMap() - nodes: Iterator[ResultNode] = chain( - [ - node - for node in manifest.nodes.values() - if (node.is_relational and not node.is_ephemeral_model) - ], - manifest.sources.values(), - ) - for node in nodes: - relation = self.Relation.create_from(self.config, node) - info_schema_name_map.add(relation) - # result is a map whose keys are information_schema Relations without - # identifiers that have appropriate database prefixes, and whose values - # are sets of lowercase schema names that are valid members of those - # databases - return info_schema_name_map - - def _relations_cache_for_schemas( - self, manifest: Manifest, cache_schemas: Optional[Set[BaseRelation]] = None - ) -> None: - """Populate the relations cache for the given schemas. Returns an - iterable of the schemas populated, as strings. - """ - if not cache_schemas: - cache_schemas = self._get_cache_schemas(manifest) - with executor(self.config) as tpe: - futures: List[Future[List[BaseRelation]]] = [] - for cache_schema in cache_schemas: - fut = tpe.submit_connected( - self, - f"list_{cache_schema.database}_{cache_schema.schema}", - self.list_relations_without_caching, - cache_schema, - ) - futures.append(fut) - - for future in as_completed(futures): - # if we can't read the relations we need to just raise anyway, - # so just call future.result() and let that raise on failure - for relation in future.result(): - self.cache.add(relation) - - # it's possible that there were no relations in some schemas. We want - # to insert the schemas we query into the cache's `.schemas` attribute - # so we can check it later - cache_update: Set[Tuple[Optional[str], Optional[str]]] = set() - for relation in cache_schemas: - cache_update.add((relation.database, relation.schema)) - self.cache.update_schemas(cache_update) - - def set_relations_cache( - self, - manifest: Manifest, - clear: bool = False, - required_schemas: Optional[Set[BaseRelation]] = None, - ) -> None: - """Run a query that gets a populated cache of the relations in the - database and set the cache on this adapter. - """ - with self.cache.lock: - if clear: - self.cache.clear() - self._relations_cache_for_schemas(manifest, required_schemas) - - @available - def cache_added(self, relation: Optional[BaseRelation]) -> str: - """Cache a new relation in dbt. It will show up in `list relations`.""" - if relation is None: - name = self.nice_connection_name() - raise NullRelationCacheAttemptedError(name) - self.cache.add(relation) - # so jinja doesn't render things - return "" - - @available - def cache_dropped(self, relation: Optional[BaseRelation]) -> str: - """Drop a relation in dbt. 
It will no longer show up in - `list relations`, and any bound views will be dropped from the cache - """ - if relation is None: - name = self.nice_connection_name() - raise NullRelationDropAttemptedError(name) - self.cache.drop(relation) - return "" - - @available - def cache_renamed( - self, - from_relation: Optional[BaseRelation], - to_relation: Optional[BaseRelation], - ) -> str: - """Rename a relation in dbt. It will show up with a new name in - `list_relations`, but bound views will remain bound. - """ - if from_relation is None or to_relation is None: - name = self.nice_connection_name() - src_name = _relation_name(from_relation) - dst_name = _relation_name(to_relation) - raise RenameToNoneAttemptedError(src_name, dst_name, name) - - self.cache.rename(from_relation, to_relation) - return "" - - ### - # Abstract methods for database-specific values, attributes, and types - ### - @classmethod - @abc.abstractmethod - def date_function(cls) -> str: - """Get the date function used by this adapter's database.""" - raise NotImplementedError("`date_function` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def is_cancelable(cls) -> bool: - raise NotImplementedError("`is_cancelable` is not implemented for this adapter!") - - ### - # Abstract methods about schemas - ### - @abc.abstractmethod - def list_schemas(self, database: str) -> List[str]: - """Get a list of existing schemas in database""" - raise NotImplementedError("`list_schemas` is not implemented for this adapter!") - - @available.parse(lambda *a, **k: False) - def check_schema_exists(self, database: str, schema: str) -> bool: - """Check if a schema exists. - - The default implementation of this is potentially unnecessarily slow, - and adapters should implement it if there is an optimized path (and - there probably is) - """ - search = (s.lower() for s in self.list_schemas(database=database)) - return schema.lower() in search - - ### - # Abstract methods about relations - ### - @abc.abstractmethod - @available.parse_none - def drop_relation(self, relation: BaseRelation) -> None: - """Drop the given relation. - - *Implementors must call self.cache.drop() to preserve cache state!* - """ - raise NotImplementedError("`drop_relation` is not implemented for this adapter!") - - @abc.abstractmethod - @available.parse_none - def truncate_relation(self, relation: BaseRelation) -> None: - """Truncate the given relation.""" - raise NotImplementedError("`truncate_relation` is not implemented for this adapter!") - - @abc.abstractmethod - @available.parse_none - def rename_relation(self, from_relation: BaseRelation, to_relation: BaseRelation) -> None: - """Rename the relation from from_relation to to_relation. - - Implementors must call self.cache.rename() to preserve cache state. 
- """ - raise NotImplementedError("`rename_relation` is not implemented for this adapter!") - - @abc.abstractmethod - @available.parse_list - def get_columns_in_relation(self, relation: BaseRelation) -> List[BaseColumn]: - """Get a list of the columns in the given Relation.""" - raise NotImplementedError("`get_columns_in_relation` is not implemented for this adapter!") - - @available.deprecated("get_columns_in_relation", lambda *a, **k: []) - def get_columns_in_table(self, schema: str, identifier: str) -> List[BaseColumn]: - """DEPRECATED: Get a list of the columns in the given table.""" - relation = self.Relation.create( - database=self.config.credentials.database, - schema=schema, - identifier=identifier, - quote_policy=self.config.quoting, - ) - return self.get_columns_in_relation(relation) - - @abc.abstractmethod - def expand_column_types(self, goal: BaseRelation, current: BaseRelation) -> None: - """Expand the current table's types to match the goal table. (passable) - - :param self.Relation goal: A relation that currently exists in the - database with columns of the desired types. - :param self.Relation current: A relation that currently exists in the - database with columns of unspecified types. - """ - raise NotImplementedError( - "`expand_target_column_types` is not implemented for this adapter!" - ) - - @abc.abstractmethod - def list_relations_without_caching(self, schema_relation: BaseRelation) -> List[BaseRelation]: - """List relations in the given schema, bypassing the cache. - - This is used as the underlying behavior to fill the cache. - - :param schema_relation: A relation containing the database and schema - as appropraite for the underlying data warehouse - :return: The relations in schema - :rtype: List[self.Relation] - """ - raise NotImplementedError( - "`list_relations_without_caching` is not implemented for this adapter!" - ) - - ### - # Methods about grants - ### - @available - def standardize_grants_dict(self, grants_table: agate.Table) -> dict: - """Translate the result of `show grants` (or equivalent) to match the - grants which a user would configure in their project. - - Ideally, the SQL to show grants should also be filtering: - filter OUT any grants TO the current user/role (e.g. OWNERSHIP). - If that's not possible in SQL, it can be done in this method instead. - - :param grants_table: An agate table containing the query result of - the SQL returned by get_show_grant_sql - :return: A standardized dictionary matching the `grants` config - :rtype: dict - """ - grants_dict: Dict[str, List[str]] = {} - for row in grants_table: - grantee = row["grantee"] - privilege = row["privilege_type"] - if privilege in grants_dict.keys(): - grants_dict[privilege].append(grantee) - else: - grants_dict.update({privilege: [grantee]}) - return grants_dict - - ### - # Provided methods about relations - ### - @available.parse_list - def get_missing_columns( - self, from_relation: BaseRelation, to_relation: BaseRelation - ) -> List[BaseColumn]: - """Returns a list of Columns in from_relation that are missing from - to_relation. 
- """ - if not isinstance(from_relation, self.Relation): - raise MacroArgTypeError( - method_name="get_missing_columns", - arg_name="from_relation", - got_value=from_relation, - expected_type=self.Relation, - ) - - if not isinstance(to_relation, self.Relation): - raise MacroArgTypeError( - method_name="get_missing_columns", - arg_name="to_relation", - got_value=to_relation, - expected_type=self.Relation, - ) - - from_columns = {col.name: col for col in self.get_columns_in_relation(from_relation)} - - to_columns = {col.name: col for col in self.get_columns_in_relation(to_relation)} - - missing_columns = set(from_columns.keys()) - set(to_columns.keys()) - - return [col for (col_name, col) in from_columns.items() if col_name in missing_columns] - - @available.parse_none - def valid_snapshot_target(self, relation: BaseRelation) -> None: - """Ensure that the target relation is valid, by making sure it has the - expected columns. - - :param Relation relation: The relation to check - :raises InvalidMacroArgType: If the columns are - incorrect. - """ - if not isinstance(relation, self.Relation): - raise MacroArgTypeError( - method_name="valid_snapshot_target", - arg_name="relation", - got_value=relation, - expected_type=self.Relation, - ) - - columns = self.get_columns_in_relation(relation) - names = set(c.name.lower() for c in columns) - expanded_keys = ("scd_id", "valid_from", "valid_to") - extra = [] - missing = [] - for legacy in expanded_keys: - desired = "dbt_" + legacy - if desired not in names: - missing.append(desired) - if legacy in names: - extra.append(legacy) - - if missing: - if extra: - raise SnapshotTargetIncompleteError(extra, missing) - else: - raise SnapshotTargetNotSnapshotTableError(missing) - - @available.parse_none - def expand_target_column_types( - self, from_relation: BaseRelation, to_relation: BaseRelation - ) -> None: - if not isinstance(from_relation, self.Relation): - raise MacroArgTypeError( - method_name="expand_target_column_types", - arg_name="from_relation", - got_value=from_relation, - expected_type=self.Relation, - ) - - if not isinstance(to_relation, self.Relation): - raise MacroArgTypeError( - method_name="expand_target_column_types", - arg_name="to_relation", - got_value=to_relation, - expected_type=self.Relation, - ) - - self.expand_column_types(from_relation, to_relation) - - def list_relations(self, database: Optional[str], schema: str) -> List[BaseRelation]: - if self._schema_is_cached(database, schema): - return self.cache.get_relations(database, schema) - - schema_relation = self.Relation.create( - database=database, - schema=schema, - identifier="", - quote_policy=self.config.quoting, - ).without_identifier() - - # we can't build the relations cache because we don't have a - # manifest so we can't run any operations. - relations = self.list_relations_without_caching(schema_relation) - - # if the cache is already populated, add this schema in - # otherwise, skip updating the cache and just ignore - if self.cache: - for relation in relations: - self.cache.add(relation) - if not relations: - # it's possible that there were no relations in some schemas. 
We want - # to insert the schemas we query into the cache's `.schemas` attribute - # so we can check it later - self.cache.update_schemas([(database, schema)]) - - fire_event( - ListRelations( - database=cast_to_str(database), - schema=schema, - relations=[_make_ref_key_dict(x) for x in relations], - ) - ) - - return relations - - def _make_match_kwargs(self, database: str, schema: str, identifier: str) -> Dict[str, str]: - quoting = self.config.quoting - if identifier is not None and quoting["identifier"] is False: - identifier = identifier.lower() - - if schema is not None and quoting["schema"] is False: - schema = schema.lower() - - if database is not None and quoting["database"] is False: - database = database.lower() - - return filter_null_values( - { - "database": database, - "identifier": identifier, - "schema": schema, - } - ) - - def _make_match( - self, - relations_list: List[BaseRelation], - database: str, - schema: str, - identifier: str, - ) -> List[BaseRelation]: - matches = [] - - search = self._make_match_kwargs(database, schema, identifier) - - for relation in relations_list: - if relation.matches(**search): - matches.append(relation) - - return matches - - @available.parse_none - def get_relation(self, database: str, schema: str, identifier: str) -> Optional[BaseRelation]: - relations_list = self.list_relations(database, schema) - - matches = self._make_match(relations_list, database, schema, identifier) - - if len(matches) > 1: - kwargs = { - "identifier": identifier, - "schema": schema, - "database": database, - } - raise RelationReturnedMultipleResultsError(kwargs, matches) - - elif matches: - return matches[0] - - return None - - @available.deprecated("get_relation", lambda *a, **k: False) - def already_exists(self, schema: str, name: str) -> bool: - """DEPRECATED: Return if a model already exists in the database""" - database = self.config.credentials.database - relation = self.get_relation(database, schema, name) - return relation is not None - - ### - # ODBC FUNCTIONS -- these should not need to change for every adapter, - # although some adapters may override them - ### - @abc.abstractmethod - @available.parse_none - def create_schema(self, relation: BaseRelation): - """Create the given schema if it does not exist.""" - raise NotImplementedError("`create_schema` is not implemented for this adapter!") - - @abc.abstractmethod - @available.parse_none - def drop_schema(self, relation: BaseRelation): - """Drop the given schema (and everything in it) if it exists.""" - raise NotImplementedError("`drop_schema` is not implemented for this adapter!") - - @available - @classmethod - @abc.abstractmethod - def quote(cls, identifier: str) -> str: - """Quote the given identifier, as appropriate for the database.""" - raise NotImplementedError("`quote` is not implemented for this adapter!") - - @available - def quote_as_configured(self, identifier: str, quote_key: str) -> str: - """Quote or do not quote the given identifer as configured in the - project config for the quote key. - - The quote key should be one of 'database' (on bigquery, 'profile'), - 'identifier', or 'schema', or it will be treated as if you set `True`. 
- """ - try: - key = ComponentName(quote_key) - except ValueError: - return identifier - - default = self.Relation.get_default_quote_policy().get_part(key) - if self.config.quoting.get(key, default): - return self.quote(identifier) - else: - return identifier - - @available - def quote_seed_column(self, column: str, quote_config: Optional[bool]) -> str: - quote_columns: bool = True - if isinstance(quote_config, bool): - quote_columns = quote_config - elif quote_config is None: - pass - else: - raise QuoteConfigTypeError(quote_config) - - if quote_columns: - return self.quote(column) - else: - return column - - ### - # Conversions: These must be implemented by concrete implementations, for - # converting agate types into their sql equivalents. - ### - @classmethod - @abc.abstractmethod - def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.Text - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_text_type` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.Number - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_number_type` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.Boolean - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_boolean_type` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.DateTime - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_datetime_type` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the agate.Date - type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. - :return: The name of the type in the database - """ - raise NotImplementedError("`convert_date_type` is not implemented for this adapter!") - - @classmethod - @abc.abstractmethod - def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str: - """Return the type in the database that best maps to the - agate.TimeDelta type for the given agate table and column index. - - :param agate_table: The table - :param col_idx: The index into the agate table for the column. 
- :return: The name of the type in the database - """ - raise NotImplementedError("`convert_time_type` is not implemented for this adapter!") - - @available - @classmethod - def convert_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]: - return cls.convert_agate_type(agate_table, col_idx) - - @classmethod - def convert_agate_type(cls, agate_table: agate.Table, col_idx: int) -> Optional[str]: - agate_type: Type = agate_table.column_types[col_idx] - conversions: List[Tuple[Type, Callable[..., str]]] = [ - (agate.Text, cls.convert_text_type), - (agate.Number, cls.convert_number_type), - (agate.Boolean, cls.convert_boolean_type), - (agate.DateTime, cls.convert_datetime_type), - (agate.Date, cls.convert_date_type), - (agate.TimeDelta, cls.convert_time_type), - ] - for agate_cls, func in conversions: - if isinstance(agate_type, agate_cls): - return func(agate_table, col_idx) - - return None - - ### - # Operations involving the manifest - ### - def execute_macro( - self, - macro_name: str, - manifest: Optional[Manifest] = None, - project: Optional[str] = None, - context_override: Optional[Dict[str, Any]] = None, - kwargs: Optional[Dict[str, Any]] = None, - text_only_columns: Optional[Iterable[str]] = None, - ) -> AttrDict: - """Look macro_name up in the manifest and execute its results. - - :param macro_name: The name of the macro to execute. - :param manifest: The manifest to use for generating the base macro - execution context. If none is provided, use the internal manifest. - :param project: The name of the project to search in, or None for the - first match. - :param context_override: An optional dict to update() the macro - execution context. - :param kwargs: An optional dict of keyword args used to pass to the - macro. - """ - - if kwargs is None: - kwargs = {} - if context_override is None: - context_override = {} - - if manifest is None: - # TODO CT-211 - manifest = self._macro_manifest # type: ignore[assignment] - # TODO CT-211 - macro = manifest.find_macro_by_name( # type: ignore[union-attr] - macro_name, self.config.project_name, project - ) - if macro is None: - if project is None: - package_name = "any package" - else: - package_name = 'the "{}" package'.format(project) - - raise DbtRuntimeError( - 'dbt could not find a macro with the name "{}" in {}'.format( - macro_name, package_name - ) - ) - # This causes a reference cycle, as generate_runtime_macro_context() - # ends up calling get_adapter, so the import has to be here. - from dbt.context.providers import generate_runtime_macro_context - - macro_context = generate_runtime_macro_context( - # TODO CT-211 - macro=macro, - config=self.config, - manifest=manifest, # type: ignore[arg-type] - package_name=project, - ) - macro_context.update(context_override) - - macro_function = MacroGenerator(macro, macro_context) - - with self.connections.exception_handler(f"macro {macro_name}"): - result = macro_function(**kwargs) - return result - - @classmethod - def _catalog_filter_table(cls, table: agate.Table, manifest: Manifest) -> agate.Table: - """Filter the table as appropriate for catalog entries. Subclasses can - override this to change filtering rules on a per-adapter basis. 
- """ - # force database + schema to be strings - table = table_from_rows( - table.rows, - table.column_names, - text_only_columns=["table_database", "table_schema", "table_name"], - ) - return table.where(_catalog_filter_schemas(manifest)) - - def _get_one_catalog( - self, - information_schema: InformationSchema, - schemas: Set[str], - manifest: Manifest, - ) -> agate.Table: - kwargs = {"information_schema": information_schema, "schemas": schemas} - table = self.execute_macro( - GET_CATALOG_MACRO_NAME, - kwargs=kwargs, - # pass in the full manifest so we get any local project - # overrides - manifest=manifest, - ) - - results = self._catalog_filter_table(table, manifest) # type: ignore[arg-type] - return results - - def get_catalog(self, manifest: Manifest) -> Tuple[agate.Table, List[Exception]]: - schema_map = self._get_catalog_schemas(manifest) - - with executor(self.config) as tpe: - futures: List[Future[agate.Table]] = [] - for info, schemas in schema_map.items(): - if len(schemas) == 0: - continue - name = ".".join([str(info.database), "information_schema"]) - - fut = tpe.submit_connected( - self, name, self._get_one_catalog, info, schemas, manifest - ) - futures.append(fut) - - catalogs, exceptions = catch_as_completed(futures) - - return catalogs, exceptions - - def cancel_open_connections(self): - """Cancel all open connections.""" - return self.connections.cancel_open() - - def calculate_freshness( - self, - source: BaseRelation, - loaded_at_field: str, - filter: Optional[str], - manifest: Optional[Manifest] = None, - ) -> Tuple[Optional[AdapterResponse], Dict[str, Any]]: - """Calculate the freshness of sources in dbt, and return it""" - kwargs: Dict[str, Any] = { - "source": source, - "loaded_at_field": loaded_at_field, - "filter": filter, - } - - # run the macro - # in older versions of dbt-core, the 'collect_freshness' macro returned the table of results directly - # starting in v1.5, by default, we return both the table and the adapter response (metadata about the query) - result: Union[ - AttrDict, # current: contains AdapterResponse + agate.Table - agate.Table, # previous: just table - ] - result = self.execute_macro(FRESHNESS_MACRO_NAME, kwargs=kwargs, manifest=manifest) - if isinstance(result, agate.Table): - deprecations.warn("collect-freshness-return-signature") - adapter_response = None - table = result - else: - adapter_response, table = result.response, result.table # type: ignore[attr-defined] - # now we have a 1-row table of the maximum `loaded_at_field` value and - # the current time according to the db. - if len(table) != 1 or len(table[0]) != 2: - raise MacroResultError(FRESHNESS_MACRO_NAME, table) - if table[0][0] is None: - # no records in the table, so really the max_loaded_at was - # infinitely long ago. Just call it 0:00 January 1 year UTC - max_loaded_at = datetime(1, 1, 1, 0, 0, 0, tzinfo=pytz.UTC) - else: - max_loaded_at = _utc(table[0][0], source, loaded_at_field) - - snapshotted_at = _utc(table[0][1], source, loaded_at_field) - age = (snapshotted_at - max_loaded_at).total_seconds() - freshness = { - "max_loaded_at": max_loaded_at, - "snapshotted_at": snapshotted_at, - "age": age, - } - return adapter_response, freshness - - def pre_model_hook(self, config: Mapping[str, Any]) -> Any: - """A hook for running some operation before the model materialization - runs. The hook can assume it has a connection available. - - The only parameter is a configuration dictionary (the same one - available in the materialization context). 
It should be considered
-        read-only.
-
-        The pre-model hook may return anything as a context, which will be
-        passed to the post-model hook.
-        """
-        pass
-
-    def post_model_hook(self, config: Mapping[str, Any], context: Any) -> None:
-        """A hook for running some operation after the model materialization
-        runs. The hook can assume it has a connection available.
-
-        The first parameter is a configuration dictionary (the same one
-        available in the materialization context). It should be considered
-        read-only.
-
-        The second parameter is the value returned by pre_model_hook.
-        """
-        pass
-
-    def get_compiler(self):
-        from dbt.compilation import Compiler
-
-        return Compiler(self.config)
-
-    # Methods used in adapter tests
-    def update_column_sql(
-        self,
-        dst_name: str,
-        dst_column: str,
-        clause: str,
-        where_clause: Optional[str] = None,
-    ) -> str:
-        clause = f"update {dst_name} set {dst_column} = {clause}"
-        if where_clause is not None:
-            clause += f" where {where_clause}"
-        return clause
-
-    def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str:
-        # for backwards compatibility, we're compelled to set some sort of
-        # default. A lot of searching has led me to believe that the
-        # '+ interval' syntax used in postgres/redshift is relatively common
-        # and might even be the SQL standard's intention.
-        return f"{add_to} + interval '{number} {interval}'"
-
-    def string_add_sql(
-        self,
-        add_to: str,
-        value: str,
-        location="append",
-    ) -> str:
-        if location == "append":
-            return f"{add_to} || '{value}'"
-        elif location == "prepend":
-            return f"'{value}' || {add_to}"
-        else:
-            raise DbtRuntimeError(f'Got an unexpected location value of "{location}"')
-
-    def get_rows_different_sql(
-        self,
-        relation_a: BaseRelation,
-        relation_b: BaseRelation,
-        column_names: Optional[List[str]] = None,
-        except_operator: str = "EXCEPT",
-    ) -> str:
-        """Generate SQL for a query that returns a single row with two
-        columns: the number of rows that are different between the two
-        relations and the number of mismatched rows.
-        """
-        # This method only really exists for test reasons.
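-        # Purely illustrative (hypothetical values): the rendered query returns
-        # one row shaped roughly like
-        #
-        #     row_count_difference | num_mismatched
-        #     ---------------------+---------------
-        #                        0 |             3
-        #
-        # i.e. both relations have the same row count, but three rows differ.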
-        names: List[str]
-        if column_names is None:
-            columns = self.get_columns_in_relation(relation_a)
-            names = sorted((self.quote(c.name) for c in columns))
-        else:
-            names = sorted((self.quote(n) for n in column_names))
-        columns_csv = ", ".join(names)
-
-        sql = COLUMNS_EQUAL_SQL.format(
-            columns=columns_csv,
-            relation_a=str(relation_a),
-            relation_b=str(relation_b),
-            except_op=except_operator,
-        )
-
-        return sql
-
-    @property
-    def python_submission_helpers(self) -> Dict[str, Type[PythonJobHelper]]:
-        raise NotImplementedError("python_submission_helpers is not specified")
-
-    @property
-    def default_python_submission_method(self) -> str:
-        raise NotImplementedError("default_python_submission_method is not specified")
-
-    @log_code_execution
-    def submit_python_job(self, parsed_model: dict, compiled_code: str) -> AdapterResponse:
-        submission_method = parsed_model["config"].get(
-            "submission_method", self.default_python_submission_method
-        )
-        if submission_method not in self.python_submission_helpers:
-            raise NotImplementedError(
-                "Submission method {} is not supported for the current adapter".format(
-                    submission_method
-                )
-            )
-        job_helper = self.python_submission_helpers[submission_method](
-            parsed_model, self.connections.profile.credentials
-        )
-        submission_result = job_helper.submit(compiled_code)
-        # process the submission result to generate an adapter response
-        return self.generate_python_submission_response(submission_result)
-
-    def generate_python_submission_response(self, submission_result: Any) -> AdapterResponse:
-        raise NotImplementedError(
-            "Your adapter needs to implement generate_python_submission_response"
-        )
-
-    def valid_incremental_strategies(self):
-        """The set of standard builtin strategies which this adapter supports out-of-the-box.
-        Not used to validate custom strategies defined by end users.
-        """
-        return ["append"]
-
-    def builtin_incremental_strategies(self):
-        return ["append", "delete+insert", "merge", "insert_overwrite"]
-
-    @available.parse_none
-    def get_incremental_strategy_macro(self, model_context, strategy: str):
-        # Construct macro_name from strategy name
-        if strategy is None:
-            strategy = "default"
-
-        # validate strategies for this adapter
-        valid_strategies = self.valid_incremental_strategies()
-        valid_strategies.append("default")
-        builtin_strategies = self.builtin_incremental_strategies()
-        if strategy in builtin_strategies and strategy not in valid_strategies:
-            raise DbtRuntimeError(
-                f"The incremental strategy '{strategy}' is not valid for this adapter"
-            )
-
-        strategy = strategy.replace("+", "_")
-        macro_name = f"get_incremental_{strategy}_sql"
-        # The model_context should have MacroGenerator callable objects for all macros
-        if macro_name not in model_context:
-            raise DbtRuntimeError(
-                'dbt could not find an incremental strategy macro with the name "{}" in {}'.format(
-                    macro_name, self.config.project_name
-                )
-            )
-
-        # This returns a callable macro
-        return model_context[macro_name]
-
-    @classmethod
-    def _parse_column_constraint(cls, raw_constraint: Dict[str, Any]) -> ColumnLevelConstraint:
-        try:
-            ColumnLevelConstraint.validate(raw_constraint)
-            return ColumnLevelConstraint.from_dict(raw_constraint)
-        except Exception:
-            raise DbtValidationError(f"Could not parse constraint: {raw_constraint}")
-
-    @classmethod
-    def render_column_constraint(cls, constraint: ColumnLevelConstraint) -> Optional[str]:
-        """Render the given constraint as DDL text.
Should be overriden by adapters which need custom constraint - rendering.""" - constraint_expression = constraint.expression or "" - - rendered_column_constraint = None - if constraint.type == ConstraintType.check and constraint_expression: - rendered_column_constraint = f"check ({constraint_expression})" - elif constraint.type == ConstraintType.not_null: - rendered_column_constraint = f"not null {constraint_expression}" - elif constraint.type == ConstraintType.unique: - rendered_column_constraint = f"unique {constraint_expression}" - elif constraint.type == ConstraintType.primary_key: - rendered_column_constraint = f"primary key {constraint_expression}" - elif constraint.type == ConstraintType.foreign_key and constraint_expression: - rendered_column_constraint = f"references {constraint_expression}" - elif constraint.type == ConstraintType.custom and constraint_expression: - rendered_column_constraint = constraint_expression - - if rendered_column_constraint: - rendered_column_constraint = rendered_column_constraint.strip() - - return rendered_column_constraint - - @available - @classmethod - def render_raw_columns_constraints(cls, raw_columns: Dict[str, Dict[str, Any]]) -> List: - rendered_column_constraints = [] - - for v in raw_columns.values(): - col_name = cls.quote(v["name"]) if v.get("quote") else v["name"] - rendered_column_constraint = [f"{col_name} {v['data_type']}"] - for con in v.get("constraints", None): - constraint = cls._parse_column_constraint(con) - c = cls.process_parsed_constraint(constraint, cls.render_column_constraint) - if c is not None: - rendered_column_constraint.append(c) - rendered_column_constraints.append(" ".join(rendered_column_constraint)) - - return rendered_column_constraints - - @classmethod - def process_parsed_constraint( - cls, parsed_constraint: Union[ColumnLevelConstraint, ModelLevelConstraint], render_func - ) -> Optional[str]: - if ( - parsed_constraint.warn_unsupported - and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_SUPPORTED - ): - warn_or_error( - ConstraintNotSupported(constraint=parsed_constraint.type.value, adapter=cls.type()) - ) - if ( - parsed_constraint.warn_unenforced - and cls.CONSTRAINT_SUPPORT[parsed_constraint.type] == ConstraintSupport.NOT_ENFORCED - ): - warn_or_error( - ConstraintNotEnforced(constraint=parsed_constraint.type.value, adapter=cls.type()) - ) - if cls.CONSTRAINT_SUPPORT[parsed_constraint.type] != ConstraintSupport.NOT_SUPPORTED: - return render_func(parsed_constraint) - - return None - - @classmethod - def _parse_model_constraint(cls, raw_constraint: Dict[str, Any]) -> ModelLevelConstraint: - try: - ModelLevelConstraint.validate(raw_constraint) - c = ModelLevelConstraint.from_dict(raw_constraint) - return c - except Exception: - raise DbtValidationError(f"Could not parse constraint: {raw_constraint}") - - @available - @classmethod - def render_raw_model_constraints(cls, raw_constraints: List[Dict[str, Any]]) -> List[str]: - return [c for c in map(cls.render_raw_model_constraint, raw_constraints) if c is not None] - - @classmethod - def render_raw_model_constraint(cls, raw_constraint: Dict[str, Any]) -> Optional[str]: - constraint = cls._parse_model_constraint(raw_constraint) - return cls.process_parsed_constraint(constraint, cls.render_model_constraint) - - @classmethod - def render_model_constraint(cls, constraint: ModelLevelConstraint) -> Optional[str]: - """Render the given constraint as DDL text. 
Should be overriden by adapters which need custom constraint - rendering.""" - constraint_prefix = f"constraint {constraint.name} " if constraint.name else "" - column_list = ", ".join(constraint.columns) - if constraint.type == ConstraintType.check and constraint.expression: - return f"{constraint_prefix}check ({constraint.expression})" - elif constraint.type == ConstraintType.unique: - constraint_expression = f" {constraint.expression}" if constraint.expression else "" - return f"{constraint_prefix}unique{constraint_expression} ({column_list})" - elif constraint.type == ConstraintType.primary_key: - constraint_expression = f" {constraint.expression}" if constraint.expression else "" - return f"{constraint_prefix}primary key{constraint_expression} ({column_list})" - elif constraint.type == ConstraintType.foreign_key and constraint.expression: - return f"{constraint_prefix}foreign key ({column_list}) references {constraint.expression}" - elif constraint.type == ConstraintType.custom and constraint.expression: - return f"{constraint_prefix}{constraint.expression}" - else: - return None - - -COLUMNS_EQUAL_SQL = """ -with diff_count as ( - SELECT - 1 as id, - COUNT(*) as num_missing FROM ( - (SELECT {columns} FROM {relation_a} {except_op} - SELECT {columns} FROM {relation_b}) - UNION ALL - (SELECT {columns} FROM {relation_b} {except_op} - SELECT {columns} FROM {relation_a}) - ) as a -), table_a as ( - SELECT COUNT(*) as num_rows FROM {relation_a} -), table_b as ( - SELECT COUNT(*) as num_rows FROM {relation_b} -), row_count_diff as ( - select - 1 as id, - table_a.num_rows - table_b.num_rows as difference - from table_a, table_b -) -select - row_count_diff.difference as row_count_difference, - diff_count.num_missing as num_mismatched -from row_count_diff -join diff_count using (id) -""".strip() - - -def catch_as_completed( - futures, # typing: List[Future[agate.Table]] -) -> Tuple[agate.Table, List[Exception]]: - # catalogs: agate.Table = agate.Table(rows=[]) - tables: List[agate.Table] = [] - exceptions: List[Exception] = [] - - for future in as_completed(futures): - exc = future.exception() - # we want to re-raise on ctrl+c and BaseException - if exc is None: - catalog = future.result() - tables.append(catalog) - elif isinstance(exc, KeyboardInterrupt) or not isinstance(exc, Exception): - raise exc - else: - warn_or_error(CatalogGenerationError(exc=str(exc))) - # exc is not None, derives from Exception, and isn't ctrl+c - exceptions.append(exc) - return merge_tables(tables), exceptions diff --git a/core/dbt/adapters/base/meta.py b/core/dbt/adapters/base/meta.py deleted file mode 100644 index de35a4f826a..00000000000 --- a/core/dbt/adapters/base/meta.py +++ /dev/null @@ -1,128 +0,0 @@ -import abc -from functools import wraps -from typing import Callable, Optional, Any, FrozenSet, Dict, Set - -from dbt.deprecations import warn, renamed_method - - -Decorator = Callable[[Any], Callable] - - -class _Available: - def __call__(self, func: Callable) -> Callable: - func._is_available_ = True # type: ignore - return func - - def parse(self, parse_replacement: Callable) -> Decorator: - """A decorator factory to indicate that a method on the adapter will be - exposed to the database wrapper, and will be stubbed out at parse time - with the given function. 
-
-        @available.parse()
-        def my_method(self, a, b):
-            if something:
-                return None
-            return big_expensive_db_query()
-
-        @available.parse(lambda *args, **kwargs: {})
-        def my_other_method(self, a, b):
-            x = {}
-            x.update(big_expensive_db_query())
-            return x
-        """
-
-        def inner(func):
-            func._parse_replacement_ = parse_replacement
-            return self(func)
-
-        return inner
-
-    def deprecated(
-        self, supported_name: str, parse_replacement: Optional[Callable] = None
-    ) -> Decorator:
-        """A decorator that marks a function as available, but also prints a
-        deprecation warning. Use like
-
-        @available.deprecated('my_new_method')
-        def my_old_method(self, arg):
-            args = compatibility_shim(arg)
-            return self.my_new_method(*args)
-
-        @available.deprecated('my_new_slow_method', lambda *a, **k: (0, ''))
-        def my_old_slow_method(self, arg):
-            args = compatibility_shim(arg)
-            return self.my_new_slow_method(*args)
-
-        To make `adapter.my_old_method` available but also print out a warning
-        on use directing users to `my_new_method`.
-
-        The optional parse_replacement, if provided, will provide a parse-time
-        replacement for the actual method (see `available.parse`).
-        """
-
-        def wrapper(func):
-            func_name = func.__name__
-            renamed_method(func_name, supported_name)
-
-            @wraps(func)
-            def inner(*args, **kwargs):
-                warn("adapter:{}".format(func_name))
-                return func(*args, **kwargs)
-
-            if parse_replacement:
-                available_function = self.parse(parse_replacement)
-            else:
-                available_function = self
-            return available_function(inner)
-
-        return wrapper
-
-    def parse_none(self, func: Callable) -> Callable:
-        wrapper = self.parse(lambda *a, **k: None)
-        return wrapper(func)
-
-    def parse_list(self, func: Callable) -> Callable:
-        wrapper = self.parse(lambda *a, **k: [])
-        return wrapper(func)
-
-
-available = _Available()
-
-
-class AdapterMeta(abc.ABCMeta):
-    _available_: FrozenSet[str]
-    _parse_replacements_: Dict[str, Callable]
-
-    def __new__(mcls, name, bases, namespace, **kwargs):
-        # mypy does not like the `**kwargs`. But `ABCMeta` itself takes
-        # `**kwargs` in its argspec here (and passes them to `type.__new__`).
-        # I'm not sure there is any benefit to it after poking around a bit,
-        # but having it doesn't hurt on the python side (and omitting it could
-        # hurt for obscure metaclass reasons, for all I know)
-        cls = abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs)  # type: ignore
-
-        # this is very much inspired by ABCMeta's own implementation
-
-        # dict mapping the method name to whether the model name should be
-        # injected into the arguments. All methods in here are exposed to the
-        # context.
-        available: Set[str] = set()
-        replacements: Dict[str, Any] = {}
-
-        # collect base class data first
-        for base in bases:
-            available.update(getattr(base, "_available_", set()))
-            replacements.update(getattr(base, "_parse_replacements_", set()))
-
-        # override with local data if it exists
-        for name, value in namespace.items():
-            if getattr(value, "_is_available_", False):
-                available.add(name)
-                parse_replacement = getattr(value, "_parse_replacement_", None)
-                if parse_replacement is not None:
-                    replacements[name] = parse_replacement
-
-        cls._available_ = frozenset(available)
-        # should this be a namedtuple so it will be immutable like _available_?
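-        # Purely illustrative: given a subclass method declared as
-        #
-        #     @available.parse_none
-        #     def drop_schema(self, relation): ...
-        #
-        # "drop_schema" ends up in cls._available_ and its parse-time stub
-        # (lambda *a, **k: None) ends up in cls._parse_replacements_ below.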
- cls._parse_replacements_ = replacements - return cls diff --git a/core/dbt/adapters/base/plugin.py b/core/dbt/adapters/base/plugin.py deleted file mode 100644 index 58481f75439..00000000000 --- a/core/dbt/adapters/base/plugin.py +++ /dev/null @@ -1,42 +0,0 @@ -from typing import List, Optional, Type - -from dbt.adapters.base import Credentials -from dbt.exceptions import CompilationError -from dbt.adapters.protocol import AdapterProtocol - - -def project_name_from_path(include_path: str) -> str: - # avoid an import cycle - from dbt.config.project import PartialProject - - partial = PartialProject.from_project_root(include_path) - if partial.project_name is None: - raise CompilationError(f"Invalid project at {include_path}: name not set!") - return partial.project_name - - -class AdapterPlugin: - """Defines the basic requirements for a dbt adapter plugin. - - :param include_path: The path to this adapter plugin's root - :param dependencies: A list of adapter names that this adapter depends - upon. - """ - - def __init__( - self, - adapter: Type[AdapterProtocol], - credentials: Type[Credentials], - include_path: str, - dependencies: Optional[List[str]] = None, - ): - - self.adapter: Type[AdapterProtocol] = adapter - self.credentials: Type[Credentials] = credentials - self.include_path: str = include_path - self.project_name: str = project_name_from_path(include_path) - self.dependencies: List[str] - if dependencies is None: - self.dependencies = [] - else: - self.dependencies = dependencies diff --git a/core/dbt/adapters/base/query_headers.py b/core/dbt/adapters/base/query_headers.py deleted file mode 100644 index bfacd2aee8c..00000000000 --- a/core/dbt/adapters/base/query_headers.py +++ /dev/null @@ -1,102 +0,0 @@ -from threading import local -from typing import Optional, Callable, Dict, Any - -from dbt.clients.jinja import QueryStringGenerator - -from dbt.context.manifest import generate_query_header_context -from dbt.contracts.connection import AdapterRequiredConfig, QueryComment -from dbt.contracts.graph.nodes import ResultNode -from dbt.contracts.graph.manifest import Manifest -from dbt.exceptions import DbtRuntimeError - - -class NodeWrapper: - def __init__(self, node): - self._inner_node = node - - def __getattr__(self, name): - return getattr(self._inner_node, name, "") - - -class _QueryComment(local): - """A thread-local class storing thread-specific state information for - connection management, namely: - - the current thread's query comment. 
- - a source_name indicating what set the current thread's query comment - """ - - def __init__(self, initial): - self.query_comment: Optional[str] = initial - self.append = False - - def add(self, sql: str) -> str: - if not self.query_comment: - return sql - - if self.append: - # replace last ';' with '<comment>;' - sql = sql.rstrip() - if sql[-1] == ";": - sql = sql[:-1] - return "{}\n/* {} */;".format(sql, self.query_comment.strip()) - - return "{}\n/* {} */".format(sql, self.query_comment.strip()) - - return "/* {} */\n{}".format(self.query_comment.strip(), sql) - - def set(self, comment: Optional[str], append: bool): - if isinstance(comment, str) and "*/" in comment: - # tell the user "no" so they don't hurt themselves by writing - # garbage - raise DbtRuntimeError(f'query comment contains illegal value "*/": {comment}') - self.query_comment = comment - self.append = append - - -QueryStringFunc = Callable[[str, Optional[NodeWrapper]], str] - - -class MacroQueryStringSetter: - def __init__(self, config: AdapterRequiredConfig, manifest: Manifest): - self.manifest = manifest - self.config = config - - comment_macro = self._get_comment_macro() - self.generator: QueryStringFunc = lambda name, model: "" - # if the comment value was None or the empty string, just skip it - if comment_macro: - assert isinstance(comment_macro, str) - macro = "\n".join( - ( - "{%- macro query_comment_macro(connection_name, node) -%}", - comment_macro, - "{% endmacro %}", - ) - ) - ctx = self._get_context() - self.generator = QueryStringGenerator(macro, ctx) - self.comment = _QueryComment(None) - self.reset() - - def _get_comment_macro(self) -> Optional[str]: - return self.config.query_comment.comment - - def _get_context(self) -> Dict[str, Any]: - return generate_query_header_context(self.config, self.manifest) - - def add(self, sql: str) -> str: - return self.comment.add(sql) - - def reset(self): - self.set("master", None) - - def set(self, name: str, node: Optional[ResultNode]): - wrapped: Optional[NodeWrapper] = None - if node is not None: - wrapped = NodeWrapper(node) - comment_str = self.generator(name, wrapped) - - append = False - if isinstance(self.config.query_comment, QueryComment): - append = self.config.query_comment.append - self.comment.set(comment_str, append) diff --git a/core/dbt/adapters/base/relation.py b/core/dbt/adapters/base/relation.py deleted file mode 100644 index ae4e585d524..00000000000 --- a/core/dbt/adapters/base/relation.py +++ /dev/null @@ -1,465 +0,0 @@ -from collections.abc import Hashable -from dataclasses import dataclass, field -from typing import Optional, TypeVar, Any, Type, Dict, Iterator, Tuple, Set - -from dbt.contracts.graph.nodes import SourceDefinition, ManifestNode, ResultNode, ParsedNode -from dbt.contracts.relation import ( - RelationType, - ComponentName, - HasQuoting, - FakeAPIObject, - Policy, - Path, -) -from dbt.exceptions import ( - ApproximateMatchError, - DbtInternalError, - MultipleDatabasesNotAllowedError, -) -from dbt.node_types import NodeType -from dbt.utils import filter_null_values, deep_merge, classproperty - -import dbt.exceptions - - -Self = TypeVar("Self", bound="BaseRelation") - - -@dataclass(frozen=True, eq=False, repr=False) -class BaseRelation(FakeAPIObject, Hashable): - path: Path - type: Optional[RelationType] = None - quote_character: str = '"' - # Python 3.11 requires that these use default_factory instead of simple default - # ValueError: mutable default <class 'dbt.contracts.relation.Policy'> for field include_policy is not allowed: use 
default_factory - include_policy: Policy = field(default_factory=lambda: Policy()) - quote_policy: Policy = field(default_factory=lambda: Policy()) - dbt_created: bool = False - - def _is_exactish_match(self, field: ComponentName, value: str) -> bool: - if self.dbt_created and self.quote_policy.get_part(field) is False: - return self.path.get_lowered_part(field) == value.lower() - else: - return self.path.get_part(field) == value - - @classmethod - def _get_field_named(cls, field_name): - for f, _ in cls._get_fields(): - if f.name == field_name: - return f - # this should be unreachable - raise ValueError(f"BaseRelation has no {field_name} field!") - - def __eq__(self, other): - if not isinstance(other, self.__class__): - return False - return self.to_dict(omit_none=True) == other.to_dict(omit_none=True) - - @classmethod - def get_default_quote_policy(cls) -> Policy: - return cls._get_field_named("quote_policy").default_factory() - - @classmethod - def get_default_include_policy(cls) -> Policy: - return cls._get_field_named("include_policy").default_factory() - - def get(self, key, default=None): - """Override `.get` to return a metadata object so we don't break - dbt_utils. - """ - if key == "metadata": - return {"type": self.__class__.__name__} - return super().get(key, default) - - def matches( - self, - database: Optional[str] = None, - schema: Optional[str] = None, - identifier: Optional[str] = None, - ) -> bool: - search = filter_null_values( - { - ComponentName.Database: database, - ComponentName.Schema: schema, - ComponentName.Identifier: identifier, - } - ) - - if not search: - # nothing was passed in - raise dbt.exceptions.DbtRuntimeError( - "Tried to match relation, but no search path was passed!" - ) - - exact_match = True - approximate_match = True - - for k, v in search.items(): - if not self._is_exactish_match(k, v): - exact_match = False - if str(self.path.get_lowered_part(k)).strip(self.quote_character) != v.lower().strip( - self.quote_character - ): - approximate_match = False # type: ignore[union-attr] - - if approximate_match and not exact_match: - target = self.create(database=database, schema=schema, identifier=identifier) - raise ApproximateMatchError(target, self) - - return exact_match - - def replace_path(self, **kwargs): - return self.replace(path=self.path.replace(**kwargs)) - - def quote( - self: Self, - database: Optional[bool] = None, - schema: Optional[bool] = None, - identifier: Optional[bool] = None, - ) -> Self: - policy = filter_null_values( - { - ComponentName.Database: database, - ComponentName.Schema: schema, - ComponentName.Identifier: identifier, - } - ) - - new_quote_policy = self.quote_policy.replace_dict(policy) - return self.replace(quote_policy=new_quote_policy) - - def include( - self: Self, - database: Optional[bool] = None, - schema: Optional[bool] = None, - identifier: Optional[bool] = None, - ) -> Self: - policy = filter_null_values( - { - ComponentName.Database: database, - ComponentName.Schema: schema, - ComponentName.Identifier: identifier, - } - ) - - new_include_policy = self.include_policy.replace_dict(policy) - return self.replace(include_policy=new_include_policy) - - def information_schema(self, view_name=None) -> "InformationSchema": - # some of our data comes from jinja, where things can be `Undefined`. 
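-        # Illustrative example with hypothetical names: for a relation rendered
-        # as "analytics"."dbt_schema"."orders", the information schema relation
-        # built here typically renders as "analytics".information_schema; exact
-        # casing and quoting are adapter-specific.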
- if not isinstance(view_name, str): - view_name = None - - # Kick the user-supplied schema out of the information schema relation - # Instead address this as <database>.information_schema by default - info_schema = InformationSchema.from_relation(self, view_name) - return info_schema.incorporate(path={"schema": None}) - - def information_schema_only(self) -> "InformationSchema": - return self.information_schema() - - def without_identifier(self) -> "BaseRelation": - """Return a form of this relation that only has the database and schema - set to included. To get the appropriately-quoted form the schema out of - the result (for use as part of a query), use `.render()`. To get the - raw database or schema name, use `.database` or `.schema`. - - The hash of the returned object is the result of render(). - """ - return self.include(identifier=False).replace_path(identifier=None) - - def _render_iterator(self) -> Iterator[Tuple[Optional[ComponentName], Optional[str]]]: - - for key in ComponentName: - path_part: Optional[str] = None - if self.include_policy.get_part(key): - path_part = self.path.get_part(key) - if path_part is not None and self.quote_policy.get_part(key): - path_part = self.quoted(path_part) - yield key, path_part - - def render(self) -> str: - # if there is nothing set, this will return the empty string. - return ".".join(part for _, part in self._render_iterator() if part is not None) - - def quoted(self, identifier): - return "{quote_char}{identifier}{quote_char}".format( - quote_char=self.quote_character, - identifier=identifier, - ) - - @classmethod - def create_from_source(cls: Type[Self], source: SourceDefinition, **kwargs: Any) -> Self: - source_quoting = source.quoting.to_dict(omit_none=True) - source_quoting.pop("column", None) - quote_policy = deep_merge( - cls.get_default_quote_policy().to_dict(omit_none=True), - source_quoting, - kwargs.get("quote_policy", {}), - ) - - return cls.create( - database=source.database, - schema=source.schema, - identifier=source.identifier, - quote_policy=quote_policy, - **kwargs, - ) - - @staticmethod - def add_ephemeral_prefix(name: str): - return f"__dbt__cte__{name}" - - @classmethod - def create_ephemeral_from_node( - cls: Type[Self], - config: HasQuoting, - node: ManifestNode, - ) -> Self: - # Note that ephemeral models are based on the name. 
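-        # e.g. a (hypothetical) ephemeral model named "orders" is injected into
-        # downstream queries as the CTE "__dbt__cte__orders".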
- identifier = cls.add_ephemeral_prefix(node.name) - return cls.create( - type=cls.CTE, - identifier=identifier, - ).quote(identifier=False) - - @classmethod - def create_from_node( - cls: Type[Self], - config: HasQuoting, - node, - quote_policy: Optional[Dict[str, bool]] = None, - **kwargs: Any, - ) -> Self: - if quote_policy is None: - quote_policy = {} - - quote_policy = dbt.utils.merge(config.quoting, quote_policy) - - return cls.create( - database=node.database, - schema=node.schema, - identifier=node.alias, - quote_policy=quote_policy, - **kwargs, - ) - - @classmethod - def create_from( - cls: Type[Self], - config: HasQuoting, - node: ResultNode, - **kwargs: Any, - ) -> Self: - if node.resource_type == NodeType.Source: - if not isinstance(node, SourceDefinition): - raise DbtInternalError( - "type mismatch, expected SourceDefinition but got {}".format(type(node)) - ) - return cls.create_from_source(node, **kwargs) - else: - # Can't use ManifestNode here because of parameterized generics - if not isinstance(node, (ParsedNode)): - raise DbtInternalError( - f"type mismatch, expected ManifestNode but got {type(node)}" - ) - return cls.create_from_node(config, node, **kwargs) - - @classmethod - def create( - cls: Type[Self], - database: Optional[str] = None, - schema: Optional[str] = None, - identifier: Optional[str] = None, - type: Optional[RelationType] = None, - **kwargs, - ) -> Self: - kwargs.update( - { - "path": { - "database": database, - "schema": schema, - "identifier": identifier, - }, - "type": type, - } - ) - return cls.from_dict(kwargs) - - def __repr__(self) -> str: - return "<{} {}>".format(self.__class__.__name__, self.render()) - - def __hash__(self) -> int: - return hash(self.render()) - - def __str__(self) -> str: - return self.render() - - @property - def database(self) -> Optional[str]: - return self.path.database - - @property - def schema(self) -> Optional[str]: - return self.path.schema - - @property - def identifier(self) -> Optional[str]: - return self.path.identifier - - @property - def table(self) -> Optional[str]: - return self.path.identifier - - # Here for compatibility with old Relation interface - @property - def name(self) -> Optional[str]: - return self.identifier - - @property - def is_table(self) -> bool: - return self.type == RelationType.Table - - @property - def is_cte(self) -> bool: - return self.type == RelationType.CTE - - @property - def is_view(self) -> bool: - return self.type == RelationType.View - - @property - def is_materialized_view(self) -> bool: - return self.type == RelationType.MaterializedView - - @classproperty - def Table(cls) -> str: - return str(RelationType.Table) - - @classproperty - def CTE(cls) -> str: - return str(RelationType.CTE) - - @classproperty - def View(cls) -> str: - return str(RelationType.View) - - @classproperty - def External(cls) -> str: - return str(RelationType.External) - - @classproperty - def MaterializedView(cls) -> str: - return str(RelationType.MaterializedView) - - @classproperty - def get_relation_type(cls) -> Type[RelationType]: - return RelationType - - -Info = TypeVar("Info", bound="InformationSchema") - - -@dataclass(frozen=True, eq=False, repr=False) -class InformationSchema(BaseRelation): - information_schema_view: Optional[str] = None - - def __post_init__(self): - if not isinstance(self.information_schema_view, (type(None), str)): - raise dbt.exceptions.CompilationError( - "Got an invalid name: {}".format(self.information_schema_view) - ) - - @classmethod - def get_path(cls, relation: 
BaseRelation, information_schema_view: Optional[str]) -> Path: - return Path( - database=relation.database, - schema=relation.schema, - identifier="INFORMATION_SCHEMA", - ) - - @classmethod - def get_include_policy( - cls, - relation, - information_schema_view: Optional[str], - ) -> Policy: - return relation.include_policy.replace( - database=relation.database is not None, - schema=False, - identifier=True, - ) - - @classmethod - def get_quote_policy( - cls, - relation, - information_schema_view: Optional[str], - ) -> Policy: - return relation.quote_policy.replace( - identifier=False, - ) - - @classmethod - def from_relation( - cls: Type[Info], - relation: BaseRelation, - information_schema_view: Optional[str], - ) -> Info: - include_policy = cls.get_include_policy(relation, information_schema_view) - quote_policy = cls.get_quote_policy(relation, information_schema_view) - path = cls.get_path(relation, information_schema_view) - return cls( - type=RelationType.View, - path=path, - include_policy=include_policy, - quote_policy=quote_policy, - information_schema_view=information_schema_view, - ) - - def _render_iterator(self): - for k, v in super()._render_iterator(): - yield k, v - yield None, self.information_schema_view - - -class SchemaSearchMap(Dict[InformationSchema, Set[Optional[str]]]): - """A utility class to keep track of what information_schema tables to - search for what schemas. The schema values are all lowercased to avoid - duplication. - """ - - def add(self, relation: BaseRelation): - key = relation.information_schema_only() - if key not in self: - self[key] = set() - schema: Optional[str] = None - if relation.schema is not None: - schema = relation.schema.lower() - self[key].add(schema) - - def search(self) -> Iterator[Tuple[InformationSchema, Optional[str]]]: - for information_schema_name, schemas in self.items(): - for schema in schemas: - yield information_schema_name, schema - - def flatten(self, allow_multiple_databases: bool = False): - new = self.__class__() - - # make sure we don't have multiple databases if allow_multiple_databases is set to False - if not allow_multiple_databases: - seen = {r.database.lower() for r in self if r.database} - if len(seen) > 1: - raise MultipleDatabasesNotAllowedError(seen) - - for information_schema_name, schema in self.search(): - path = {"database": information_schema_name.database, "schema": schema} - new.add( - information_schema_name.incorporate( - path=path, - quote_policy={"database": False}, - include_policy={"database": False}, - ) - ) - - return new diff --git a/core/dbt/adapters/cache.py b/core/dbt/adapters/cache.py deleted file mode 100644 index 3e783de21e9..00000000000 --- a/core/dbt/adapters/cache.py +++ /dev/null @@ -1,520 +0,0 @@ -import threading -from copy import deepcopy -from typing import Any, Dict, Iterable, List, Optional, Set, Tuple - -from dbt.adapters.reference_keys import ( - _make_ref_key, - _make_ref_key_dict, - _ReferenceKey, -) -from dbt.exceptions import ( - DependentLinkNotCachedError, - NewNameAlreadyInCacheError, - NoneRelationFoundError, - ReferencedLinkNotCachedError, - TruncatedModelNameCausedCollisionError, -) -from dbt.events.functions import fire_event, fire_event_if -from dbt.events.types import CacheAction, CacheDumpGraph -from dbt.flags import get_flags -from dbt.utils import lowercase - - -def dot_separated(key: _ReferenceKey) -> str: - """Return the key in dot-separated string form. - - :param _ReferenceKey key: The key to stringify. 
- """ - return ".".join(map(str, key)) - - -class _CachedRelation: - """Nothing about _CachedRelation is guaranteed to be thread-safe! - - :attr str schema: The schema of this relation. - :attr str identifier: The identifier of this relation. - :attr Dict[_ReferenceKey, _CachedRelation] referenced_by: The relations - that refer to this relation. - :attr BaseRelation inner: The underlying dbt relation. - """ - - def __init__(self, inner): - self.referenced_by = {} - self.inner = inner - - def __str__(self) -> str: - return ("_CachedRelation(database={}, schema={}, identifier={}, inner={})").format( - self.database, self.schema, self.identifier, self.inner - ) - - @property - def database(self) -> Optional[str]: - return lowercase(self.inner.database) - - @property - def schema(self) -> Optional[str]: - return lowercase(self.inner.schema) - - @property - def identifier(self) -> Optional[str]: - return lowercase(self.inner.identifier) - - def __copy__(self): - new = self.__class__(self.inner) - new.__dict__.update(self.__dict__) - return new - - def __deepcopy__(self, memo): - new = self.__class__(self.inner.incorporate()) - new.__dict__.update(self.__dict__) - new.referenced_by = deepcopy(self.referenced_by, memo) - - def is_referenced_by(self, key): - return key in self.referenced_by - - def key(self): - """Get the _ReferenceKey that represents this relation - - :return _ReferenceKey: A key for this relation. - """ - return _make_ref_key(self) - - def add_reference(self, referrer: "_CachedRelation"): - """Add a reference from referrer to self, indicating that if this node - were drop...cascaded, the referrer would be dropped as well. - - :param _CachedRelation referrer: The node that refers to this node. - """ - self.referenced_by[referrer.key()] = referrer - - def collect_consequences(self): - """Recursively collect a set of _ReferenceKeys that would - consequentially get dropped if this were dropped via - "drop ... cascade". - - :return Set[_ReferenceKey]: All the relations that would be dropped - """ - consequences = {self.key()} - for relation in self.referenced_by.values(): - consequences.update(relation.collect_consequences()) - return consequences - - def release_references(self, keys): - """Non-recursively indicate that an iterable of _ReferenceKey no longer - exist. Unknown keys are ignored. - - :param Iterable[_ReferenceKey] keys: The keys to drop. - """ - keys = set(self.referenced_by) & set(keys) - for key in keys: - self.referenced_by.pop(key) - - def rename(self, new_relation): - """Rename this cached relation to new_relation. - Note that this will change the output of key(), all refs must be - updated! - - :param _CachedRelation new_relation: The new name to apply to the - relation - """ - # Relations store this stuff inside their `path` dict. But they - # also store a table_name, and usually use it in their .render(), - # so we need to update that as well. It doesn't appear that - # table_name is ever anything but the identifier (via .create()) - self.inner = self.inner.incorporate( - path={ - "database": new_relation.inner.database, - "schema": new_relation.inner.schema, - "identifier": new_relation.inner.identifier, - }, - ) - - def rename_key(self, old_key, new_key): - """Rename a reference that may or may not exist. Only handles the - reference itself, so this is the other half of what `rename` does. - - If old_key is not in referenced_by, this is a no-op. - - :param _ReferenceKey old_key: The old key to be renamed. 
- :param _ReferenceKey new_key: The new key to rename to. - :raises InternalError: If the new key already exists. - """ - if new_key in self.referenced_by: - raise NewNameAlreadyInCacheError(old_key, new_key) - - if old_key not in self.referenced_by: - return - value = self.referenced_by.pop(old_key) - self.referenced_by[new_key] = value - - def dump_graph_entry(self): - """Return a key/value pair representing this key and its referents. - - return List[str]: The dot-separated form of all referent keys. - """ - return [dot_separated(r) for r in self.referenced_by] - - -class RelationsCache: - """A cache of the relations known to dbt. Keeps track of relationships - declared between tables and handles renames/drops as a real database would. - - :attr Dict[_ReferenceKey, _CachedRelation] relations: The known relations. - :attr threading.RLock lock: The lock around relations, held during updates. - The adapters also hold this lock while filling the cache. - :attr Set[str] schemas: The set of known/cached schemas, all lowercased. - """ - - def __init__(self) -> None: - self.relations: Dict[_ReferenceKey, _CachedRelation] = {} - self.lock = threading.RLock() - self.schemas: Set[Tuple[Optional[str], Optional[str]]] = set() - - def add_schema( - self, - database: Optional[str], - schema: Optional[str], - ) -> None: - """Add a schema to the set of known schemas (case-insensitive) - - :param database: The database name to add. - :param schema: The schema name to add. - """ - self.schemas.add((lowercase(database), lowercase(schema))) - - def drop_schema( - self, - database: Optional[str], - schema: Optional[str], - ) -> None: - """Drop the given schema and remove it from the set of known schemas. - - Then remove all its contents (and their dependents, etc) as well. - """ - key = (lowercase(database), lowercase(schema)) - if key not in self.schemas: - return - - # avoid iterating over self.relations while removing things by - # collecting the list first. - - with self.lock: - to_remove = self._list_relations_in_schema(database, schema) - self._remove_all(to_remove) - # handle a drop_schema race by using discard() over remove() - self.schemas.discard(key) - - def update_schemas(self, schemas: Iterable[Tuple[Optional[str], str]]): - """Add multiple schemas to the set of known schemas (case-insensitive) - - :param schemas: An iterable of the schema names to add. - """ - self.schemas.update((lowercase(d), s.lower()) for (d, s) in schemas) - - def __contains__(self, schema_id: Tuple[Optional[str], str]): - """A schema is 'in' the relations cache if it is in the set of cached - schemas. - - :param schema_id: The db name and schema name to look up. - """ - db, schema = schema_id - return (lowercase(db), schema.lower()) in self.schemas - - def dump_graph(self): - """Dump a key-only representation of the schema to a dictionary. Every - known relation is a key with a value of a list of keys it is referenced - by. - """ - # we have to hold the lock for the entire dump, if other threads modify - # self.relations or any cache entry's referenced_by during iteration - # it's a runtime error! - with self.lock: - return {dot_separated(k): str(v.dump_graph_entry()) for k, v in self.relations.items()} - - def _setdefault(self, relation: _CachedRelation): - """Add a relation to the cache, or return it if it already exists. - - :param _CachedRelation relation: The relation to set or get. 
- :return _CachedRelation: The relation stored under the given relation's - key - """ - self.add_schema(relation.database, relation.schema) - key = relation.key() - return self.relations.setdefault(key, relation) - - def _add_link(self, referenced_key, dependent_key): - """Add a link between two relations to the database. Both the old and - new entries must alraedy exist in the database. - - :param _ReferenceKey referenced_key: The key identifying the referenced - model (the one that if dropped will drop the dependent model). - :param _ReferenceKey dependent_key: The key identifying the dependent - model. - :raises InternalError: If either entry does not exist. - """ - referenced = self.relations.get(referenced_key) - if referenced is None: - return - if referenced is None: - raise ReferencedLinkNotCachedError(referenced_key) - - dependent = self.relations.get(dependent_key) - if dependent is None: - raise DependentLinkNotCachedError(dependent_key) - - assert dependent is not None # we just raised! - - referenced.add_reference(dependent) - - # This is called in plugins/postgres/dbt/adapters/postgres/impl.py - def add_link(self, referenced, dependent): - """Add a link between two relations to the database. If either relation - does not exist, it will be added as an "external" relation. - - The dependent model refers _to_ the referenced model. So, given - arguments of (jake_test, bar, jake_test, foo): - both values are in the schema jake_test and foo is a view that refers - to bar, so "drop bar cascade" will drop foo and all of foo's - dependents. - - :param BaseRelation referenced: The referenced model. - :param BaseRelation dependent: The dependent model. - :raises InternalError: If either entry does not exist. - """ - ref_key = _make_ref_key(referenced) - dep_key = _make_ref_key(dependent) - if (ref_key.database, ref_key.schema) not in self: - # if we have not cached the referenced schema at all, we must be - # referring to a table outside our control. There's no need to make - # a link - we will never drop the referenced relation during a run. - fire_event( - CacheAction( - ref_key=ref_key._asdict(), - ref_key_2=dep_key._asdict(), - ) - ) - return - if ref_key not in self.relations: - # Insert a dummy "external" relation. - referenced = referenced.replace(type=referenced.External) - self.add(referenced) - if dep_key not in self.relations: - # Insert a dummy "external" relation. - dependent = dependent.replace(type=referenced.External) - self.add(dependent) - fire_event( - CacheAction( - action="add_link", - ref_key=dep_key._asdict(), - ref_key_2=ref_key._asdict(), - ) - ) - with self.lock: - self._add_link(ref_key, dep_key) - - def add(self, relation): - """Add the relation inner to the cache, under the schema schema and - identifier identifier - - :param BaseRelation relation: The underlying relation. - """ - flags = get_flags() - cached = _CachedRelation(relation) - fire_event_if( - flags.LOG_CACHE_EVENTS, - lambda: CacheDumpGraph(before_after="before", action="adding", dump=self.dump_graph()), - ) - fire_event(CacheAction(action="add_relation", ref_key=_make_ref_key_dict(cached))) - - with self.lock: - self._setdefault(cached) - fire_event_if( - flags.LOG_CACHE_EVENTS, - lambda: CacheDumpGraph(before_after="after", action="adding", dump=self.dump_graph()), - ) - - def _remove_refs(self, keys): - """Removes all references to all entries in keys. This does not - cascade! - - :param Iterable[_ReferenceKey] keys: The keys to remove. 
- """ - # remove direct refs - for key in keys: - del self.relations[key] - # then remove all entries from each child - for cached in self.relations.values(): - cached.release_references(keys) - - def drop(self, relation): - """Drop the named relation and cascade it appropriately to all - dependent relations. - - Because dbt proactively does many `drop relation if exist ... cascade` - that are noops, nonexistent relation drops cause a debug log and no - other actions. - - :param str schema: The schema of the relation to drop. - :param str identifier: The identifier of the relation to drop. - """ - dropped_key = _make_ref_key(relation) - dropped_key_msg = _make_ref_key_dict(relation) - fire_event(CacheAction(action="drop_relation", ref_key=dropped_key_msg)) - with self.lock: - if dropped_key not in self.relations: - fire_event(CacheAction(action="drop_missing_relation", ref_key=dropped_key_msg)) - return - consequences = self.relations[dropped_key].collect_consequences() - # convert from a list of _ReferenceKeys to a list of ReferenceKeyMsgs - consequence_msgs = [key._asdict() for key in consequences] - fire_event( - CacheAction( - action="drop_cascade", ref_key=dropped_key_msg, ref_list=consequence_msgs - ) - ) - self._remove_refs(consequences) - - def _rename_relation(self, old_key, new_relation): - """Rename a relation named old_key to new_key, updating references. - Return whether or not there was a key to rename. - - :param _ReferenceKey old_key: The existing key, to rename from. - :param _CachedRelation new_key: The new relation, to rename to. - """ - # On the database level, a rename updates all values that were - # previously referenced by old_name to be referenced by new_name. - # basically, the name changes but some underlying ID moves. Kind of - # like an object reference! - relation = self.relations.pop(old_key) - new_key = new_relation.key() - - # relation has to rename its innards, so it needs the _CachedRelation. - relation.rename(new_relation) - # update all the relations that refer to it - for cached in self.relations.values(): - if cached.is_referenced_by(old_key): - fire_event( - CacheAction( - action="update_reference", - ref_key=_make_ref_key_dict(old_key), - ref_key_2=_make_ref_key_dict(new_key), - ref_key_3=_make_ref_key_dict(cached.key()), - ) - ) - - cached.rename_key(old_key, new_key) - - self.relations[new_key] = relation - # also fixup the schemas! - self.add_schema(new_key.database, new_key.schema) - - return True - - def _check_rename_constraints(self, old_key, new_key): - """Check the rename constraints, and return whether or not the rename - can proceed. - - If the new key is already present, that is an error. - If the old key is absent, we debug log and return False, assuming it's - a temp table being renamed. - - :param _ReferenceKey old_key: The existing key, to rename from. - :param _ReferenceKey new_key: The new key, to rename to. - :return bool: If the old relation exists for renaming. - :raises InternalError: If the new key is already present. - """ - if new_key in self.relations: - # Tell user when collision caused by model names truncated during - # materialization. - raise TruncatedModelNameCausedCollisionError(new_key, self.relations) - - if old_key not in self.relations: - fire_event(CacheAction(action="temporary_relation", ref_key=old_key._asdict())) - return False - return True - - def rename(self, old, new): - """Rename the old schema/identifier to the new schema/identifier and - update references. 
- - If the new schema/identifier is already present, that is an error. - If the schema/identifier key is absent, we only debug log and return, - assuming it's a temp table being renamed. - - :param BaseRelation old: The existing relation name information. - :param BaseRelation new: The new relation name information. - :raises InternalError: If the new key is already present. - """ - old_key = _make_ref_key(old) - new_key = _make_ref_key(new) - fire_event( - CacheAction( - action="rename_relation", - ref_key=old_key._asdict(), - ref_key_2=new_key._asdict(), - ) - ) - flags = get_flags() - fire_event_if( - flags.LOG_CACHE_EVENTS, - lambda: CacheDumpGraph(before_after="before", action="rename", dump=self.dump_graph()), - ) - - with self.lock: - if self._check_rename_constraints(old_key, new_key): - self._rename_relation(old_key, _CachedRelation(new)) - else: - self._setdefault(_CachedRelation(new)) - - fire_event_if( - flags.LOG_CACHE_EVENTS, - lambda: CacheDumpGraph(before_after="after", action="rename", dump=self.dump_graph()), - ) - - def get_relations(self, database: Optional[str], schema: Optional[str]) -> List[Any]: - """Case-insensitively yield all relations matching the given schema. - - :param str schema: The case-insensitive schema name to list from. - :return List[BaseRelation]: The list of relations with the given - schema - """ - database = lowercase(database) - schema = lowercase(schema) - with self.lock: - results = [ - r.inner - for r in self.relations.values() - if (lowercase(r.schema) == schema and lowercase(r.database) == database) - ] - - if None in results: - raise NoneRelationFoundError() - return results - - def clear(self): - """Clear the cache""" - with self.lock: - self.relations.clear() - self.schemas.clear() - - def _list_relations_in_schema( - self, database: Optional[str], schema: Optional[str] - ) -> List[_CachedRelation]: - """Get the relations in a schema. Callers should hold the lock.""" - key = (lowercase(database), lowercase(schema)) - - to_remove: List[_CachedRelation] = [] - for cachekey, relation in self.relations.items(): - if (cachekey.database, cachekey.schema) == key: - to_remove.append(relation) - return to_remove - - def _remove_all(self, to_remove: List[_CachedRelation]): - """Remove all the listed relations. Ignore relations that have been - cascaded out. 
- """ - for relation in to_remove: - # it may have been cascaded out already - drop_key = _make_ref_key(relation) - if drop_key in self.relations: - self.drop(drop_key) diff --git a/core/dbt/adapters/factory.py b/core/dbt/adapters/factory.py deleted file mode 100644 index c5e94c27466..00000000000 --- a/core/dbt/adapters/factory.py +++ /dev/null @@ -1,237 +0,0 @@ -import threading -import traceback -from contextlib import contextmanager -from importlib import import_module -from pathlib import Path -from typing import Any, Dict, List, Optional, Set, Type - -from dbt.adapters.base.plugin import AdapterPlugin -from dbt.adapters.protocol import AdapterConfig, AdapterProtocol, RelationProtocol -from dbt.contracts.connection import AdapterRequiredConfig, Credentials -from dbt.events.functions import fire_event -from dbt.events.types import AdapterImportError, PluginLoadError, AdapterRegistered -from dbt.exceptions import DbtInternalError, DbtRuntimeError -from dbt.include.global_project import PACKAGE_PATH as GLOBAL_PROJECT_PATH -from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME -from dbt.semver import VersionSpecifier - -Adapter = AdapterProtocol - - -class AdapterContainer: - def __init__(self): - self.lock = threading.Lock() - self.adapters: Dict[str, Adapter] = {} - self.plugins: Dict[str, AdapterPlugin] = {} - # map package names to their include paths - self.packages: Dict[str, Path] = { - GLOBAL_PROJECT_NAME: Path(GLOBAL_PROJECT_PATH), - } - - def get_plugin_by_name(self, name: str) -> AdapterPlugin: - with self.lock: - if name in self.plugins: - return self.plugins[name] - names = ", ".join(self.plugins.keys()) - - message = f"Invalid adapter type {name}! Must be one of {names}" - raise DbtRuntimeError(message) - - def get_adapter_class_by_name(self, name: str) -> Type[Adapter]: - plugin = self.get_plugin_by_name(name) - return plugin.adapter - - def get_relation_class_by_name(self, name: str) -> Type[RelationProtocol]: - adapter = self.get_adapter_class_by_name(name) - return adapter.Relation - - def get_config_class_by_name(self, name: str) -> Type[AdapterConfig]: - adapter = self.get_adapter_class_by_name(name) - return adapter.AdapterSpecificConfigs - - def load_plugin(self, name: str) -> Type[Credentials]: - # this doesn't need a lock: in the worst case we'll overwrite packages - # and adapter_type entries with the same value, as they're all - # singletons - try: - # mypy doesn't think modules have any attributes. - mod: Any = import_module("." + name, "dbt.adapters") - except ModuleNotFoundError as exc: - # if we failed to import the target module in particular, inform - # the user about it via a runtime error - if exc.name == "dbt.adapters." + name: - fire_event(AdapterImportError(exc=str(exc))) - raise DbtRuntimeError(f"Could not find adapter type {name}!") - # otherwise, the error had to have come from some underlying - # library. Log the stack trace. 
- - fire_event(PluginLoadError(exc_info=traceback.format_exc())) - raise - plugin: AdapterPlugin = mod.Plugin - plugin_type = plugin.adapter.type() - - if plugin_type != name: - raise DbtRuntimeError( - f"Expected to find adapter with type named {name}, got " - f"adapter with type {plugin_type}" - ) - - with self.lock: - # things do hold the lock to iterate over it so we need it to add - self.plugins[name] = plugin - - self.packages[plugin.project_name] = Path(plugin.include_path) - - for dep in plugin.dependencies: - self.load_plugin(dep) - - return plugin.credentials - - def register_adapter(self, config: AdapterRequiredConfig) -> None: - adapter_name = config.credentials.type - adapter_type = self.get_adapter_class_by_name(adapter_name) - adapter_version = import_module(f".{adapter_name}.__version__", "dbt.adapters").version - adapter_version_specifier = VersionSpecifier.from_version_string( - adapter_version - ).to_version_string() - fire_event( - AdapterRegistered(adapter_name=adapter_name, adapter_version=adapter_version_specifier) - ) - with self.lock: - if adapter_name in self.adapters: - # this shouldn't really happen... - return - - adapter: Adapter = adapter_type(config) # type: ignore - self.adapters[adapter_name] = adapter - - def lookup_adapter(self, adapter_name: str) -> Adapter: - return self.adapters[adapter_name] - - def reset_adapters(self): - """Clear the adapters. This is useful for tests, which change configs.""" - with self.lock: - for adapter in self.adapters.values(): - adapter.cleanup_connections() - self.adapters.clear() - - def cleanup_connections(self): - """Only clean up the adapter connections list without resetting the - actual adapters. - """ - with self.lock: - for adapter in self.adapters.values(): - adapter.cleanup_connections() - - def get_adapter_plugins(self, name: Optional[str]) -> List[AdapterPlugin]: - """Iterate over the known adapter plugins. If a name is provided, - iterate in dependency order over the named plugin and its dependencies. 
- """ - if name is None: - return list(self.plugins.values()) - - plugins: List[AdapterPlugin] = [] - seen: Set[str] = set() - plugin_names: List[str] = [name] - while plugin_names: - plugin_name = plugin_names[0] - plugin_names = plugin_names[1:] - try: - plugin = self.plugins[plugin_name] - except KeyError: - raise DbtInternalError(f"No plugin found for {plugin_name}") from None - plugins.append(plugin) - seen.add(plugin_name) - for dep in plugin.dependencies: - if dep not in seen: - plugin_names.append(dep) - return plugins - - def get_adapter_package_names(self, name: Optional[str]) -> List[str]: - package_names: List[str] = [p.project_name for p in self.get_adapter_plugins(name)] - package_names.append(GLOBAL_PROJECT_NAME) - return package_names - - def get_include_paths(self, name: Optional[str]) -> List[Path]: - paths = [] - for package_name in self.get_adapter_package_names(name): - try: - path = self.packages[package_name] - except KeyError: - raise DbtInternalError(f"No internal package listing found for {package_name}") - paths.append(path) - return paths - - def get_adapter_type_names(self, name: Optional[str]) -> List[str]: - return [p.adapter.type() for p in self.get_adapter_plugins(name)] - - def get_adapter_constraint_support(self, name: Optional[str]) -> List[str]: - return self.lookup_adapter(name).CONSTRAINT_SUPPORT # type: ignore - - -FACTORY: AdapterContainer = AdapterContainer() - - -def register_adapter(config: AdapterRequiredConfig) -> None: - FACTORY.register_adapter(config) - - -def get_adapter(config: AdapterRequiredConfig): - return FACTORY.lookup_adapter(config.credentials.type) - - -def get_adapter_by_type(adapter_type): - return FACTORY.lookup_adapter(adapter_type) - - -def reset_adapters(): - """Clear the adapters. This is useful for tests, which change configs.""" - FACTORY.reset_adapters() - - -def cleanup_connections(): - """Only clean up the adapter connections list without resetting the actual - adapters. 
- """ - FACTORY.cleanup_connections() - - -def get_adapter_class_by_name(name: str) -> Type[AdapterProtocol]: - return FACTORY.get_adapter_class_by_name(name) - - -def get_config_class_by_name(name: str) -> Type[AdapterConfig]: - return FACTORY.get_config_class_by_name(name) - - -def get_relation_class_by_name(name: str) -> Type[RelationProtocol]: - return FACTORY.get_relation_class_by_name(name) - - -def load_plugin(name: str) -> Type[Credentials]: - return FACTORY.load_plugin(name) - - -def get_include_paths(name: Optional[str]) -> List[Path]: - return FACTORY.get_include_paths(name) - - -def get_adapter_package_names(name: Optional[str]) -> List[str]: - return FACTORY.get_adapter_package_names(name) - - -def get_adapter_type_names(name: Optional[str]) -> List[str]: - return FACTORY.get_adapter_type_names(name) - - -def get_adapter_constraint_support(name: Optional[str]) -> List[str]: - return FACTORY.get_adapter_constraint_support(name) - - -@contextmanager -def adapter_management(): - reset_adapters() - try: - yield - finally: - cleanup_connections() diff --git a/core/dbt/adapters/protocol.py b/core/dbt/adapters/protocol.py deleted file mode 100644 index 13b9bd79968..00000000000 --- a/core/dbt/adapters/protocol.py +++ /dev/null @@ -1,158 +0,0 @@ -from dataclasses import dataclass -from typing import ( - Type, - Hashable, - Optional, - ContextManager, - List, - Generic, - TypeVar, - Tuple, - Dict, - Any, -) -from typing_extensions import Protocol - -import agate - -from dbt.contracts.connection import Connection, AdapterRequiredConfig, AdapterResponse -from dbt.contracts.graph.nodes import ResultNode, ManifestNode -from dbt.contracts.graph.model_config import BaseConfig -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.relation import Policy, HasQuoting - -from dbt.graph import Graph - - -@dataclass -class AdapterConfig(BaseConfig): - pass - - -class ConnectionManagerProtocol(Protocol): - TYPE: str - - -class ColumnProtocol(Protocol): - pass - - -Self = TypeVar("Self", bound="RelationProtocol") - - -class RelationProtocol(Protocol): - @classmethod - def get_default_quote_policy(cls) -> Policy: - ... - - @classmethod - def create_from(cls: Type[Self], config: HasQuoting, node: ResultNode) -> Self: - ... - - -class CompilerProtocol(Protocol): - def compile(self, manifest: Manifest, write=True) -> Graph: - ... - - def compile_node( - self, - node: ManifestNode, - manifest: Manifest, - extra_context: Optional[Dict[str, Any]] = None, - ) -> ManifestNode: - ... - - -AdapterConfig_T = TypeVar("AdapterConfig_T", bound=AdapterConfig) -ConnectionManager_T = TypeVar("ConnectionManager_T", bound=ConnectionManagerProtocol) -Relation_T = TypeVar("Relation_T", bound=RelationProtocol) -Column_T = TypeVar("Column_T", bound=ColumnProtocol) -Compiler_T = TypeVar("Compiler_T", bound=CompilerProtocol) - - -# TODO CT-211 -class AdapterProtocol( # type: ignore[misc] - Protocol, - Generic[ - AdapterConfig_T, - ConnectionManager_T, - Relation_T, - Column_T, - Compiler_T, - ], -): - # N.B. Technically these are ClassVars, but mypy doesn't support putting type vars in a - # ClassVar due to the restrictiveness of PEP-526 - # See: https://github.com/python/mypy/issues/5144 - AdapterSpecificConfigs: Type[AdapterConfig_T] - Column: Type[Column_T] - Relation: Type[Relation_T] - ConnectionManager: Type[ConnectionManager_T] - connections: ConnectionManager_T - - def __init__(self, config: AdapterRequiredConfig): - ... 
- - @classmethod - def type(cls) -> str: - pass - - def set_query_header(self, manifest: Manifest) -> None: - ... - - @staticmethod - def get_thread_identifier() -> Hashable: - ... - - def get_thread_connection(self) -> Connection: - ... - - def set_thread_connection(self, conn: Connection) -> None: - ... - - def get_if_exists(self) -> Optional[Connection]: - ... - - def clear_thread_connection(self) -> None: - ... - - def clear_transaction(self) -> None: - ... - - def exception_handler(self, sql: str) -> ContextManager: - ... - - def set_connection_name(self, name: Optional[str] = None) -> Connection: - ... - - def cancel_open(self) -> Optional[List[str]]: - ... - - def open(cls, connection: Connection) -> Connection: - ... - - def release(self) -> None: - ... - - def cleanup_all(self) -> None: - ... - - def begin(self) -> None: - ... - - def commit(self) -> None: - ... - - def close(cls, connection: Connection) -> Connection: - ... - - def commit_if_has_connection(self) -> None: - ... - - def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False - ) -> Tuple[AdapterResponse, agate.Table]: - ... - - def get_compiler(self) -> Compiler_T: - ... diff --git a/core/dbt/adapters/reference_keys.py b/core/dbt/adapters/reference_keys.py deleted file mode 100644 index 53a0a9d9819..00000000000 --- a/core/dbt/adapters/reference_keys.py +++ /dev/null @@ -1,37 +0,0 @@ -# this module exists to resolve circular imports with the events module - -from collections import namedtuple -from typing import Any, Optional - - -_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier") - - -def lowercase(value: Optional[str]) -> Optional[str]: - if value is None: - return None - else: - return value.lower() - - -# For backwards compatibility. New code should use _make_ref_key -def _make_key(relation: Any) -> _ReferenceKey: - return _make_ref_key(relation) - - -def _make_ref_key(relation: Any) -> _ReferenceKey: - """Make _ReferenceKeys with lowercase values for the cache so we don't have - to keep track of quoting - """ - # databases and schemas can both be None - return _ReferenceKey( - lowercase(relation.database), lowercase(relation.schema), lowercase(relation.identifier) - ) - - -def _make_ref_key_dict(relation: Any): - return { - "database": relation.database, - "schema": relation.schema, - "identifier": relation.identifier, - } diff --git a/core/dbt/adapters/relation_configs/README.md b/core/dbt/adapters/relation_configs/README.md deleted file mode 100644 index 6be3bc59d12..00000000000 --- a/core/dbt/adapters/relation_configs/README.md +++ /dev/null @@ -1,25 +0,0 @@ -# RelationConfig -This package serves as an initial abstraction for managing the inspection of existing relations and determining -changes on those relations. It arose from the materialized view work and is currently only supporting -materialized views for Postgres and Redshift as well as dynamic tables for Snowflake. There are three main -classes in this package. - -## RelationConfigBase -This is a very small class that only has a `from_dict()` method and a default `NotImplementedError()`. At some -point this could be replaced by a more robust framework, like `mashumaro` or `pydantic`. - -## RelationConfigChange -This class inherits from `RelationConfigBase` ; however, this can be thought of as a separate class. The subclassing -merely points to the idea that both classes would likely inherit from the same class in a `mashumaro` or -`pydantic` implementation. This class is much more restricted in attribution. 
It should really only -ever need an `action` and a `context`. This can be thought of as being analogous to a web request. You need to -know what you're doing (`action`: 'create' = GET, 'drop' = DELETE, etc.) and the information (`context`) needed -to make the change. In our scenarios, the context tends to be an instance of `RelationConfigBase` corresponding -to the new state. - -## RelationConfigValidationMixin -This mixin provides optional validation mechanics that can be applied to either `RelationConfigBase` or -`RelationConfigChange` subclasses. A validation rule is a combination of a `validation_check`, something -that should evaluate to `True`, and an optional `validation_error`, an instance of `DbtRuntimeError` -that should be raised in the event the `validation_check` fails. While optional, it's recommended that -the `validation_error` be provided for clearer transparency to the end user. diff --git a/core/dbt/adapters/relation_configs/__init__.py b/core/dbt/adapters/relation_configs/__init__.py deleted file mode 100644 index b8c73447a68..00000000000 --- a/core/dbt/adapters/relation_configs/__init__.py +++ /dev/null @@ -1,12 +0,0 @@ -from dbt.adapters.relation_configs.config_base import ( # noqa: F401 - RelationConfigBase, - RelationResults, -) -from dbt.adapters.relation_configs.config_change import ( # noqa: F401 - RelationConfigChangeAction, - RelationConfigChange, -) -from dbt.adapters.relation_configs.config_validation import ( # noqa: F401 - RelationConfigValidationMixin, - RelationConfigValidationRule, -) diff --git a/core/dbt/adapters/relation_configs/config_base.py b/core/dbt/adapters/relation_configs/config_base.py deleted file mode 100644 index 9d0cddb0d21..00000000000 --- a/core/dbt/adapters/relation_configs/config_base.py +++ /dev/null @@ -1,44 +0,0 @@ -from dataclasses import dataclass -from typing import Union, Dict - -import agate -from dbt.utils import filter_null_values - - -""" -This is what relation metadata from the database looks like. It's a dictionary because there will be -multiple grains of data for a single object. For example, a materialized view in Postgres has base level information, -like name. But it also can have multiple indexes, which needs to be a separate query. It might look like this: - -{ - "base": agate.Row({"table_name": "table_abc", "query": "select * from table_def"}) - "indexes": agate.Table("rows": [ - agate.Row({"name": "index_a", "columns": ["column_a"], "type": "hash", "unique": False}), - agate.Row({"name": "index_b", "columns": ["time_dim_a"], "type": "btree", "unique": False}), - ]) -} -""" -RelationResults = Dict[str, Union[agate.Row, agate.Table]] - - -@dataclass(frozen=True) -class RelationConfigBase: - @classmethod - def from_dict(cls, kwargs_dict) -> "RelationConfigBase": - """ - This assumes the subclass of `RelationConfigBase` is flat, in the sense that no attribute is - itself another subclass of `RelationConfigBase`. If that's not the case, this should be overridden - to manually manage that complexity. - - Args: - kwargs_dict: the dict representation of this instance - - Returns: the `RelationConfigBase` representation associated with the provided dict - """ - return cls(**filter_null_values(kwargs_dict)) # type: ignore - - @classmethod - def _not_implemented_error(cls) -> NotImplementedError: - return NotImplementedError( - "This relation type has not been fully configured for this adapter."
- ) diff --git a/core/dbt/adapters/relation_configs/config_change.py b/core/dbt/adapters/relation_configs/config_change.py deleted file mode 100644 index ac653fa5210..00000000000 --- a/core/dbt/adapters/relation_configs/config_change.py +++ /dev/null @@ -1,23 +0,0 @@ -from abc import ABC, abstractmethod -from dataclasses import dataclass -from typing import Hashable - -from dbt.adapters.relation_configs.config_base import RelationConfigBase -from dbt.dataclass_schema import StrEnum - - -class RelationConfigChangeAction(StrEnum): - alter = "alter" - create = "create" - drop = "drop" - - -@dataclass(frozen=True, eq=True, unsafe_hash=True) -class RelationConfigChange(RelationConfigBase, ABC): - action: RelationConfigChangeAction - context: Hashable # this is usually a RelationConfig, e.g. IndexConfig, but shouldn't be limited - - @property - @abstractmethod - def requires_full_refresh(self) -> bool: - raise self._not_implemented_error() diff --git a/core/dbt/adapters/relation_configs/config_validation.py b/core/dbt/adapters/relation_configs/config_validation.py deleted file mode 100644 index 17bf74bf3e7..00000000000 --- a/core/dbt/adapters/relation_configs/config_validation.py +++ /dev/null @@ -1,57 +0,0 @@ -from dataclasses import dataclass -from typing import Set, Optional - -from dbt.exceptions import DbtRuntimeError - - -@dataclass(frozen=True, eq=True, unsafe_hash=True) -class RelationConfigValidationRule: - validation_check: bool - validation_error: Optional[DbtRuntimeError] - - @property - def default_error(self): - return DbtRuntimeError( - "There was a validation error in preparing this relation config." - "No additional context was provided by this adapter." - ) - - -@dataclass(frozen=True) -class RelationConfigValidationMixin: - def __post_init__(self): - self.run_validation_rules() - - @property - def validation_rules(self) -> Set[RelationConfigValidationRule]: - """ - A set of validation rules to run against the object upon creation. - - A validation rule is a combination of a validation check (bool) and an optional error message. - - This defaults to no validation rules if not implemented. It's recommended to override this with values, - but that may not always be necessary. 
- - Returns: a set of validation rules - """ - return set() - - def run_validation_rules(self): - for validation_rule in self.validation_rules: - try: - assert validation_rule.validation_check - except AssertionError: - if validation_rule.validation_error: - raise validation_rule.validation_error - else: - raise validation_rule.default_error - self.run_child_validation_rules() - - def run_child_validation_rules(self): - for attr_value in vars(self).values(): - if hasattr(attr_value, "validation_rules"): - attr_value.run_validation_rules() - if isinstance(attr_value, set): - for member in attr_value: - if hasattr(member, "validation_rules"): - member.run_validation_rules() diff --git a/core/dbt/adapters/sql/__init__.py b/core/dbt/adapters/sql/__init__.py deleted file mode 100644 index 3535806364d..00000000000 --- a/core/dbt/adapters/sql/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# these are all just exports, #noqa them so flake8 will be happy -from dbt.adapters.sql.connections import SQLConnectionManager # noqa -from dbt.adapters.sql.impl import SQLAdapter # noqa diff --git a/core/dbt/adapters/sql/connections.py b/core/dbt/adapters/sql/connections.py deleted file mode 100644 index 43463d3a47a..00000000000 --- a/core/dbt/adapters/sql/connections.py +++ /dev/null @@ -1,190 +0,0 @@ -import abc -import time -from typing import List, Optional, Tuple, Any, Iterable, Dict, Union - -import agate - -import dbt.clients.agate_helper -import dbt.exceptions -from dbt.adapters.base import BaseConnectionManager -from dbt.contracts.connection import Connection, ConnectionState, AdapterResponse -from dbt.events.functions import fire_event -from dbt.events.types import ConnectionUsed, SQLQuery, SQLCommit, SQLQueryStatus -from dbt.events.contextvars import get_node_info -from dbt.utils import cast_to_str - - -class SQLConnectionManager(BaseConnectionManager): - """The default connection manager with some common SQL methods implemented. - - Methods to implement: - - exception_handler - - cancel - - get_response - - open - """ - - @abc.abstractmethod - def cancel(self, connection: Connection): - """Cancel the given connection.""" - raise dbt.exceptions.NotImplementedError("`cancel` is not implemented for this adapter!") - - def cancel_open(self) -> List[str]: - names = [] - this_connection = self.get_if_exists() - with self.lock: - for connection in self.thread_connections.values(): - if connection is this_connection: - continue - - # if the connection failed, the handle will be None so we have - # nothing to cancel. 
- if connection.handle is not None and connection.state == ConnectionState.OPEN: - self.cancel(connection) - if connection.name is not None: - names.append(connection.name) - return names - - def add_query( - self, - sql: str, - auto_begin: bool = True, - bindings: Optional[Any] = None, - abridge_sql_log: bool = False, - ) -> Tuple[Connection, Any]: - connection = self.get_thread_connection() - if auto_begin and connection.transaction_open is False: - self.begin() - fire_event( - ConnectionUsed( - conn_type=self.TYPE, - conn_name=cast_to_str(connection.name), - node_info=get_node_info(), - ) - ) - - with self.exception_handler(sql): - if abridge_sql_log: - log_sql = "{}...".format(sql[:512]) - else: - log_sql = sql - - fire_event( - SQLQuery( - conn_name=cast_to_str(connection.name), sql=log_sql, node_info=get_node_info() - ) - ) - pre = time.time() - - cursor = connection.handle.cursor() - cursor.execute(sql, bindings) - - fire_event( - SQLQueryStatus( - status=str(self.get_response(cursor)), - elapsed=round((time.time() - pre)), - node_info=get_node_info(), - ) - ) - - return connection, cursor - - @classmethod - @abc.abstractmethod - def get_response(cls, cursor: Any) -> AdapterResponse: - """Get the status of the cursor.""" - raise dbt.exceptions.NotImplementedError( - "`get_response` is not implemented for this adapter!" - ) - - @classmethod - def process_results( - cls, column_names: Iterable[str], rows: Iterable[Any] - ) -> List[Dict[str, Any]]: - # TODO CT-211 - unique_col_names = dict() # type: ignore[var-annotated] - # TODO CT-211 - for idx in range(len(column_names)): # type: ignore[arg-type] - # TODO CT-211 - col_name = column_names[idx] # type: ignore[index] - if col_name in unique_col_names: - unique_col_names[col_name] += 1 - # TODO CT-211 - column_names[idx] = f"{col_name}_{unique_col_names[col_name]}" # type: ignore[index] # noqa - else: - # TODO CT-211 - unique_col_names[column_names[idx]] = 1 # type: ignore[index] - return [dict(zip(column_names, row)) for row in rows] - - @classmethod - def get_result_from_cursor(cls, cursor: Any, limit: Optional[int]) -> agate.Table: - data: List[Any] = [] - column_names: List[str] = [] - - if cursor.description is not None: - column_names = [col[0] for col in cursor.description] - if limit: - rows = cursor.fetchmany(limit) - else: - rows = cursor.fetchall() - data = cls.process_results(column_names, rows) - - return dbt.clients.agate_helper.table_from_data_flat(data, column_names) - - @classmethod - def data_type_code_to_name(cls, type_code: Union[int, str]) -> str: - """Get the string representation of the data type from the type_code.""" - # https://peps.python.org/pep-0249/#type-objects - raise dbt.exceptions.NotImplementedError( - "`data_type_code_to_name` is not implemented for this adapter!" 
- ) - - def execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False, limit: Optional[int] = None - ) -> Tuple[AdapterResponse, agate.Table]: - sql = self._add_query_comment(sql) - _, cursor = self.add_query(sql, auto_begin) - response = self.get_response(cursor) - if fetch: - table = self.get_result_from_cursor(cursor, limit) - else: - table = dbt.clients.agate_helper.empty_table() - return response, table - - def add_begin_query(self): - return self.add_query("BEGIN", auto_begin=False) - - def add_commit_query(self): - return self.add_query("COMMIT", auto_begin=False) - - def add_select_query(self, sql: str) -> Tuple[Connection, Any]: - sql = self._add_query_comment(sql) - return self.add_query(sql, auto_begin=False) - - def begin(self): - connection = self.get_thread_connection() - if connection.transaction_open is True: - raise dbt.exceptions.DbtInternalError( - 'Tried to begin a new transaction on connection "{}", but ' - "it already had one open!".format(connection.name) - ) - - self.add_begin_query() - - connection.transaction_open = True - return connection - - def commit(self): - connection = self.get_thread_connection() - if connection.transaction_open is False: - raise dbt.exceptions.DbtInternalError( - 'Tried to commit transaction on connection "{}", but ' - "it does not have one open!".format(connection.name) - ) - - fire_event(SQLCommit(conn_name=connection.name, node_info=get_node_info())) - self.add_commit_query() - - connection.transaction_open = False - - return connection diff --git a/core/dbt/adapters/sql/impl.py b/core/dbt/adapters/sql/impl.py deleted file mode 100644 index b74eb02d991..00000000000 --- a/core/dbt/adapters/sql/impl.py +++ /dev/null @@ -1,270 +0,0 @@ -import agate -from typing import Any, Optional, Tuple, Type, List - -from dbt.contracts.connection import Connection, AdapterResponse -from dbt.exceptions import RelationTypeNullError -from dbt.adapters.base import BaseAdapter, available -from dbt.adapters.cache import _make_ref_key_dict -from dbt.adapters.sql import SQLConnectionManager -from dbt.events.functions import fire_event -from dbt.events.types import ColTypeChange, SchemaCreation, SchemaDrop - - -from dbt.adapters.base.relation import BaseRelation - -LIST_RELATIONS_MACRO_NAME = "list_relations_without_caching" -GET_COLUMNS_IN_RELATION_MACRO_NAME = "get_columns_in_relation" -LIST_SCHEMAS_MACRO_NAME = "list_schemas" -CHECK_SCHEMA_EXISTS_MACRO_NAME = "check_schema_exists" -CREATE_SCHEMA_MACRO_NAME = "create_schema" -DROP_SCHEMA_MACRO_NAME = "drop_schema" -RENAME_RELATION_MACRO_NAME = "rename_relation" -TRUNCATE_RELATION_MACRO_NAME = "truncate_relation" -DROP_RELATION_MACRO_NAME = "drop_relation" -ALTER_COLUMN_TYPE_MACRO_NAME = "alter_column_type" -VALIDATE_SQL_MACRO_NAME = "validate_sql" - - -class SQLAdapter(BaseAdapter): - """The default adapter with the common agate conversions and some SQL - methods was implemented. This adapter has a different much shorter list of - methods to implement, but some more macros that must be implemented. - - To implement a macro, implement "${adapter_type}__${macro_name}". in the - adapter's internal project. 
- - Methods to implement: - - date_function - - Macros to implement: - - get_catalog - - list_relations_without_caching - - get_columns_in_relation - """ - - ConnectionManager: Type[SQLConnectionManager] - connections: SQLConnectionManager - - @available.parse(lambda *a, **k: (None, None)) - def add_query( - self, - sql: str, - auto_begin: bool = True, - bindings: Optional[Any] = None, - abridge_sql_log: bool = False, - ) -> Tuple[Connection, Any]: - """Add a query to the current transaction. A thin wrapper around - ConnectionManager.add_query. - - :param sql: The SQL query to add - :param auto_begin: If set and there is no transaction in progress, - begin a new one. - :param bindings: An optional list of bindings for the query. - :param abridge_sql_log: If set, limit the raw sql logged to 512 - characters - """ - return self.connections.add_query(sql, auto_begin, bindings, abridge_sql_log) - - @classmethod - def convert_text_type(cls, agate_table: agate.Table, col_idx: int) -> str: - return "text" - - @classmethod - def convert_number_type(cls, agate_table: agate.Table, col_idx: int) -> str: - # TODO CT-211 - decimals = agate_table.aggregate(agate.MaxPrecision(col_idx)) # type: ignore[attr-defined] - return "float8" if decimals else "integer" - - @classmethod - def convert_boolean_type(cls, agate_table: agate.Table, col_idx: int) -> str: - return "boolean" - - @classmethod - def convert_datetime_type(cls, agate_table: agate.Table, col_idx: int) -> str: - return "timestamp without time zone" - - @classmethod - def convert_date_type(cls, agate_table: agate.Table, col_idx: int) -> str: - return "date" - - @classmethod - def convert_time_type(cls, agate_table: agate.Table, col_idx: int) -> str: - return "time" - - @classmethod - def is_cancelable(cls) -> bool: - return True - - def expand_column_types(self, goal, current): - reference_columns = {c.name: c for c in self.get_columns_in_relation(goal)} - - target_columns = {c.name: c for c in self.get_columns_in_relation(current)} - - for column_name, reference_column in reference_columns.items(): - target_column = target_columns.get(column_name) - - if target_column is not None and target_column.can_expand_to(reference_column): - col_string_size = reference_column.string_size() - new_type = self.Column.string_type(col_string_size) - fire_event( - ColTypeChange( - orig_type=target_column.data_type, - new_type=new_type, - table=_make_ref_key_dict(current), - ) - ) - - self.alter_column_type(current, column_name, new_type) - - def alter_column_type(self, relation, column_name, new_column_type) -> None: - """ - 1. Create a new column (w/ temp name and correct type) - 2. Copy data over to it - 3. Drop the existing column (cascade!) - 4. 
Rename the new column to existing column - """ - kwargs = { - "relation": relation, - "column_name": column_name, - "new_column_type": new_column_type, - } - self.execute_macro(ALTER_COLUMN_TYPE_MACRO_NAME, kwargs=kwargs) - - def drop_relation(self, relation): - if relation.type is None: - raise RelationTypeNullError(relation) - - self.cache_dropped(relation) - self.execute_macro(DROP_RELATION_MACRO_NAME, kwargs={"relation": relation}) - - def truncate_relation(self, relation): - self.execute_macro(TRUNCATE_RELATION_MACRO_NAME, kwargs={"relation": relation}) - - def rename_relation(self, from_relation, to_relation): - self.cache_renamed(from_relation, to_relation) - - kwargs = {"from_relation": from_relation, "to_relation": to_relation} - self.execute_macro(RENAME_RELATION_MACRO_NAME, kwargs=kwargs) - - def get_columns_in_relation(self, relation): - return self.execute_macro( - GET_COLUMNS_IN_RELATION_MACRO_NAME, kwargs={"relation": relation} - ) - - def create_schema(self, relation: BaseRelation) -> None: - relation = relation.without_identifier() - fire_event(SchemaCreation(relation=_make_ref_key_dict(relation))) - kwargs = { - "relation": relation, - } - self.execute_macro(CREATE_SCHEMA_MACRO_NAME, kwargs=kwargs) - self.commit_if_has_connection() - # we can't update the cache here, as if the schema already existed we - # don't want to (incorrectly) say that it's empty - - def drop_schema(self, relation: BaseRelation) -> None: - relation = relation.without_identifier() - fire_event(SchemaDrop(relation=_make_ref_key_dict(relation))) - kwargs = { - "relation": relation, - } - self.execute_macro(DROP_SCHEMA_MACRO_NAME, kwargs=kwargs) - self.commit_if_has_connection() - # we can update the cache here - self.cache.drop_schema(relation.database, relation.schema) - - def list_relations_without_caching( - self, - schema_relation: BaseRelation, - ) -> List[BaseRelation]: - kwargs = {"schema_relation": schema_relation} - results = self.execute_macro(LIST_RELATIONS_MACRO_NAME, kwargs=kwargs) - - relations = [] - quote_policy = {"database": True, "schema": True, "identifier": True} - for _database, name, _schema, _type in results: - try: - _type = self.Relation.get_relation_type(_type) - except ValueError: - _type = self.Relation.External - relations.append( - self.Relation.create( - database=_database, - schema=_schema, - identifier=name, - quote_policy=quote_policy, - type=_type, - ) - ) - return relations - - @classmethod - def quote(self, identifier): - return '"{}"'.format(identifier) - - def list_schemas(self, database: str) -> List[str]: - results = self.execute_macro(LIST_SCHEMAS_MACRO_NAME, kwargs={"database": database}) - - return [row[0] for row in results] - - def check_schema_exists(self, database: str, schema: str) -> bool: - information_schema = self.Relation.create( - database=database, - schema=schema, - identifier="INFORMATION_SCHEMA", - quote_policy=self.config.quoting, - ).information_schema() - - kwargs = {"information_schema": information_schema, "schema": schema} - results = self.execute_macro(CHECK_SCHEMA_EXISTS_MACRO_NAME, kwargs=kwargs) - return results[0][0] > 0 - - def validate_sql(self, sql: str) -> AdapterResponse: - """Submit the given SQL to the engine for validation, but not execution. - - By default we simply prefix the query with the explain keyword and allow the - exceptions thrown by the underlying engine on invalid SQL inputs to bubble up - to the exception handler. 
For adjustments to the explain statement - such as - for adapters that have different mechanisms for hinting at query validation - or dry-run - callers may be able to override the validate_sql_query macro with - the addition of an <adapter>__validate_sql implementation. - - :param sql str: The sql to validate - """ - kwargs = { - "sql": sql, - } - result = self.execute_macro(VALIDATE_SQL_MACRO_NAME, kwargs=kwargs) - # The statement macro always returns an AdapterResponse in the output AttrDict's - # `response` property, and we preserve the full payload in case we want to - # return fetched output for engines where explain plans are emitted as columnar - # results. Any macro override that deviates from this behavior may encounter an - # assertion error in the runtime. - adapter_response = result.response # type: ignore[attr-defined] - assert isinstance(adapter_response, AdapterResponse), ( - f"Expected AdapterResponse from validate_sql macro execution, " - f"got {type(adapter_response)}." - ) - return adapter_response - - # This is for use in the test suite - def run_sql_for_tests(self, sql, fetch, conn): - cursor = conn.handle.cursor() - try: - cursor.execute(sql) - if hasattr(conn.handle, "commit"): - conn.handle.commit() - if fetch == "one": - return cursor.fetchone() - elif fetch == "all": - return cursor.fetchall() - else: - return - except BaseException as e: - if conn.handle and not getattr(conn.handle, "closed", True): - conn.handle.rollback() - print(sql) - print(e) - raise - finally: - conn.transaction_open = False diff --git a/core/dbt/artifacts/README.md b/core/dbt/artifacts/README.md new file mode 100644 index 00000000000..cc4ac0fae2b --- /dev/null +++ b/core/dbt/artifacts/README.md @@ -0,0 +1,74 @@ +# dbt/artifacts + +## Overview +This directory is meant to be a lightweight module that is independent (and upstream of) the rest of `dbt-core` internals. + +Its primary responsibility is to define simple data classes that represent the versioned artifact schemas that dbt writes as JSON files throughout execution. + +Eventually, this module may be released as a standalone package (e.g. `dbt-artifacts`) to support stable programmatic parsing of dbt artifacts. + +`dbt/artifacts` is organized into artifact 'schemas' and 'resources'. Schemas represent the final serialized artifact objects, while resources represent smaller components within those schemas. + +### dbt/artifacts/schemas + +Each major version of a schema under `dbt/artifacts/schema` is defined in its corresponding `dbt/artifacts/schema/<artifact-name>/v<version>` directory. Before `dbt/artifacts` artifact schemas were always modified in-place, which is why older artifacts are those missing class definitions. + +Currently, there are four artifact schemas defined in `dbt/artifact/schemas`: + +| Artifact name | File | Class | Latest definition | +|---------------|------------------|----------------------------------|-----------------------------------| +| manifest | manifest.json | WritableManifest | dbt/artifacts/schema/manifest/v12 | +| catalog | catalog.json | CatalogArtifact | dbt/artifacts/schema/catalog/v1 | +| run | run_results.json | RunResultsArtifact | dbt/artifacts/schema/run/v5 | +| freshness | sources.json | FreshnessExecutionResultArtifact | dbt/artifacts/schema/freshness/v3 | + + +### dbt/artifacts/resources + +All existing resources are defined under `dbt/artifacts/resources/v1`. 
+ +## Making changes to dbt/artifacts + +### All changes + +All changes to any fields will require a manual update to [dbt-jsonschema](https://github.com/dbt-labs/dbt-jsonschema) to ensure live checking continues to work. + +### Non-breaking changes + +Freely make incremental, non-breaking changes in-place to the latest major version of any artifact (minor or patch bumps). The only changes that are fully forward and backward compatible are: +* Adding a new field with a default +* Deleting a field with a default. This is compatible in terms of serialization and deserialization, but may still lead to surprising behaviour: + * For artifact consumers relying on the field's existence (e.g. `manifest["deleted_field"]` will stop working unless the access was implemented safely) + * Old code (e.g. in dbt-core) that relies on the value of the deleted field may have surprising behaviour given only the default value will be set when instantiated from the new schema + +These types of minor, non-breaking changes are tested by [tests/unit/artifacts/test_base_resource.py::TestMinorSchemaChange](https://github.com/dbt-labs/dbt-core/blob/main/tests/unit/artifacts/test_base_resource.py). + + +#### Updating [schemas.getdbt.com](https://schemas.getdbt.com) +Non-breaking changes to artifact schemas require an update to the corresponding jsonschemas published to [schemas.getdbt.com](https://schemas.getdbt.com), which are defined in https://github.com/dbt-labs/schemas.getdbt.com. To do so: +Note this must be done AFTER the core pull request is merged, otherwise we may end up with unresolvable conflicts and schemas that are invalid prior to base pull request merge. You may create the schemas.getdbt.com pull request prior to merging the base pull request, but do not merge until afterward. +1. Create a PR in https://github.com/dbt-labs/schemas.getdbt.com which reflects the schema changes to the artifact. The schema can be updated in-place for non-breaking changes. Example PR: https://github.com/dbt-labs/schemas.getdbt.com/pull/39 +2. Merge the https://github.com/dbt-labs/schemas.getdbt.com PR + +Note: Although `jsonschema` validation using the schemas in [schemas.getdbt.com](https://schemas.getdbt.com) is not encouraged or formally supported, `jsonschema` validation should still continue to work once the schemas are updated because they are forward-compatible and can therefore be used to validate previous minor versions of the schema. + +### Breaking changes +A breaking change is anything that: +* Deletes a required field +* Changes the name or type of an existing field +* Removes the default value of an existing field + +These should be avoided wherever possible. When necessary, multiple breaking changes should be bundled together, to aim for minimal disruption across the ecosystem of tools that leverage dbt metadata. + +When it comes time to make breaking changes, a new versioned artifact should be created as follows: + 1. Create a new version directory and file that defines the new artifact schema under `dbt/artifacts/schemas/<artifact>/v<next-artifact-version>/<artifact>.py` + 2. If any resources are having breaking changes introduced, create a new resource class that defines the new resource schema under `dbt/artifacts/resources/v<next-resource-version>/<resource>.py` + 3. Implement upgrade paths on the new versioned artifact class so it can be constructed given a dictionary representation of any previous version of the same artifact + * TODO: link example once available +4.
Implement downgrade paths on all previous versions of the artifact class so they can still be constructed given a dictionary representation of the new artifact schema + * TODO: link example once available +5. Update the 'latest' aliases to point to the new version of the artifact and/or resource: + * Artifact: `dbt/artifacts/schemas/<artifact>/__init__.py ` + * Resource: `dbt/artifacts/resources/__init__.py ` + +Downstream consumers (e.g. `dbt-core`) importing from the latest alias are susceptible to breaking changes. Ideally, any incompatibilities should be caught by static type checking in those systems. However, it is always possible for consumers to pin imports to previous versions via `dbt.artifacts.schemas.<artifact>.v<prev-version>`. diff --git a/core/dbt/contracts/graph/searcher.py b/core/dbt/artifacts/__init__.py similarity index 100% rename from core/dbt/contracts/graph/searcher.py rename to core/dbt/artifacts/__init__.py diff --git a/core/dbt/artifacts/exceptions/__init__.py b/core/dbt/artifacts/exceptions/__init__.py new file mode 100644 index 00000000000..ad8d4ae51b7 --- /dev/null +++ b/core/dbt/artifacts/exceptions/__init__.py @@ -0,0 +1 @@ +from dbt.artifacts.exceptions.schemas import IncompatibleSchemaError diff --git a/core/dbt/artifacts/exceptions/schemas.py b/core/dbt/artifacts/exceptions/schemas.py new file mode 100644 index 00000000000..c9f1b0e151f --- /dev/null +++ b/core/dbt/artifacts/exceptions/schemas.py @@ -0,0 +1,31 @@ +from typing import Optional + +from dbt_common.exceptions import DbtRuntimeError + + +class IncompatibleSchemaError(DbtRuntimeError): + def __init__(self, expected: str, found: Optional[str] = None) -> None: + self.expected = expected + self.found = found + self.filename = "input file" + + super().__init__(msg=self.get_message()) + + def add_filename(self, filename: str): + self.filename = filename + self.msg = self.get_message() + + def get_message(self) -> str: + found_str = "nothing" + if self.found is not None: + found_str = f'"{self.found}"' + + msg = ( + f'Expected a schema version of "{self.expected}" in ' + f"{self.filename}, but found {found_str}. Are you running with a " + f"different version of dbt?"
+ ) + return msg + + CODE = 10014 + MESSAGE = "Incompatible Schema" diff --git a/core/dbt/artifacts/resources/__init__.py b/core/dbt/artifacts/resources/__init__.py new file mode 100644 index 00000000000..3435c386daf --- /dev/null +++ b/core/dbt/artifacts/resources/__init__.py @@ -0,0 +1,97 @@ +from dbt.artifacts.resources.base import BaseResource, Docs, FileHash, GraphResource +from dbt.artifacts.resources.v1.analysis import Analysis + +# alias to latest resource definitions +from dbt.artifacts.resources.v1.components import ( + ColumnInfo, + CompiledResource, + Contract, + DeferRelation, + DependsOn, + FreshnessThreshold, + HasRelationMetadata, + InjectedCTE, + NodeVersion, + ParsedResource, + ParsedResourceMandatory, + Quoting, + RefArgs, + Time, +) +from dbt.artifacts.resources.v1.config import ( + Hook, + NodeAndTestConfig, + NodeConfig, + TestConfig, +) +from dbt.artifacts.resources.v1.documentation import Documentation +from dbt.artifacts.resources.v1.exposure import ( + Exposure, + ExposureConfig, + ExposureType, + MaturityType, +) +from dbt.artifacts.resources.v1.generic_test import GenericTest, TestMetadata +from dbt.artifacts.resources.v1.group import Group +from dbt.artifacts.resources.v1.hook import HookNode +from dbt.artifacts.resources.v1.macro import Macro, MacroArgument, MacroDependsOn +from dbt.artifacts.resources.v1.metric import ( + ConstantPropertyInput, + ConversionTypeParams, + CumulativeTypeParams, + Metric, + MetricConfig, + MetricInput, + MetricInputMeasure, + MetricTimeWindow, + MetricTypeParams, +) +from dbt.artifacts.resources.v1.model import Model, ModelConfig, TimeSpine +from dbt.artifacts.resources.v1.owner import Owner +from dbt.artifacts.resources.v1.saved_query import ( + Export, + ExportConfig, + QueryParams, + SavedQuery, + SavedQueryConfig, + SavedQueryMandatory, +) +from dbt.artifacts.resources.v1.seed import Seed, SeedConfig +from dbt.artifacts.resources.v1.semantic_layer_components import ( + FileSlice, + SourceFileMetadata, + WhereFilter, + WhereFilterIntersection, +) +from dbt.artifacts.resources.v1.semantic_model import ( + Defaults, + Dimension, + DimensionTypeParams, + DimensionValidityParams, + Entity, + Measure, + MeasureAggregationParameters, + NodeRelation, + NonAdditiveDimension, + SemanticModel, + SemanticModelConfig, +) +from dbt.artifacts.resources.v1.singular_test import SingularTest +from dbt.artifacts.resources.v1.snapshot import Snapshot, SnapshotConfig +from dbt.artifacts.resources.v1.source_definition import ( + ExternalPartition, + ExternalTable, + ParsedSourceMandatory, + SourceConfig, + SourceDefinition, +) +from dbt.artifacts.resources.v1.sql_operation import SqlOperation +from dbt.artifacts.resources.v1.unit_test_definition import ( + UnitTestConfig, + UnitTestDefinition, + UnitTestFormat, + UnitTestInputFixture, + UnitTestNodeVersions, + UnitTestOutputFixture, + UnitTestOverrides, +) diff --git a/core/dbt/artifacts/resources/base.py b/core/dbt/artifacts/resources/base.py new file mode 100644 index 00000000000..0911a997c19 --- /dev/null +++ b/core/dbt/artifacts/resources/base.py @@ -0,0 +1,67 @@ +import hashlib +from dataclasses import dataclass +from typing import List, Optional + +from dbt.artifacts.resources.types import NodeType +from dbt_common.dataclass_schema import dbtClassMixin + + +@dataclass +class BaseResource(dbtClassMixin): + name: str + resource_type: NodeType + package_name: str + path: str + original_file_path: str + unique_id: str + + +@dataclass +class GraphResource(BaseResource): + fqn: List[str] + + 
+@dataclass +class FileHash(dbtClassMixin): + name: str # the hash type name + checksum: str # the hashlib.hash_type().hexdigest() of the file contents + + @classmethod + def empty(cls): + return FileHash(name="none", checksum="") + + @classmethod + def path(cls, path: str): + return FileHash(name="path", checksum=path) + + def __eq__(self, other): + if not isinstance(other, FileHash): + return NotImplemented + + if self.name == "none" or self.name != other.name: + return False + + return self.checksum == other.checksum + + def compare(self, contents: str) -> bool: + """Compare the file contents with the given hash""" + if self.name == "none": + return False + + return self.from_contents(contents, name=self.name) == self.checksum + + @classmethod + def from_contents(cls, contents: str, name="sha256") -> "FileHash": + """Create a file hash from the given file contents. The hash is always + the utf-8 encoding of the contents given, because dbt only reads files + as utf-8. + """ + data = contents.encode("utf-8") + checksum = hashlib.new(name, data).hexdigest() + return cls(name=name, checksum=checksum) + + +@dataclass +class Docs(dbtClassMixin): + show: bool = True + node_color: Optional[str] = None diff --git a/core/dbt/artifacts/resources/types.py b/core/dbt/artifacts/resources/types.py new file mode 100644 index 00000000000..bac25bd2e0e --- /dev/null +++ b/core/dbt/artifacts/resources/types.py @@ -0,0 +1,77 @@ +from dbt_common.dataclass_schema import StrEnum + + +class AccessType(StrEnum): + Private = "private" + Protected = "protected" + Public = "public" + + @classmethod + def is_valid(cls, item): + try: + cls(item) + except ValueError: + return False + return True + + +class NodeType(StrEnum): + Model = "model" + Analysis = "analysis" + Test = "test" # renamed to 'data_test'; preserved as 'test' here for back-compat + Snapshot = "snapshot" + Operation = "operation" + Seed = "seed" + # TODO: rm? 
+ RPCCall = "rpc" + SqlOperation = "sql_operation" + Documentation = "doc" + Source = "source" + Macro = "macro" + Exposure = "exposure" + Metric = "metric" + Group = "group" + SavedQuery = "saved_query" + SemanticModel = "semantic_model" + Unit = "unit_test" + Fixture = "fixture" + + def pluralize(self) -> str: + if self is self.Analysis: + return "analyses" + elif self is self.SavedQuery: + return "saved_queries" + elif self is self.Test: + return "data_tests" + return f"{self}s" + + +class RunHookType(StrEnum): + Start = "on-run-start" + End = "on-run-end" + + +class ModelLanguage(StrEnum): + python = "python" + sql = "sql" + + +class ModelHookType(StrEnum): + PreHook = "pre-hook" + PostHook = "post-hook" + + +class TimePeriod(StrEnum): + minute = "minute" + hour = "hour" + day = "day" + + def plural(self) -> str: + return str(self) + "s" + + +class BatchSize(StrEnum): + hour = "hour" + day = "day" + month = "month" + year = "year" diff --git a/core/dbt/artifacts/resources/v1/analysis.py b/core/dbt/artifacts/resources/v1/analysis.py new file mode 100644 index 00000000000..325db979c61 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/analysis.py @@ -0,0 +1,10 @@ +from dataclasses import dataclass +from typing import Literal + +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import CompiledResource + + +@dataclass +class Analysis(CompiledResource): + resource_type: Literal[NodeType.Analysis] diff --git a/core/dbt/artifacts/resources/v1/components.py b/core/dbt/artifacts/resources/v1/components.py new file mode 100644 index 00000000000..fc6f44a38f0 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/components.py @@ -0,0 +1,237 @@ +import time +from dataclasses import dataclass, field +from datetime import timedelta +from typing import Any, Dict, List, Optional, Union + +from dbt.artifacts.resources.base import Docs, FileHash, GraphResource +from dbt.artifacts.resources.types import NodeType, TimePeriod +from dbt.artifacts.resources.v1.config import NodeConfig +from dbt_common.contracts.config.properties import AdditionalPropertiesMixin +from dbt_common.contracts.constraints import ColumnLevelConstraint +from dbt_common.contracts.util import Mergeable +from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin +from dbt_semantic_interfaces.type_enums import TimeGranularity + +NodeVersion = Union[str, float] + + +@dataclass +class MacroDependsOn(dbtClassMixin): + macros: List[str] = field(default_factory=list) + + # 'in' on lists is O(n) so this is O(n^2) for # of macros + def add_macro(self, value: str): + if value not in self.macros: + self.macros.append(value) + + +@dataclass +class DependsOn(MacroDependsOn): + nodes: List[str] = field(default_factory=list) + + def add_node(self, value: str): + if value not in self.nodes: + self.nodes.append(value) + + +@dataclass +class RefArgs(dbtClassMixin): + name: str + package: Optional[str] = None + version: Optional[NodeVersion] = None + + @property + def positional_args(self) -> List[str]: + if self.package: + return [self.package, self.name] + else: + return [self.name] + + @property + def keyword_args(self) -> Dict[str, Optional[NodeVersion]]: + if self.version: + return {"version": self.version} + else: + return {} + + +@dataclass +class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin): + """Used in all ManifestNodes and SourceDefinition""" + + name: str + description: str = "" + meta: Dict[str, Any] = field(default_factory=dict) + data_type: Optional[str] = 
None + constraints: List[ColumnLevelConstraint] = field(default_factory=list) + quote: Optional[bool] = None + tags: List[str] = field(default_factory=list) + _extra: Dict[str, Any] = field(default_factory=dict) + granularity: Optional[TimeGranularity] = None + + +@dataclass +class InjectedCTE(dbtClassMixin): + """Used in CompiledNodes as part of ephemeral model processing""" + + id: str + sql: str + + +@dataclass +class Contract(dbtClassMixin): + enforced: bool = False + alias_types: bool = True + checksum: Optional[str] = None + + +@dataclass +class Quoting(dbtClassMixin, Mergeable): + database: Optional[bool] = None + schema: Optional[bool] = None + identifier: Optional[bool] = None + column: Optional[bool] = None + + +@dataclass +class Time(dbtClassMixin, Mergeable): + count: Optional[int] = None + period: Optional[TimePeriod] = None + + def exceeded(self, actual_age: float) -> bool: + if self.period is None or self.count is None: + return False + kwargs: Dict[str, int] = {self.period.plural(): self.count} + difference = timedelta(**kwargs).total_seconds() + return actual_age > difference + + def __bool__(self): + return self.count is not None and self.period is not None + + +@dataclass +class FreshnessThreshold(dbtClassMixin, Mergeable): + warn_after: Optional[Time] = field(default_factory=Time) + error_after: Optional[Time] = field(default_factory=Time) + filter: Optional[str] = None + + def status(self, age: float) -> "dbt.artifacts.schemas.results.FreshnessStatus": # type: ignore # noqa F821 + from dbt.artifacts.schemas.results import FreshnessStatus + + if self.error_after and self.error_after.exceeded(age): + return FreshnessStatus.Error + elif self.warn_after and self.warn_after.exceeded(age): + return FreshnessStatus.Warn + else: + return FreshnessStatus.Pass + + def __bool__(self): + return bool(self.warn_after) or bool(self.error_after) + + +@dataclass +class HasRelationMetadata(dbtClassMixin): + database: Optional[str] + schema: str + + # Can't set database to None like it ought to be + # because it messes up the subclasses and default parameters + # so hack it here + @classmethod + def __pre_deserialize__(cls, data): + data = super().__pre_deserialize__(data) + if "database" not in data: + data["database"] = None + return data + + @property + def quoting_dict(self) -> Dict[str, bool]: + if hasattr(self, "quoting"): + return self.quoting.to_dict(omit_none=True) + else: + return {} + + +@dataclass +class DeferRelation(HasRelationMetadata): + alias: str + relation_name: Optional[str] + # The rest of these fields match RelationConfig protocol exactly + resource_type: NodeType + name: str + description: str + compiled_code: Optional[str] + meta: Dict[str, Any] + tags: List[str] + config: Optional[NodeConfig] + + @property + def identifier(self): + return self.alias + + +@dataclass +class ParsedResourceMandatory(GraphResource, HasRelationMetadata): + alias: str + checksum: FileHash + config: NodeConfig = field(default_factory=NodeConfig) + + @property + def identifier(self): + return self.alias + + +@dataclass +class ParsedResource(ParsedResourceMandatory): + tags: List[str] = field(default_factory=list) + description: str = field(default="") + columns: Dict[str, ColumnInfo] = field(default_factory=dict) + meta: Dict[str, Any] = field(default_factory=dict) + group: Optional[str] = None + docs: Docs = field(default_factory=Docs) + patch_path: Optional[str] = None + build_path: Optional[str] = None + unrendered_config: Dict[str, Any] = field(default_factory=dict) + created_at: 
float = field(default_factory=lambda: time.time()) + config_call_dict: Dict[str, Any] = field(default_factory=dict) + relation_name: Optional[str] = None + raw_code: str = "" + + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if context and context.get("artifact") and "config_call_dict" in dct: + del dct["config_call_dict"] + return dct + + +@dataclass +class CompiledResource(ParsedResource): + """Contains attributes necessary for SQL files and nodes with refs, sources, etc, + so all ManifestNodes except SeedNode.""" + + language: str = "sql" + refs: List[RefArgs] = field(default_factory=list) + sources: List[List[str]] = field(default_factory=list) + metrics: List[List[str]] = field(default_factory=list) + depends_on: DependsOn = field(default_factory=DependsOn) + compiled_path: Optional[str] = None + compiled: bool = False + compiled_code: Optional[str] = None + extra_ctes_injected: bool = False + extra_ctes: List[InjectedCTE] = field(default_factory=list) + _pre_injected_sql: Optional[str] = None + contract: Contract = field(default_factory=Contract) + + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if "_pre_injected_sql" in dct: + del dct["_pre_injected_sql"] + # Remove compiled attributes + if "compiled" in dct and dct["compiled"] is False: + del dct["compiled"] + del dct["extra_ctes_injected"] + del dct["extra_ctes"] + # "omit_none" means these might not be in the dictionary + if "compiled_code" in dct: + del dct["compiled_code"] + return dct diff --git a/core/dbt/artifacts/resources/v1/config.py b/core/dbt/artifacts/resources/v1/config.py new file mode 100644 index 00000000000..e6cd26ec823 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/config.py @@ -0,0 +1,263 @@ +import re +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Union + +from mashumaro.jsonschema.annotations import Pattern +from typing_extensions import Annotated + +from dbt import hooks +from dbt.artifacts.resources.base import Docs +from dbt.artifacts.resources.types import ModelHookType +from dbt.artifacts.utils.validation import validate_color +from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior +from dbt_common.contracts.config.materialization import OnConfigurationChangeOption +from dbt_common.contracts.config.metadata import Metadata, ShowBehavior +from dbt_common.dataclass_schema import ValidationError, dbtClassMixin + + +def list_str() -> List[str]: + return [] + + +class Severity(str): + pass + + +def metas(*metas: Metadata) -> Dict[str, Any]: + existing: Dict[str, Any] = {} + for m in metas: + existing = m.meta(existing) + return existing + + +@dataclass +class ContractConfig(dbtClassMixin): + enforced: bool = False + alias_types: bool = True + + +@dataclass +class Hook(dbtClassMixin): + sql: str + transaction: bool = True + index: Optional[int] = None + + +@dataclass +class NodeAndTestConfig(BaseConfig): + enabled: bool = True + # these fields are included in serialized output, but are not part of + # config comparison (they are part of database_representation) + alias: Optional[str] = field( + default=None, + metadata=CompareBehavior.Exclude.meta(), + ) + schema: Optional[str] = field( + default=None, + metadata=CompareBehavior.Exclude.meta(), + ) + database: Optional[str] = field( + default=None, + metadata=CompareBehavior.Exclude.meta(), + ) + tags: Union[List[str], str] = 
field( + default_factory=list_str, + metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude), + ) + meta: Dict[str, Any] = field( + default_factory=dict, + metadata=MergeBehavior.Update.meta(), + ) + group: Optional[str] = field( + default=None, + metadata=CompareBehavior.Exclude.meta(), + ) + + +@dataclass +class NodeConfig(NodeAndTestConfig): + # Note: if any new fields are added with MergeBehavior, also update the + # 'mergebehavior' dictionary + materialized: str = "view" + incremental_strategy: Optional[str] = None + batch_size: Any = None + lookback: Any = 0 + persist_docs: Dict[str, Any] = field(default_factory=dict) + post_hook: List[Hook] = field( + default_factory=list, + metadata={"merge": MergeBehavior.Append, "alias": "post-hook"}, + ) + pre_hook: List[Hook] = field( + default_factory=list, + metadata={"merge": MergeBehavior.Append, "alias": "pre-hook"}, + ) + quoting: Dict[str, Any] = field( + default_factory=dict, + metadata=MergeBehavior.Update.meta(), + ) + # This is actually only used by seeds. Should it be available to others? + # That would be a breaking change! + column_types: Dict[str, Any] = field( + default_factory=dict, + metadata=MergeBehavior.Update.meta(), + ) + full_refresh: Optional[bool] = None + # 'unique_key' doesn't use 'Optional' because typing.get_type_hints was + # sometimes getting the Union order wrong, causing serialization failures. + unique_key: Union[str, List[str], None] = None + on_schema_change: Optional[str] = "ignore" + on_configuration_change: OnConfigurationChangeOption = field( + default_factory=OnConfigurationChangeOption.default + ) + grants: Dict[str, Any] = field( + default_factory=dict, metadata=MergeBehavior.DictKeyAppend.meta() + ) + packages: List[str] = field( + default_factory=list, + metadata=MergeBehavior.Append.meta(), + ) + docs: Docs = field( + default_factory=Docs, + metadata=MergeBehavior.Update.meta(), + ) + contract: ContractConfig = field( + default_factory=ContractConfig, + metadata=MergeBehavior.Update.meta(), + ) + event_time: Any = None + + def __post_init__(self): + # we validate that node_color has a suitable value to prevent dbt-docs from crashing + if self.docs.node_color: + node_color = self.docs.node_color + if not validate_color(node_color): + raise ValidationError( + f"Invalid color name for docs.node_color: {node_color}. " + "It is neither a valid HTML color name nor a valid HEX code." + ) + + if ( + self.contract.enforced + and self.materialized == "incremental" + and self.on_schema_change not in ("append_new_columns", "fail") + ): + raise ValidationError( + f"Invalid value for on_schema_change: {self.on_schema_change}. 
Models " + "materialized as incremental with contracts enabled must set " + "on_schema_change to 'append_new_columns' or 'fail'" + ) + + @classmethod + def __pre_deserialize__(cls, data): + data = super().__pre_deserialize__(data) + for key in ModelHookType: + if key in data: + data[key] = [hooks.get_hook_dict(h) for h in data[key]] + return data + + +SEVERITY_PATTERN = r"^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + + +@dataclass +class TestConfig(NodeAndTestConfig): + __test__ = False + + # this is repeated because of a different default + schema: Optional[str] = field( + default="dbt_test__audit", + metadata=CompareBehavior.Exclude.meta(), + ) + materialized: str = "test" + # Annotated is used by mashumaro for jsonschema generation + severity: Annotated[Severity, Pattern(SEVERITY_PATTERN)] = Severity("ERROR") + store_failures: Optional[bool] = None + store_failures_as: Optional[str] = None + where: Optional[str] = None + limit: Optional[int] = None + fail_calc: str = "count(*)" + warn_if: str = "!= 0" + error_if: str = "!= 0" + + def __post_init__(self): + """ + The presence of a setting for `store_failures_as` overrides any existing setting for `store_failures`, + regardless of level of granularity. If `store_failures_as` is not set, then `store_failures` takes effect. + At the time of implementation, `store_failures = True` would always create a table; the user could not + configure this. Hence, if `store_failures = True` and `store_failures_as` is not specified, then it + should be set to "table" to mimic the existing functionality. + + A side effect of this overriding functionality is that `store_failures_as="view"` at the project + level cannot be turned off at the model level without setting both `store_failures_as` and + `store_failures`. The former would cascade down and override `store_failures=False`. The proposal + is to include "ephemeral" as a value for `store_failures_as`, which effectively sets + `store_failures=False`. + + The exception handling for this is tricky. If we raise an exception here, the entire run fails at + parse time. We would rather well-formed models run successfully, leaving only exceptions to be rerun + if necessary. Hence, the exception needs to be raised in the test materialization. In order to do so, + we need to make sure that we go down the `store_failures = True` route with the invalid setting for + `store_failures_as`. This results in the `.get()` defaulted to `True` below, instead of a normal + dictionary lookup as is done in the `if` block. Refer to the test materialization for the + exception that is raise as a result of an invalid value. + + The intention of this block is to behave as if `store_failures_as` is the only setting, + but still allow for backwards compatibility for `store_failures`. + See https://github.com/dbt-labs/dbt-core/issues/6914 for more information. 
+ """ + + # if `store_failures_as` is not set, it gets set by `store_failures` + # the settings below mimic existing behavior prior to `store_failures_as` + get_store_failures_as_map = { + True: "table", + False: "ephemeral", + None: None, + } + + # if `store_failures_as` is set, it dictates what `store_failures` gets set to + # the settings below overrides whatever `store_failures` is set to by the user + get_store_failures_map = { + "ephemeral": False, + "table": True, + "view": True, + } + + if self.store_failures_as is None: + self.store_failures_as = get_store_failures_as_map[self.store_failures] + else: + self.store_failures = get_store_failures_map.get(self.store_failures_as, True) + + @classmethod + def same_contents(cls, unrendered: Dict[str, Any], other: Dict[str, Any]) -> bool: + """This is like __eq__, except it explicitly checks certain fields.""" + modifiers = [ + "severity", + "where", + "limit", + "fail_calc", + "warn_if", + "error_if", + "store_failures", + "store_failures_as", + ] + + seen = set() + for _, target_name in cls._get_fields(): + key = target_name + seen.add(key) + if key in modifiers: + if not cls.compare_key(unrendered, other, key): + return False + return True + + @classmethod + def validate(cls, data): + if data.get("severity") and not re.match(SEVERITY_PATTERN, data.get("severity")): + raise ValidationError( + f"Severity must be either 'warn' or 'error'. Got '{data.get('severity')}'" + ) + + super().validate(data) + + if data.get("materialized") and data.get("materialized") != "test": + raise ValidationError("A test must have a materialized value of 'test'") diff --git a/core/dbt/artifacts/resources/v1/documentation.py b/core/dbt/artifacts/resources/v1/documentation.py new file mode 100644 index 00000000000..59c19e4ee6d --- /dev/null +++ b/core/dbt/artifacts/resources/v1/documentation.py @@ -0,0 +1,11 @@ +from dataclasses import dataclass +from typing import Literal + +from dbt.artifacts.resources.base import BaseResource +from dbt.artifacts.resources.types import NodeType + + +@dataclass +class Documentation(BaseResource): + resource_type: Literal[NodeType.Documentation] + block_contents: str diff --git a/core/dbt/artifacts/resources/v1/exposure.py b/core/dbt/artifacts/resources/v1/exposure.py new file mode 100644 index 00000000000..00f3c8b89e1 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/exposure.py @@ -0,0 +1,49 @@ +import time +from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional + +from dbt.artifacts.resources.base import GraphResource +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import DependsOn, RefArgs +from dbt.artifacts.resources.v1.owner import Owner +from dbt_common.contracts.config.base import BaseConfig +from dbt_common.dataclass_schema import StrEnum + + +class ExposureType(StrEnum): + Dashboard = "dashboard" + Notebook = "notebook" + Analysis = "analysis" + ML = "ml" + Application = "application" + + +class MaturityType(StrEnum): + Low = "low" + Medium = "medium" + High = "high" + + +@dataclass +class ExposureConfig(BaseConfig): + enabled: bool = True + + +@dataclass +class Exposure(GraphResource): + type: ExposureType + owner: Owner + resource_type: Literal[NodeType.Exposure] + description: str = "" + label: Optional[str] = None + maturity: Optional[MaturityType] = None + meta: Dict[str, Any] = field(default_factory=dict) + tags: List[str] = field(default_factory=list) + config: ExposureConfig = field(default_factory=ExposureConfig) + 
unrendered_config: Dict[str, Any] = field(default_factory=dict) + url: Optional[str] = None + depends_on: DependsOn = field(default_factory=DependsOn) + refs: List[RefArgs] = field(default_factory=list) + sources: List[List[str]] = field(default_factory=list) + metrics: List[List[str]] = field(default_factory=list) + created_at: float = field(default_factory=lambda: time.time()) diff --git a/core/dbt/artifacts/resources/v1/generic_test.py b/core/dbt/artifacts/resources/v1/generic_test.py new file mode 100644 index 00000000000..504dbb07940 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/generic_test.py @@ -0,0 +1,31 @@ +from dataclasses import dataclass, field +from typing import Any, Dict, Literal, Optional + +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import CompiledResource +from dbt.artifacts.resources.v1.config import TestConfig +from dbt_common.dataclass_schema import dbtClassMixin + + +@dataclass +class TestMetadata(dbtClassMixin): + __test__ = False + + name: str = "test" # dummy default to allow default in GenericTestNode. Should always be set. + # kwargs are the args that are left in the test builder after + # removing configs. They are set from the test builder when + # the test node is created. + kwargs: Dict[str, Any] = field(default_factory=dict) + namespace: Optional[str] = None + + +@dataclass +class GenericTest(CompiledResource): + resource_type: Literal[NodeType.Test] + column_name: Optional[str] = None + file_key_name: Optional[str] = None + # Was not able to make mypy happy and keep the code working. We need to + # refactor the various configs. + config: TestConfig = field(default_factory=TestConfig) # type: ignore + attached_node: Optional[str] = None + test_metadata: TestMetadata = field(default_factory=TestMetadata) diff --git a/core/dbt/artifacts/resources/v1/group.py b/core/dbt/artifacts/resources/v1/group.py new file mode 100644 index 00000000000..c5351268eaa --- /dev/null +++ b/core/dbt/artifacts/resources/v1/group.py @@ -0,0 +1,13 @@ +from dataclasses import dataclass +from typing import Literal + +from dbt.artifacts.resources.base import BaseResource +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.owner import Owner + + +@dataclass +class Group(BaseResource): + name: str + owner: Owner + resource_type: Literal[NodeType.Group] diff --git a/core/dbt/artifacts/resources/v1/hook.py b/core/dbt/artifacts/resources/v1/hook.py new file mode 100644 index 00000000000..93fe314518e --- /dev/null +++ b/core/dbt/artifacts/resources/v1/hook.py @@ -0,0 +1,11 @@ +from dataclasses import dataclass +from typing import Literal, Optional + +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import CompiledResource + + +@dataclass +class HookNode(CompiledResource): + resource_type: Literal[NodeType.Operation] + index: Optional[int] = None diff --git a/core/dbt/artifacts/resources/v1/macro.py b/core/dbt/artifacts/resources/v1/macro.py new file mode 100644 index 00000000000..c5154a9a6d4 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/macro.py @@ -0,0 +1,29 @@ +import time +from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional + +from dbt.artifacts.resources.base import BaseResource, Docs +from dbt.artifacts.resources.types import ModelLanguage, NodeType +from dbt.artifacts.resources.v1.components import MacroDependsOn +from dbt_common.dataclass_schema import dbtClassMixin + + +@dataclass +class 
MacroArgument(dbtClassMixin): + name: str + type: Optional[str] = None + description: str = "" + + +@dataclass +class Macro(BaseResource): + macro_sql: str + resource_type: Literal[NodeType.Macro] + depends_on: MacroDependsOn = field(default_factory=MacroDependsOn) + description: str = "" + meta: Dict[str, Any] = field(default_factory=dict) + docs: Docs = field(default_factory=Docs) + patch_path: Optional[str] = None + arguments: List[MacroArgument] = field(default_factory=list) + created_at: float = field(default_factory=lambda: time.time()) + supported_languages: Optional[List[ModelLanguage]] = None diff --git a/core/dbt/artifacts/resources/v1/metric.py b/core/dbt/artifacts/resources/v1/metric.py new file mode 100644 index 00000000000..0c6da764220 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/metric.py @@ -0,0 +1,147 @@ +import time +from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional + +from dbt.artifacts.resources.base import GraphResource +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import DependsOn, RefArgs +from dbt.artifacts.resources.v1.semantic_layer_components import ( + SourceFileMetadata, + WhereFilterIntersection, +) +from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_semantic_interfaces.references import MeasureReference, MetricReference +from dbt_semantic_interfaces.type_enums import ( + ConversionCalculationType, + MetricType, + PeriodAggregation, + TimeGranularity, +) + +""" +The following classes are dataclasses which are used to construct the Metric +node in dbt-core. Additionally, these classes need to at a minimum support +what is specified in their protocol definitions in dbt-semantic-interfaces. 
+Their protocol definitions can be found here: +https://github.com/dbt-labs/dbt-semantic-interfaces/blob/main/dbt_semantic_interfaces/protocols/metric.py +""" + + +@dataclass +class MetricInputMeasure(dbtClassMixin): + name: str + filter: Optional[WhereFilterIntersection] = None + alias: Optional[str] = None + join_to_timespine: bool = False + fill_nulls_with: Optional[int] = None + + def measure_reference(self) -> MeasureReference: + return MeasureReference(element_name=self.name) + + def post_aggregation_measure_reference(self) -> MeasureReference: + return MeasureReference(element_name=self.alias or self.name) + + +@dataclass +class MetricTimeWindow(dbtClassMixin): + count: int + granularity: TimeGranularity + + +@dataclass +class MetricInput(dbtClassMixin): + name: str + filter: Optional[WhereFilterIntersection] = None + alias: Optional[str] = None + offset_window: Optional[MetricTimeWindow] = None + offset_to_grain: Optional[TimeGranularity] = None + + def as_reference(self) -> MetricReference: + return MetricReference(element_name=self.name) + + def post_aggregation_reference(self) -> MetricReference: + return MetricReference(element_name=self.alias or self.name) + + +@dataclass +class ConstantPropertyInput(dbtClassMixin): + base_property: str + conversion_property: str + + +@dataclass +class ConversionTypeParams(dbtClassMixin): + base_measure: MetricInputMeasure + conversion_measure: MetricInputMeasure + entity: str + calculation: ConversionCalculationType = ConversionCalculationType.CONVERSION_RATE + window: Optional[MetricTimeWindow] = None + constant_properties: Optional[List[ConstantPropertyInput]] = None + + +@dataclass +class CumulativeTypeParams(dbtClassMixin): + window: Optional[MetricTimeWindow] = None + grain_to_date: Optional[TimeGranularity] = None + period_agg: PeriodAggregation = PeriodAggregation.FIRST + + +@dataclass +class MetricTypeParams(dbtClassMixin): + measure: Optional[MetricInputMeasure] = None + input_measures: List[MetricInputMeasure] = field(default_factory=list) + numerator: Optional[MetricInput] = None + denominator: Optional[MetricInput] = None + expr: Optional[str] = None + window: Optional[MetricTimeWindow] = None + grain_to_date: Optional[TimeGranularity] = None + metrics: Optional[List[MetricInput]] = None + conversion_type_params: Optional[ConversionTypeParams] = None + cumulative_type_params: Optional[CumulativeTypeParams] = None + + +@dataclass +class MetricConfig(BaseConfig): + enabled: bool = True + group: Optional[str] = field( + default=None, + metadata=CompareBehavior.Exclude.meta(), + ) + + meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta()) + + +@dataclass +class Metric(GraphResource): + name: str + description: str + label: str + type: MetricType + type_params: MetricTypeParams + filter: Optional[WhereFilterIntersection] = None + metadata: Optional[SourceFileMetadata] = None + time_granularity: Optional[TimeGranularity] = None + resource_type: Literal[NodeType.Metric] + meta: Dict[str, Any] = field(default_factory=dict, metadata=MergeBehavior.Update.meta()) + tags: List[str] = field(default_factory=list) + config: MetricConfig = field(default_factory=MetricConfig) + unrendered_config: Dict[str, Any] = field(default_factory=dict) + sources: List[List[str]] = field(default_factory=list) + depends_on: DependsOn = field(default_factory=DependsOn) + refs: List[RefArgs] = field(default_factory=list) + metrics: List[List[str]] = field(default_factory=list) + created_at: float = field(default_factory=lambda: 
time.time()) + group: Optional[str] = None + + @property + def input_measures(self) -> List[MetricInputMeasure]: + return self.type_params.input_measures + + @property + def measure_references(self) -> List[MeasureReference]: + return [x.measure_reference() for x in self.input_measures] + + @property + def input_metrics(self) -> List[MetricInput]: + return self.type_params.metrics or [] diff --git a/core/dbt/artifacts/resources/v1/model.py b/core/dbt/artifacts/resources/v1/model.py new file mode 100644 index 00000000000..9c43970f488 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/model.py @@ -0,0 +1,54 @@ +from dataclasses import dataclass, field +from datetime import datetime +from typing import Dict, List, Literal, Optional + +from dbt.artifacts.resources.types import AccessType, NodeType +from dbt.artifacts.resources.v1.components import ( + CompiledResource, + DeferRelation, + NodeVersion, +) +from dbt.artifacts.resources.v1.config import NodeConfig +from dbt_common.contracts.config.base import MergeBehavior +from dbt_common.contracts.constraints import ModelLevelConstraint +from dbt_common.dataclass_schema import dbtClassMixin + + +@dataclass +class ModelConfig(NodeConfig): + access: AccessType = field( + default=AccessType.Protected, + metadata=MergeBehavior.Clobber.meta(), + ) + + +@dataclass +class CustomGranularity(dbtClassMixin): + name: str + column_name: Optional[str] = None + + +@dataclass +class TimeSpine(dbtClassMixin): + standard_granularity_column: str + custom_granularities: List[CustomGranularity] = field(default_factory=list) + + +@dataclass +class Model(CompiledResource): + resource_type: Literal[NodeType.Model] + access: AccessType = AccessType.Protected + config: ModelConfig = field(default_factory=ModelConfig) + constraints: List[ModelLevelConstraint] = field(default_factory=list) + version: Optional[NodeVersion] = None + latest_version: Optional[NodeVersion] = None + deprecation_date: Optional[datetime] = None + defer_relation: Optional[DeferRelation] = None + primary_key: List[str] = field(default_factory=list) + time_spine: Optional[TimeSpine] = None + + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if context and context.get("artifact") and "defer_relation" in dct: + del dct["defer_relation"] + return dct diff --git a/core/dbt/artifacts/resources/v1/owner.py b/core/dbt/artifacts/resources/v1/owner.py new file mode 100644 index 00000000000..e270769cc88 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/owner.py @@ -0,0 +1,10 @@ +from dataclasses import dataclass +from typing import Optional + +from dbt_common.contracts.config.properties import AdditionalPropertiesAllowed + + +@dataclass +class Owner(AdditionalPropertiesAllowed): + email: Optional[str] = None + name: Optional[str] = None diff --git a/core/dbt/artifacts/resources/v1/saved_query.py b/core/dbt/artifacts/resources/v1/saved_query.py new file mode 100644 index 00000000000..1eea7990cc1 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/saved_query.py @@ -0,0 +1,103 @@ +from __future__ import annotations + +import time +from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional + +from dbt.artifacts.resources.base import GraphResource +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import DependsOn, RefArgs +from dbt.artifacts.resources.v1.semantic_layer_components import ( + SourceFileMetadata, + WhereFilterIntersection, +) +from 
dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_semantic_interfaces.type_enums.export_destination_type import ( + ExportDestinationType, +) + + +@dataclass +class ExportConfig(dbtClassMixin): + """Nested configuration attributes for exports.""" + + export_as: ExportDestinationType + schema_name: Optional[str] = None + alias: Optional[str] = None + database: Optional[str] = None + + +@dataclass +class Export(dbtClassMixin): + """Configuration for writing query results to a table.""" + + name: str + config: ExportConfig + unrendered_config: Dict[str, str] = field(default_factory=dict) + + +@dataclass +class QueryParams(dbtClassMixin): + """The query parameters for the saved query""" + + metrics: List[str] + group_by: List[str] + where: Optional[WhereFilterIntersection] + + +@dataclass +class SavedQueryCache(dbtClassMixin): + enabled: bool = False + + +@dataclass +class SavedQueryConfig(BaseConfig): + """Where config options for SavedQueries are stored. + + This class is much like many other node config classes. It's likely that + this class will expand in the direction of what's in the `NodeAndTestConfig` + class. It might make sense to clean the various *Config classes into one at + some point. + """ + + enabled: bool = True + group: Optional[str] = field( + default=None, + metadata=CompareBehavior.Exclude.meta(), + ) + meta: Dict[str, Any] = field( + default_factory=dict, + metadata=MergeBehavior.Update.meta(), + ) + export_as: Optional[ExportDestinationType] = None + schema: Optional[str] = None + cache: SavedQueryCache = field(default_factory=SavedQueryCache) + + +@dataclass +class SavedQueryMandatory(GraphResource): + query_params: QueryParams + exports: List[Export] + + +@dataclass +class SavedQuery(SavedQueryMandatory): + resource_type: Literal[NodeType.SavedQuery] + description: Optional[str] = None + label: Optional[str] = None + metadata: Optional[SourceFileMetadata] = None + config: SavedQueryConfig = field(default_factory=SavedQueryConfig) + unrendered_config: Dict[str, Any] = field(default_factory=dict) + group: Optional[str] = None + depends_on: DependsOn = field(default_factory=DependsOn) + created_at: float = field(default_factory=lambda: time.time()) + refs: List[RefArgs] = field(default_factory=list) + + @property + def metrics(self) -> List[str]: + return self.query_params.metrics + + @property + def depends_on_nodes(self): + return self.depends_on.nodes diff --git a/core/dbt/artifacts/resources/v1/seed.py b/core/dbt/artifacts/resources/v1/seed.py new file mode 100644 index 00000000000..5328488b3c5 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/seed.py @@ -0,0 +1,41 @@ +from dataclasses import dataclass, field +from typing import Dict, Literal, Optional + +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import ( + DeferRelation, + MacroDependsOn, + ParsedResource, +) +from dbt.artifacts.resources.v1.config import NodeConfig +from dbt_common.dataclass_schema import ValidationError + + +@dataclass +class SeedConfig(NodeConfig): + materialized: str = "seed" + delimiter: str = "," + quote_columns: Optional[bool] = None + + @classmethod + def validate(cls, data): + super().validate(data) + if data.get("materialized") and data.get("materialized") != "seed": + raise ValidationError("A seed must have a materialized value of 'seed'") + + +@dataclass +class Seed(ParsedResource): # No SQLDefaults! 
+ resource_type: Literal[NodeType.Seed] + config: SeedConfig = field(default_factory=SeedConfig) + # seeds need the root_path because the contents are not loaded initially + # and we need the root_path to load the seed later + root_path: Optional[str] = None + depends_on: MacroDependsOn = field(default_factory=MacroDependsOn) + defer_relation: Optional[DeferRelation] = None + + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if context and context.get("artifact") and "defer_relation" in dct: + del dct["defer_relation"] + return dct diff --git a/core/dbt/artifacts/resources/v1/semantic_layer_components.py b/core/dbt/artifacts/resources/v1/semantic_layer_components.py new file mode 100644 index 00000000000..89f87512b0b --- /dev/null +++ b/core/dbt/artifacts/resources/v1/semantic_layer_components.py @@ -0,0 +1,50 @@ +from dataclasses import dataclass +from typing import List, Sequence, Tuple + +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets +from dbt_semantic_interfaces.parsing.where_filter.where_filter_parser import ( + WhereFilterParser, +) + + +@dataclass +class WhereFilter(dbtClassMixin): + where_sql_template: str + + @property + def call_parameter_sets(self) -> FilterCallParameterSets: + return WhereFilterParser.parse_call_parameter_sets(self.where_sql_template) + + +@dataclass +class WhereFilterIntersection(dbtClassMixin): + where_filters: List[WhereFilter] + + @property + def filter_expression_parameter_sets(self) -> Sequence[Tuple[str, FilterCallParameterSets]]: + raise NotImplementedError + + +@dataclass +class FileSlice(dbtClassMixin): + """Provides file slice level context about what something was created from. + + Implementation of the dbt-semantic-interfaces `FileSlice` protocol + """ + + filename: str + content: str + start_line_number: int + end_line_number: int + + +@dataclass +class SourceFileMetadata(dbtClassMixin): + """Provides file context about what something was created from. + + Implementation of the dbt-semantic-interfaces `Metadata` protocol + """ + + repo_file_path: str + file_slice: FileSlice diff --git a/core/dbt/artifacts/resources/v1/semantic_model.py b/core/dbt/artifacts/resources/v1/semantic_model.py new file mode 100644 index 00000000000..3e021b2f469 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/semantic_model.py @@ -0,0 +1,272 @@ +import time +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Sequence + +from dbt.artifacts.resources import SourceFileMetadata +from dbt.artifacts.resources.base import GraphResource +from dbt.artifacts.resources.v1.components import DependsOn, RefArgs +from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_semantic_interfaces.references import ( + DimensionReference, + EntityReference, + LinkableElementReference, + MeasureReference, + SemanticModelReference, + TimeDimensionReference, +) +from dbt_semantic_interfaces.type_enums import ( + AggregationType, + DimensionType, + EntityType, + TimeGranularity, +) + +""" +The classes in this file are dataclasses which are used to construct the Semantic +Model node in dbt-core. Additionally, these classes need to at a minimum support +what is specified in their protocol definitions in dbt-semantic-interfaces. 
+Their protocol definitions can be found here: +https://github.com/dbt-labs/dbt-semantic-interfaces/blob/main/dbt_semantic_interfaces/protocols/semantic_model.py +""" + + +@dataclass +class Defaults(dbtClassMixin): + agg_time_dimension: Optional[str] = None + + +@dataclass +class NodeRelation(dbtClassMixin): + alias: str + schema_name: str # TODO: Could this be called simply "schema" so we could reuse StateRelation? + database: Optional[str] = None + relation_name: Optional[str] = "" + + +# ==================================== +# Dimension objects +# Dimension protocols: https://github.com/dbt-labs/dbt-semantic-interfaces/blob/main/dbt_semantic_interfaces/protocols/dimension.py +# ==================================== + + +@dataclass +class DimensionValidityParams(dbtClassMixin): + is_start: bool = False + is_end: bool = False + + +@dataclass +class DimensionTypeParams(dbtClassMixin): + time_granularity: TimeGranularity + validity_params: Optional[DimensionValidityParams] = None + + +@dataclass +class Dimension(dbtClassMixin): + name: str + type: DimensionType + description: Optional[str] = None + label: Optional[str] = None + is_partition: bool = False + type_params: Optional[DimensionTypeParams] = None + expr: Optional[str] = None + metadata: Optional[SourceFileMetadata] = None + + @property + def reference(self) -> DimensionReference: + return DimensionReference(element_name=self.name) + + @property + def time_dimension_reference(self) -> Optional[TimeDimensionReference]: + if self.type == DimensionType.TIME: + return TimeDimensionReference(element_name=self.name) + else: + return None + + @property + def validity_params(self) -> Optional[DimensionValidityParams]: + if self.type_params: + return self.type_params.validity_params + else: + return None + + +# ==================================== +# Entity objects +# Entity protocols: https://github.com/dbt-labs/dbt-semantic-interfaces/blob/main/dbt_semantic_interfaces/protocols/entity.py +# ==================================== + + +@dataclass +class Entity(dbtClassMixin): + name: str + type: EntityType + description: Optional[str] = None + label: Optional[str] = None + role: Optional[str] = None + expr: Optional[str] = None + + @property + def reference(self) -> EntityReference: + return EntityReference(element_name=self.name) + + @property + def is_linkable_entity_type(self) -> bool: + return self.type in (EntityType.PRIMARY, EntityType.UNIQUE, EntityType.NATURAL) + + +# ==================================== +# Measure objects +# Measure protocols: https://github.com/dbt-labs/dbt-semantic-interfaces/blob/main/dbt_semantic_interfaces/protocols/measure.py +# ==================================== + + +@dataclass +class MeasureAggregationParameters(dbtClassMixin): + percentile: Optional[float] = None + use_discrete_percentile: bool = False + use_approximate_percentile: bool = False + + +@dataclass +class NonAdditiveDimension(dbtClassMixin): + name: str + window_choice: AggregationType + window_groupings: List[str] + + +@dataclass +class Measure(dbtClassMixin): + name: str + agg: AggregationType + description: Optional[str] = None + label: Optional[str] = None + create_metric: bool = False + expr: Optional[str] = None + agg_params: Optional[MeasureAggregationParameters] = None + non_additive_dimension: Optional[NonAdditiveDimension] = None + agg_time_dimension: Optional[str] = None + + @property + def reference(self) -> MeasureReference: + return MeasureReference(element_name=self.name) + + +# ==================================== +# SemanticModel 
final parts +# ==================================== + + +@dataclass +class SemanticModelConfig(BaseConfig): + enabled: bool = True + group: Optional[str] = field( + default=None, + metadata=CompareBehavior.Exclude.meta(), + ) + meta: Dict[str, Any] = field( + default_factory=dict, + metadata=MergeBehavior.Update.meta(), + ) + + +@dataclass +class SemanticModel(GraphResource): + model: str + node_relation: Optional[NodeRelation] + description: Optional[str] = None + label: Optional[str] = None + defaults: Optional[Defaults] = None + entities: Sequence[Entity] = field(default_factory=list) + measures: Sequence[Measure] = field(default_factory=list) + dimensions: Sequence[Dimension] = field(default_factory=list) + metadata: Optional[SourceFileMetadata] = None + depends_on: DependsOn = field(default_factory=DependsOn) + refs: List[RefArgs] = field(default_factory=list) + created_at: float = field(default_factory=lambda: time.time()) + config: SemanticModelConfig = field(default_factory=SemanticModelConfig) + unrendered_config: Dict[str, Any] = field(default_factory=dict) + primary_entity: Optional[str] = None + group: Optional[str] = None + + @property + def entity_references(self) -> List[LinkableElementReference]: + return [entity.reference for entity in self.entities] + + @property + def dimension_references(self) -> List[LinkableElementReference]: + return [dimension.reference for dimension in self.dimensions] + + @property + def measure_references(self) -> List[MeasureReference]: + return [measure.reference for measure in self.measures] + + @property + def has_validity_dimensions(self) -> bool: + return any([dim.validity_params is not None for dim in self.dimensions]) + + @property + def validity_start_dimension(self) -> Optional[Dimension]: + validity_start_dims = [ + dim for dim in self.dimensions if dim.validity_params and dim.validity_params.is_start + ] + if not validity_start_dims: + return None + return validity_start_dims[0] + + @property + def validity_end_dimension(self) -> Optional[Dimension]: + validity_end_dims = [ + dim for dim in self.dimensions if dim.validity_params and dim.validity_params.is_end + ] + if not validity_end_dims: + return None + return validity_end_dims[0] + + @property + def partitions(self) -> List[Dimension]: # noqa: D + return [dim for dim in self.dimensions or [] if dim.is_partition] + + @property + def partition(self) -> Optional[Dimension]: + partitions = self.partitions + if not partitions: + return None + return partitions[0] + + @property + def reference(self) -> SemanticModelReference: + return SemanticModelReference(semantic_model_name=self.name) + + def checked_agg_time_dimension_for_measure( + self, measure_reference: MeasureReference + ) -> TimeDimensionReference: + measure: Optional[Measure] = None + for measure in self.measures: + if measure.reference == measure_reference: + measure = measure + + assert ( + measure is not None + ), f"No measure with name ({measure_reference.element_name}) in semantic_model with name ({self.name})" + + default_agg_time_dimension = ( + self.defaults.agg_time_dimension if self.defaults is not None else None + ) + + agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimension + assert agg_time_dimension_name is not None, ( + f"Aggregation time dimension for measure {measure.name} on semantic model {self.name} is not set! " + "To fix this either specify a default `agg_time_dimension` for the semantic model or define an " + "`agg_time_dimension` on the measure directly." 
+ ) + return TimeDimensionReference(element_name=agg_time_dimension_name) + + @property + def primary_entity_reference(self) -> Optional[EntityReference]: + return ( + EntityReference(element_name=self.primary_entity) + if self.primary_entity is not None + else None + ) diff --git a/core/dbt/artifacts/resources/v1/singular_test.py b/core/dbt/artifacts/resources/v1/singular_test.py new file mode 100644 index 00000000000..3b025d79e11 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/singular_test.py @@ -0,0 +1,14 @@ +from dataclasses import dataclass, field +from typing import Literal + +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import CompiledResource +from dbt.artifacts.resources.v1.config import TestConfig + + +@dataclass +class SingularTest(CompiledResource): + resource_type: Literal[NodeType.Test] + # Was not able to make mypy happy and keep the code working. We need to + # refactor the various configs. + config: TestConfig = field(default_factory=TestConfig) # type: ignore diff --git a/core/dbt/artifacts/resources/v1/snapshot.py b/core/dbt/artifacts/resources/v1/snapshot.py new file mode 100644 index 00000000000..464d94bae69 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/snapshot.py @@ -0,0 +1,88 @@ +from dataclasses import dataclass, field +from typing import Dict, List, Literal, Optional, Union + +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import CompiledResource, DeferRelation +from dbt.artifacts.resources.v1.config import NodeConfig +from dbt_common.dataclass_schema import ValidationError, dbtClassMixin + + +@dataclass +class SnapshotMetaColumnNames(dbtClassMixin): + dbt_valid_to: Optional[str] = None + dbt_valid_from: Optional[str] = None + dbt_scd_id: Optional[str] = None + dbt_updated_at: Optional[str] = None + + +@dataclass +class SnapshotConfig(NodeConfig): + materialized: str = "snapshot" + strategy: Optional[str] = None + unique_key: Optional[str] = None + target_schema: Optional[str] = None + target_database: Optional[str] = None + updated_at: Optional[str] = None + # Not using Optional because of serialization issues with a Union of str and List[str] + check_cols: Union[str, List[str], None] = None + snapshot_meta_column_names: SnapshotMetaColumnNames = field( + default_factory=SnapshotMetaColumnNames + ) + + @property + def snapshot_table_column_names(self): + return { + "dbt_valid_from": self.snapshot_meta_column_names.dbt_valid_from or "dbt_valid_from", + "dbt_valid_to": self.snapshot_meta_column_names.dbt_valid_to or "dbt_valid_to", + "dbt_scd_id": self.snapshot_meta_column_names.dbt_scd_id or "dbt_scd_id", + "dbt_updated_at": self.snapshot_meta_column_names.dbt_updated_at or "dbt_updated_at", + } + + def final_validate(self): + if not self.strategy or not self.unique_key: + raise ValidationError( + "Snapshots must be configured with a 'strategy' and 'unique_key'." + ) + if self.strategy == "check": + if not self.check_cols: + raise ValidationError( + "A snapshot configured with the check strategy must " + "specify a check_cols configuration." + ) + if isinstance(self.check_cols, str) and self.check_cols != "all": + raise ValidationError( + f"Invalid value for 'check_cols': {self.check_cols}. " + "Expected 'all' or a list of strings." + ) + elif self.strategy == "timestamp": + if not self.updated_at: + raise ValidationError( + "A snapshot configured with the timestamp strategy " + "must specify an updated_at configuration." 
+ ) + if self.check_cols: + raise ValidationError("A 'timestamp' snapshot should not have 'check_cols'") + # If the strategy is not 'check' or 'timestamp' it's a custom strategy, + # formerly supported with GenericSnapshotConfig + + if self.materialized and self.materialized != "snapshot": + raise ValidationError("A snapshot must have a materialized value of 'snapshot'") + + # Called by "calculate_node_config_dict" in ContextConfigGenerator + def finalize_and_validate(self): + data = self.to_dict(omit_none=True) + self.validate(data) + return self.from_dict(data) + + +@dataclass +class Snapshot(CompiledResource): + resource_type: Literal[NodeType.Snapshot] + config: SnapshotConfig + defer_relation: Optional[DeferRelation] = None + + def __post_serialize__(self, dct, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if context and context.get("artifact") and "defer_relation" in dct: + del dct["defer_relation"] + return dct diff --git a/core/dbt/artifacts/resources/v1/source_definition.py b/core/dbt/artifacts/resources/v1/source_definition.py new file mode 100644 index 00000000000..6c1c3679a00 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/source_definition.py @@ -0,0 +1,73 @@ +import time +from dataclasses import dataclass, field +from typing import Any, Dict, List, Literal, Optional, Union + +from dbt.artifacts.resources.base import GraphResource +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import ( + ColumnInfo, + FreshnessThreshold, + HasRelationMetadata, + Quoting, +) +from dbt.artifacts.resources.v1.config import BaseConfig +from dbt_common.contracts.config.properties import AdditionalPropertiesAllowed +from dbt_common.contracts.util import Mergeable +from dbt_common.exceptions import CompilationError + + +@dataclass +class SourceConfig(BaseConfig): + enabled: bool = True + event_time: Any = None + + +@dataclass +class ExternalPartition(AdditionalPropertiesAllowed): + name: str = "" + description: str = "" + data_type: str = "" + meta: Dict[str, Any] = field(default_factory=dict) + + def __post_init__(self): + if self.name == "" or self.data_type == "": + raise CompilationError("External partition columns must have names and data types") + + +@dataclass +class ExternalTable(AdditionalPropertiesAllowed, Mergeable): + location: Optional[str] = None + file_format: Optional[str] = None + row_format: Optional[str] = None + tbl_properties: Optional[str] = None + partitions: Optional[Union[List[str], List[ExternalPartition]]] = None + + def __bool__(self): + return self.location is not None + + +@dataclass +class ParsedSourceMandatory(GraphResource, HasRelationMetadata): + source_name: str + source_description: str + loader: str + identifier: str + resource_type: Literal[NodeType.Source] + + +@dataclass +class SourceDefinition(ParsedSourceMandatory): + quoting: Quoting = field(default_factory=Quoting) + loaded_at_field: Optional[str] = None + freshness: Optional[FreshnessThreshold] = None + external: Optional[ExternalTable] = None + description: str = "" + columns: Dict[str, ColumnInfo] = field(default_factory=dict) + meta: Dict[str, Any] = field(default_factory=dict) + source_meta: Dict[str, Any] = field(default_factory=dict) + tags: List[str] = field(default_factory=list) + config: SourceConfig = field(default_factory=SourceConfig) + patch_path: Optional[str] = None + unrendered_config: Dict[str, Any] = field(default_factory=dict) + relation_name: Optional[str] = None + created_at: float = 
field(default_factory=lambda: time.time()) diff --git a/core/dbt/artifacts/resources/v1/sql_operation.py b/core/dbt/artifacts/resources/v1/sql_operation.py new file mode 100644 index 00000000000..f669471f1dd --- /dev/null +++ b/core/dbt/artifacts/resources/v1/sql_operation.py @@ -0,0 +1,10 @@ +from dataclasses import dataclass +from typing import Literal + +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.resources.v1.components import CompiledResource + + +@dataclass +class SqlOperation(CompiledResource): + resource_type: Literal[NodeType.SqlOperation] diff --git a/core/dbt/artifacts/resources/v1/unit_test_definition.py b/core/dbt/artifacts/resources/v1/unit_test_definition.py new file mode 100644 index 00000000000..5c18538a733 --- /dev/null +++ b/core/dbt/artifacts/resources/v1/unit_test_definition.py @@ -0,0 +1,76 @@ +import time +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Sequence, Union + +from dbt.artifacts.resources import DependsOn, NodeVersion +from dbt.artifacts.resources.base import GraphResource +from dbt.artifacts.resources.v1.config import list_str, metas +from dbt_common.contracts.config.base import BaseConfig, CompareBehavior, MergeBehavior +from dbt_common.contracts.config.metadata import ShowBehavior +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin + + +@dataclass +class UnitTestConfig(BaseConfig): + tags: Union[str, List[str]] = field( + default_factory=list_str, + metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude), + ) + meta: Dict[str, Any] = field( + default_factory=dict, + metadata=MergeBehavior.Update.meta(), + ) + + +class UnitTestFormat(StrEnum): + CSV = "csv" + Dict = "dict" + SQL = "sql" + + +@dataclass +class UnitTestInputFixture(dbtClassMixin): + input: str + rows: Optional[Union[str, List[Dict[str, Any]]]] = None + format: UnitTestFormat = UnitTestFormat.Dict + fixture: Optional[str] = None + + +@dataclass +class UnitTestOverrides(dbtClassMixin): + macros: Dict[str, Any] = field(default_factory=dict) + vars: Dict[str, Any] = field(default_factory=dict) + env_vars: Dict[str, Any] = field(default_factory=dict) + + +@dataclass +class UnitTestNodeVersions(dbtClassMixin): + include: Optional[List[NodeVersion]] = None + exclude: Optional[List[NodeVersion]] = None + + +@dataclass +class UnitTestOutputFixture(dbtClassMixin): + rows: Optional[Union[str, List[Dict[str, Any]]]] = None + format: UnitTestFormat = UnitTestFormat.Dict + fixture: Optional[str] = None + + +@dataclass +class UnitTestDefinitionMandatory: + model: str + given: Sequence[UnitTestInputFixture] + expect: UnitTestOutputFixture + + +@dataclass +class UnitTestDefinition(GraphResource, UnitTestDefinitionMandatory): + description: str = "" + overrides: Optional[UnitTestOverrides] = None + depends_on: DependsOn = field(default_factory=DependsOn) + config: UnitTestConfig = field(default_factory=UnitTestConfig) + checksum: Optional[str] = None + schema: Optional[str] = None + created_at: float = field(default_factory=lambda: time.time()) + versions: Optional[UnitTestNodeVersions] = None + version: Optional[NodeVersion] = None diff --git a/core/dbt/task/parse.py b/core/dbt/artifacts/schemas/__init__.py similarity index 100% rename from core/dbt/task/parse.py rename to core/dbt/artifacts/schemas/__init__.py diff --git a/core/dbt/artifacts/schemas/base.py b/core/dbt/artifacts/schemas/base.py new file mode 100644 index 00000000000..6cca031603e --- /dev/null +++ b/core/dbt/artifacts/schemas/base.py @@ 
-0,0 +1,178 @@ +import dataclasses +import functools +from datetime import datetime +from typing import Any, ClassVar, Dict, Optional, Type, TypeVar + +from mashumaro.jsonschema import build_json_schema +from mashumaro.jsonschema.dialects import DRAFT_2020_12 + +from dbt.artifacts.exceptions import IncompatibleSchemaError +from dbt.version import __version__ +from dbt_common.clients.system import read_json, write_json +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.events.functions import get_metadata_vars +from dbt_common.exceptions import DbtInternalError, DbtRuntimeError +from dbt_common.invocation import get_invocation_id + +BASE_SCHEMAS_URL = "https://schemas.getdbt.com/" +SCHEMA_PATH = "dbt/{name}/v{version}.json" + + +@dataclasses.dataclass +class SchemaVersion: + name: str + version: int + + @property + def path(self) -> str: + return SCHEMA_PATH.format(name=self.name, version=self.version) + + def __str__(self) -> str: + return BASE_SCHEMAS_URL + self.path + + +class Writable: + def write(self, path: str): + write_json(path, self.to_dict(omit_none=False, context={"artifact": True})) # type: ignore + + +class Readable: + @classmethod + def read(cls, path: str): + try: + data = read_json(path) + except (EnvironmentError, ValueError) as exc: + raise DbtRuntimeError( + f'Could not read {cls.__name__} at "{path}" as JSON: {exc}' + ) from exc + + return cls.from_dict(data) # type: ignore + + +# This is used in the ManifestMetadata, RunResultsMetadata, RunOperationResultMetadata, +# FreshnessMetadata, and CatalogMetadata classes +@dataclasses.dataclass +class BaseArtifactMetadata(dbtClassMixin): + dbt_schema_version: str + dbt_version: str = __version__ + generated_at: datetime = dataclasses.field(default_factory=datetime.utcnow) + invocation_id: Optional[str] = dataclasses.field(default_factory=get_invocation_id) + env: Dict[str, str] = dataclasses.field(default_factory=get_metadata_vars) + + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if dct["generated_at"] and dct["generated_at"].endswith("+00:00"): + dct["generated_at"] = dct["generated_at"].replace("+00:00", "") + "Z" + return dct + + +# This is used as a class decorator to set the schema_version in the +# 'dbt_schema_version' class attribute. (It's copied into the metadata objects.) 
+# Name attributes of SchemaVersion in classes with the 'schema_version' decorator: +# manifest +# run-results +# run-operation-result +# sources +# catalog +# remote-compile-result +# remote-execution-result +# remote-run-result +S = TypeVar("S", bound="VersionedSchema") + + +def schema_version(name: str, version: int): + def inner(cls: Type[S]): + cls.dbt_schema_version = SchemaVersion( + name=name, + version=version, + ) + return cls + + return inner + + +# This is used in the ArtifactMixin and RemoteCompileResultMixin classes +@dataclasses.dataclass +class VersionedSchema(dbtClassMixin): + dbt_schema_version: ClassVar[SchemaVersion] + + @classmethod + @functools.lru_cache + def json_schema(cls) -> Dict[str, Any]: + json_schema_obj = build_json_schema(cls, dialect=DRAFT_2020_12, with_dialect_uri=True) + json_schema = json_schema_obj.to_dict() + json_schema["$id"] = str(cls.dbt_schema_version) + return json_schema + + @classmethod + def is_compatible_version(cls, schema_version): + compatible_versions = [str(cls.dbt_schema_version)] + if hasattr(cls, "compatible_previous_versions"): + for name, version in cls.compatible_previous_versions(): + compatible_versions.append(str(SchemaVersion(name, version))) + return str(schema_version) in compatible_versions + + @classmethod + def read_and_check_versions(cls, path: str): + try: + data = read_json(path) + except (EnvironmentError, ValueError) as exc: + raise DbtRuntimeError( + f'Could not read {cls.__name__} at "{path}" as JSON: {exc}' + ) from exc + + # Check metadata version. There is a class variable 'dbt_schema_version', but + # that doesn't show up in artifacts, where it only exists in the 'metadata' + # dictionary. + if hasattr(cls, "dbt_schema_version"): + if "metadata" in data and "dbt_schema_version" in data["metadata"]: + previous_schema_version = data["metadata"]["dbt_schema_version"] + # cls.dbt_schema_version is a SchemaVersion object + if not cls.is_compatible_version(previous_schema_version): + raise IncompatibleSchemaError( + expected=str(cls.dbt_schema_version), + found=previous_schema_version, + ) + + return cls.upgrade_schema_version(data) + + @classmethod + def upgrade_schema_version(cls, data): + """This will modify the data (dictionary) passed in to match the current + artifact schema code, if necessary. This is the default method, which + just returns the instantiated object via from_dict.""" + return cls.from_dict(data) + + +T = TypeVar("T", bound="ArtifactMixin") + + +# metadata should really be a Generic[T_M] where T_M is a TypeVar bound to +# BaseArtifactMetadata. Unfortunately this isn't possible due to a mypy issue: +# https://github.com/python/mypy/issues/7520 +# This is used in the WritableManifest, RunResultsArtifact, RunOperationResultsArtifact, +# and CatalogArtifact +@dataclasses.dataclass(init=False) +class ArtifactMixin(VersionedSchema, Writable, Readable): + metadata: BaseArtifactMetadata + + @classmethod + def validate(cls, data): + super().validate(data) + if cls.dbt_schema_version is None: + raise DbtInternalError("Cannot call from_dict with no schema version!") + + +def get_artifact_schema_version(dct: dict) -> int: + schema_version = dct.get("metadata", {}).get("dbt_schema_version", None) + if not schema_version: + raise ValueError("Artifact is missing schema version") + + # schema_version is in this format: https://schemas.getdbt.com/dbt/manifest/v10.json + # What the code below is doing: + # 1. Split on "/" – v10.json + # 2. Split on "." – v10 + # 3. Skip first character – 10 + # 4. 
Convert to int + # TODO: If this gets more complicated, turn into a regex + return int(schema_version.split("/")[-1].split(".")[0][1:]) diff --git a/core/dbt/artifacts/schemas/catalog/__init__.py b/core/dbt/artifacts/schemas/catalog/__init__.py new file mode 100644 index 00000000000..c134931c2d7 --- /dev/null +++ b/core/dbt/artifacts/schemas/catalog/__init__.py @@ -0,0 +1,11 @@ +# alias to latest +from dbt.artifacts.schemas.catalog.v1.catalog import * # noqa +from dbt_common.contracts.metadata import ( + CatalogKey, + CatalogTable, + ColumnMap, + ColumnMetadata, + StatsDict, + StatsItem, + TableMetadata, +) diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/columns_spec_ddl.sql b/core/dbt/artifacts/schemas/catalog/v1/__init__.py similarity index 100% rename from plugins/postgres/dbt/include/postgres/macros/utils/columns_spec_ddl.sql rename to core/dbt/artifacts/schemas/catalog/v1/__init__.py diff --git a/core/dbt/artifacts/schemas/catalog/v1/catalog.py b/core/dbt/artifacts/schemas/catalog/v1/catalog.py new file mode 100644 index 00000000000..b210920e5d5 --- /dev/null +++ b/core/dbt/artifacts/schemas/catalog/v1/catalog.py @@ -0,0 +1,59 @@ +from dataclasses import dataclass, field +from datetime import datetime +from typing import Any, Dict, List, Optional, Union + +from dbt.artifacts.schemas.base import ( + ArtifactMixin, + BaseArtifactMetadata, + schema_version, +) +from dbt_common.contracts.metadata import CatalogTable +from dbt_common.dataclass_schema import dbtClassMixin + +Primitive = Union[bool, str, float, None] +PrimitiveDict = Dict[str, Primitive] + + +@dataclass +class CatalogMetadata(BaseArtifactMetadata): + dbt_schema_version: str = field( + default_factory=lambda: str(CatalogArtifact.dbt_schema_version) + ) + + +@dataclass +class CatalogResults(dbtClassMixin): + nodes: Dict[str, CatalogTable] + sources: Dict[str, CatalogTable] + errors: Optional[List[str]] = None + _compile_results: Optional[Any] = None + + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) + if "_compile_results" in dct: + del dct["_compile_results"] + return dct + + +@dataclass +@schema_version("catalog", 1) +class CatalogArtifact(CatalogResults, ArtifactMixin): + metadata: CatalogMetadata + + @classmethod + def from_results( + cls, + generated_at: datetime, + nodes: Dict[str, CatalogTable], + sources: Dict[str, CatalogTable], + compile_results: Optional[Any], + errors: Optional[List[str]], + ) -> "CatalogArtifact": + meta = CatalogMetadata(generated_at=generated_at) + return cls( + metadata=meta, + nodes=nodes, + sources=sources, + errors=errors, + _compile_results=compile_results, + ) diff --git a/core/dbt/artifacts/schemas/freshness/__init__.py b/core/dbt/artifacts/schemas/freshness/__init__.py new file mode 100644 index 00000000000..4d04d4acf97 --- /dev/null +++ b/core/dbt/artifacts/schemas/freshness/__init__.py @@ -0,0 +1 @@ +from dbt.artifacts.schemas.freshness.v3.freshness import * # noqa diff --git a/tests/unit/test_compiler.py b/core/dbt/artifacts/schemas/freshness/v3/__init__.py similarity index 100% rename from tests/unit/test_compiler.py rename to core/dbt/artifacts/schemas/freshness/v3/__init__.py diff --git a/core/dbt/artifacts/schemas/freshness/v3/freshness.py b/core/dbt/artifacts/schemas/freshness/v3/freshness.py new file mode 100644 index 00000000000..fe489994268 --- /dev/null +++ b/core/dbt/artifacts/schemas/freshness/v3/freshness.py @@ -0,0 +1,158 @@ +from dataclasses import dataclass, field +from datetime 
import datetime +from typing import Any, Dict, List, Optional, Sequence, Union + +from dbt.artifacts.resources import FreshnessThreshold +from dbt.artifacts.schemas.base import ( + ArtifactMixin, + BaseArtifactMetadata, + VersionedSchema, + schema_version, +) +from dbt.artifacts.schemas.results import ( + ExecutionResult, + FreshnessStatus, + NodeResult, + TimingInfo, +) +from dbt.contracts.graph.nodes import SourceDefinition +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin +from dbt_common.exceptions import DbtInternalError + + +@dataclass +class SourceFreshnessResult(NodeResult): + node: SourceDefinition + status: FreshnessStatus + max_loaded_at: datetime + snapshotted_at: datetime + age: float + + @property + def skipped(self): + return False + + +@dataclass +class PartialSourceFreshnessResult(NodeResult): + status: FreshnessStatus + + @property + def skipped(self): + return False + + +FreshnessNodeResult = Union[PartialSourceFreshnessResult, SourceFreshnessResult] + + +@dataclass +class FreshnessMetadata(BaseArtifactMetadata): + dbt_schema_version: str = field( + default_factory=lambda: str(FreshnessExecutionResultArtifact.dbt_schema_version) + ) + + +@dataclass +class FreshnessResult(ExecutionResult): + metadata: FreshnessMetadata + results: Sequence[FreshnessNodeResult] + + @classmethod + def from_node_results( + cls, + results: List[FreshnessNodeResult], + elapsed_time: float, + generated_at: datetime, + ): + meta = FreshnessMetadata(generated_at=generated_at) + return cls(metadata=meta, results=results, elapsed_time=elapsed_time) + + def write(self, path): + FreshnessExecutionResultArtifact.from_result(self).write(path) + + +@dataclass +class SourceFreshnessOutput(dbtClassMixin): + unique_id: str + max_loaded_at: datetime + snapshotted_at: datetime + max_loaded_at_time_ago_in_s: float + status: FreshnessStatus + criteria: FreshnessThreshold + adapter_response: Dict[str, Any] + timing: List[TimingInfo] + thread_id: str + execution_time: float + + +class FreshnessErrorEnum(StrEnum): + runtime_error = "runtime error" + + +@dataclass +class SourceFreshnessRuntimeError(dbtClassMixin): + unique_id: str + error: Optional[Union[str, int]] + status: FreshnessErrorEnum + + +FreshnessNodeOutput = Union[SourceFreshnessRuntimeError, SourceFreshnessOutput] + + +@dataclass +@schema_version("sources", 3) +class FreshnessExecutionResultArtifact( + ArtifactMixin, + VersionedSchema, +): + metadata: FreshnessMetadata + results: Sequence[FreshnessNodeOutput] + elapsed_time: float + + @classmethod + def from_result(cls, base: FreshnessResult): + processed = [ + process_freshness_result(r) + for r in base.results + if isinstance(r, SourceFreshnessResult) + ] + return cls( + metadata=base.metadata, + results=processed, + elapsed_time=base.elapsed_time, + ) + + +def process_freshness_result(result: FreshnessNodeResult) -> FreshnessNodeOutput: + unique_id = result.node.unique_id + if result.status == FreshnessStatus.RuntimeErr: + return SourceFreshnessRuntimeError( + unique_id=unique_id, + error=result.message, + status=FreshnessErrorEnum.runtime_error, + ) + + # we know that this must be a SourceFreshnessResult + if not isinstance(result, SourceFreshnessResult): + raise DbtInternalError( + "Got {} instead of a SourceFreshnessResult for a " + "non-error result in freshness execution!".format(type(result)) + ) + # if we're here, we must have a non-None freshness threshold + criteria = result.node.freshness + if criteria is None: + raise DbtInternalError( + "Somehow evaluated a freshness result 
for a source that has no freshness criteria!" + ) + return SourceFreshnessOutput( + unique_id=unique_id, + max_loaded_at=result.max_loaded_at, + snapshotted_at=result.snapshotted_at, + max_loaded_at_time_ago_in_s=result.age, + status=result.status, + criteria=criteria, + adapter_response=result.adapter_response, + timing=result.timing, + thread_id=result.thread_id, + execution_time=result.execution_time, + ) diff --git a/core/dbt/artifacts/schemas/manifest/__init__.py b/core/dbt/artifacts/schemas/manifest/__init__.py new file mode 100644 index 00000000000..904f952cff1 --- /dev/null +++ b/core/dbt/artifacts/schemas/manifest/__init__.py @@ -0,0 +1,2 @@ +# alias to latest +from dbt.artifacts.schemas.manifest.v12.manifest import * # noqa diff --git a/third-party-stubs/mashumaro/dialects/__init__.pyi b/core/dbt/artifacts/schemas/manifest/v12/__init__.py similarity index 100% rename from third-party-stubs/mashumaro/dialects/__init__.pyi rename to core/dbt/artifacts/schemas/manifest/v12/__init__.py diff --git a/core/dbt/artifacts/schemas/manifest/v12/manifest.py b/core/dbt/artifacts/schemas/manifest/v12/manifest.py new file mode 100644 index 00000000000..cc13fca43f5 --- /dev/null +++ b/core/dbt/artifacts/schemas/manifest/v12/manifest.py @@ -0,0 +1,182 @@ +from dataclasses import dataclass, field +from typing import Any, Dict, Iterable, List, Mapping, Optional, Tuple, Union +from uuid import UUID + +from dbt.artifacts.resources import ( + Analysis, + Documentation, + Exposure, + GenericTest, + Group, + HookNode, + Macro, + Metric, + Model, + SavedQuery, + Seed, + SemanticModel, + SingularTest, + Snapshot, + SourceDefinition, + SqlOperation, + UnitTestDefinition, +) +from dbt.artifacts.schemas.base import ( + ArtifactMixin, + BaseArtifactMetadata, + get_artifact_schema_version, + schema_version, +) +from dbt.artifacts.schemas.upgrades import upgrade_manifest_json + +NodeEdgeMap = Dict[str, List[str]] +UniqueID = str +ManifestResource = Union[ + Seed, + Analysis, + SingularTest, + HookNode, + Model, + SqlOperation, + GenericTest, + Snapshot, +] +DisabledManifestResource = Union[ + ManifestResource, + SourceDefinition, + Exposure, + Metric, + SavedQuery, + SemanticModel, + UnitTestDefinition, +] + + +@dataclass +class ManifestMetadata(BaseArtifactMetadata): + """Metadata for the manifest.""" + + dbt_schema_version: str = field( + default_factory=lambda: str(WritableManifest.dbt_schema_version) + ) + project_name: Optional[str] = field( + default=None, + metadata={ + "description": "Name of the root project", + }, + ) + project_id: Optional[str] = field( + default=None, + metadata={ + "description": "A unique identifier for the project, hashed from the project name", + }, + ) + user_id: Optional[UUID] = field( + default=None, + metadata={ + "description": "A unique identifier for the user", + }, + ) + send_anonymous_usage_stats: Optional[bool] = field( + default=None, + metadata=dict( + description=("Whether dbt is configured to send anonymous usage statistics") + ), + ) + adapter_type: Optional[str] = field( + default=None, + metadata=dict(description="The type name of the adapter"), + ) + + @classmethod + def default(cls): + return cls( + dbt_schema_version=str(WritableManifest.dbt_schema_version), + ) + + +@dataclass +@schema_version("manifest", 12) +class WritableManifest(ArtifactMixin): + nodes: Mapping[UniqueID, ManifestResource] = field( + metadata=dict(description=("The nodes defined in the dbt project and its dependencies")) + ) + sources: Mapping[UniqueID, SourceDefinition] = field( + 
metadata=dict(description=("The sources defined in the dbt project and its dependencies")) + ) + macros: Mapping[UniqueID, Macro] = field( + metadata=dict(description=("The macros defined in the dbt project and its dependencies")) + ) + docs: Mapping[UniqueID, Documentation] = field( + metadata=dict(description=("The docs defined in the dbt project and its dependencies")) + ) + exposures: Mapping[UniqueID, Exposure] = field( + metadata=dict( + description=("The exposures defined in the dbt project and its dependencies") + ) + ) + metrics: Mapping[UniqueID, Metric] = field( + metadata=dict(description=("The metrics defined in the dbt project and its dependencies")) + ) + groups: Mapping[UniqueID, Group] = field( + metadata=dict(description=("The groups defined in the dbt project")) + ) + selectors: Mapping[UniqueID, Any] = field( + metadata=dict(description=("The selectors defined in selectors.yml")) + ) + disabled: Optional[Mapping[UniqueID, List[DisabledManifestResource]]] = field( + metadata=dict(description="A mapping of the disabled nodes in the target") + ) + parent_map: Optional[NodeEdgeMap] = field( + metadata=dict( + description="A mapping from child nodes to their dependencies", + ) + ) + child_map: Optional[NodeEdgeMap] = field( + metadata=dict( + description="A mapping from parent nodes to their dependents", + ) + ) + group_map: Optional[NodeEdgeMap] = field( + metadata=dict( + description="A mapping from group names to their nodes", + ) + ) + saved_queries: Mapping[UniqueID, SavedQuery] = field( + metadata=dict(description=("The saved queries defined in the dbt project")) + ) + semantic_models: Mapping[UniqueID, SemanticModel] = field( + metadata=dict(description=("The semantic models defined in the dbt project")) + ) + metadata: ManifestMetadata = field( + metadata=dict( + description="Metadata about the manifest", + ) + ) + unit_tests: Mapping[UniqueID, UnitTestDefinition] = field( + metadata=dict( + description="The unit tests defined in the project", + ) + ) + + @classmethod + def compatible_previous_versions(cls) -> Iterable[Tuple[str, int]]: + return [ + ("manifest", 4), + ("manifest", 5), + ("manifest", 6), + ("manifest", 7), + ("manifest", 8), + ("manifest", 9), + ("manifest", 10), + ("manifest", 11), + ] + + @classmethod + def upgrade_schema_version(cls, data): + """This overrides the "upgrade_schema_version" call in VersionedSchema (via + ArtifactMixin) to modify the dictionary passed in from earlier versions of the manifest.""" + manifest_schema_version = get_artifact_schema_version(data) + if manifest_schema_version < cls.dbt_schema_version.version: + data = upgrade_manifest_json(data, manifest_schema_version) + return cls.from_dict(data) diff --git a/core/dbt/artifacts/schemas/results.py b/core/dbt/artifacts/schemas/results.py new file mode 100644 index 00000000000..f3f830ca927 --- /dev/null +++ b/core/dbt/artifacts/schemas/results.py @@ -0,0 +1,137 @@ +from dataclasses import dataclass +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional, Sequence, Union + +from dbt.contracts.graph.nodes import ResultNode +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin +from dbt_common.events.helpers import datetime_to_json_string +from dbt_common.utils import cast_to_int, cast_to_str + + +@dataclass +class TimingInfo(dbtClassMixin): + name: str + started_at: Optional[datetime] = None + completed_at: Optional[datetime] = None + + def begin(self): + self.started_at = datetime.utcnow() + + def end(self): + self.completed_at = 
datetime.utcnow() + + def to_msg_dict(self): + msg_dict = {"name": self.name} + if self.started_at: + msg_dict["started_at"] = datetime_to_json_string(self.started_at) + if self.completed_at: + msg_dict["completed_at"] = datetime_to_json_string(self.completed_at) + return msg_dict + + +# This is a context manager +class collect_timing_info: + def __init__(self, name: str, callback: Callable[[TimingInfo], None]) -> None: + self.timing_info = TimingInfo(name=name) + self.callback = callback + + def __enter__(self): + self.timing_info.begin() + + def __exit__(self, exc_type, exc_value, traceback): + self.timing_info.end() + self.callback(self.timing_info) + + +class RunningStatus(StrEnum): + Started = "started" + Compiling = "compiling" + Executing = "executing" + + +class NodeStatus(StrEnum): + Success = "success" + Error = "error" + Fail = "fail" + Warn = "warn" + Skipped = "skipped" + Pass = "pass" + RuntimeErr = "runtime error" + + +class RunStatus(StrEnum): + Success = NodeStatus.Success + Error = NodeStatus.Error + Skipped = NodeStatus.Skipped + + +class TestStatus(StrEnum): + __test__ = False + Pass = NodeStatus.Pass + Error = NodeStatus.Error + Fail = NodeStatus.Fail + Warn = NodeStatus.Warn + Skipped = NodeStatus.Skipped + + +class FreshnessStatus(StrEnum): + Pass = NodeStatus.Pass + Warn = NodeStatus.Warn + Error = NodeStatus.Error + RuntimeErr = NodeStatus.RuntimeErr + + +@dataclass +class BaseResult(dbtClassMixin): + status: Union[RunStatus, TestStatus, FreshnessStatus] + timing: List[TimingInfo] + thread_id: str + execution_time: float + adapter_response: Dict[str, Any] + message: Optional[str] + failures: Optional[int] + + @classmethod + def __pre_deserialize__(cls, data): + data = super().__pre_deserialize__(data) + if "message" not in data: + data["message"] = None + if "failures" not in data: + data["failures"] = None + return data + + def to_msg_dict(self): + msg_dict = { + "status": str(self.status), + "message": cast_to_str(self.message), + "thread": self.thread_id, + "execution_time": self.execution_time, + "num_failures": cast_to_int(self.failures), + "timing_info": [ti.to_msg_dict() for ti in self.timing], + "adapter_response": self.adapter_response, + } + return msg_dict + + +@dataclass +class NodeResult(BaseResult): + node: ResultNode + + +@dataclass +class ExecutionResult(dbtClassMixin): + results: Sequence[BaseResult] + elapsed_time: float + + def __len__(self): + return len(self.results) + + def __iter__(self): + return iter(self.results) + + def __getitem__(self, idx): + return self.results[idx] + + +# due to issues with typing.Union collapsing subclasses, this can't subclass +# PartialResult diff --git a/core/dbt/artifacts/schemas/run/__init__.py b/core/dbt/artifacts/schemas/run/__init__.py new file mode 100644 index 00000000000..5229dc92894 --- /dev/null +++ b/core/dbt/artifacts/schemas/run/__init__.py @@ -0,0 +1,2 @@ +# alias to latest +from dbt.artifacts.schemas.run.v5.run import * # noqa diff --git a/core/dbt/artifacts/schemas/run/v5/__init__.py b/core/dbt/artifacts/schemas/run/v5/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/core/dbt/artifacts/schemas/run/v5/run.py b/core/dbt/artifacts/schemas/run/v5/run.py new file mode 100644 index 00000000000..33a5859ccc7 --- /dev/null +++ b/core/dbt/artifacts/schemas/run/v5/run.py @@ -0,0 +1,175 @@ +import copy +import threading +from dataclasses import dataclass, field +from datetime import datetime +from typing import Any, Dict, Iterable, Optional, Sequence, Tuple + +# 
https://github.com/dbt-labs/dbt-core/issues/10098 +# Needed for Mashumaro serialization of RunResult below +# TODO: investigate alternative approaches to restore conditional import +# if TYPE_CHECKING: +import agate + +from dbt.artifacts.resources import CompiledResource +from dbt.artifacts.schemas.base import ( + ArtifactMixin, + BaseArtifactMetadata, + get_artifact_schema_version, + schema_version, +) +from dbt.artifacts.schemas.results import ( + BaseResult, + ExecutionResult, + NodeResult, + ResultNode, + RunStatus, +) +from dbt.exceptions import scrub_secrets +from dbt_common.clients.system import write_json +from dbt_common.constants import SECRET_ENV_PREFIX + + +@dataclass +class RunResult(NodeResult): + agate_table: Optional["agate.Table"] = field( + default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None} + ) + + @property + def skipped(self): + return self.status == RunStatus.Skipped + + @classmethod + def from_node(cls, node: ResultNode, status: RunStatus, message: Optional[str]): + thread_id = threading.current_thread().name + return RunResult( + status=status, + thread_id=thread_id, + execution_time=0, + timing=[], + message=message, + node=node, + adapter_response={}, + failures=None, + ) + + +@dataclass +class RunResultsMetadata(BaseArtifactMetadata): + dbt_schema_version: str = field( + default_factory=lambda: str(RunResultsArtifact.dbt_schema_version) + ) + + +@dataclass +class RunResultOutput(BaseResult): + unique_id: str + compiled: Optional[bool] + compiled_code: Optional[str] + relation_name: Optional[str] + + +def process_run_result(result: RunResult) -> RunResultOutput: + + compiled = isinstance(result.node, CompiledResource) + + return RunResultOutput( + unique_id=result.node.unique_id, + status=result.status, + timing=result.timing, + thread_id=result.thread_id, + execution_time=result.execution_time, + message=result.message, + adapter_response=result.adapter_response, + failures=result.failures, + compiled=result.node.compiled if compiled else None, # type:ignore + compiled_code=result.node.compiled_code if compiled else None, # type:ignore + relation_name=result.node.relation_name if compiled else None, # type:ignore + ) + + +@dataclass +class RunExecutionResult( + ExecutionResult, +): + results: Sequence[RunResult] + args: Dict[str, Any] = field(default_factory=dict) + generated_at: datetime = field(default_factory=datetime.utcnow) + + def write(self, path: str): + writable = RunResultsArtifact.from_execution_results( + results=self.results, + elapsed_time=self.elapsed_time, + generated_at=self.generated_at, + args=self.args, + ) + writable.write(path) + + +@dataclass +@schema_version("run-results", 6) +class RunResultsArtifact(ExecutionResult, ArtifactMixin): + results: Sequence[RunResultOutput] + args: Dict[str, Any] = field(default_factory=dict) + + @classmethod + def from_execution_results( + cls, + results: Sequence[RunResult], + elapsed_time: float, + generated_at: datetime, + args: Dict, + ): + processed_results = [ + process_run_result(result) for result in results if isinstance(result, RunResult) + ] + meta = RunResultsMetadata( + dbt_schema_version=str(cls.dbt_schema_version), + generated_at=generated_at, + ) + + secret_vars = [ + v for k, v in args["vars"].items() if k.startswith(SECRET_ENV_PREFIX) and v.strip() + ] + + scrubbed_args = copy.deepcopy(args) + + # scrub secrets in invocation command + scrubbed_args["invocation_command"] = scrub_secrets( + scrubbed_args["invocation_command"], secret_vars + ) + + # scrub secrets 
in vars dict + scrubbed_args["vars"] = { + k: scrub_secrets(v, secret_vars) for k, v in scrubbed_args["vars"].items() + } + + return cls( + metadata=meta, results=processed_results, elapsed_time=elapsed_time, args=scrubbed_args + ) + + @classmethod + def compatible_previous_versions(cls) -> Iterable[Tuple[str, int]]: + return [ + ("run-results", 4), + ("run-results", 5), + ] + + @classmethod + def upgrade_schema_version(cls, data): + """This overrides the "upgrade_schema_version" call in VersionedSchema (via + ArtifactMixin) to modify the dictionary passed in from earlier versions of the run_results. + """ + run_results_schema_version = get_artifact_schema_version(data) + # If less than the current version (v5), preprocess contents to match latest schema version + if run_results_schema_version <= 5: + # In v5, we added 'compiled' attributes to each result entry + # Going forward, dbt expects these to be populated + for result in data["results"]: + result["compiled"] = False + result["compiled_code"] = "" + result["relation_name"] = "" + return cls.from_dict(data) + + def write(self, path: str): + write_json(path, self.to_dict(omit_none=False)) diff --git a/core/dbt/artifacts/schemas/upgrades/__init__.py b/core/dbt/artifacts/schemas/upgrades/__init__.py new file mode 100644 index 00000000000..d0839736d32 --- /dev/null +++ b/core/dbt/artifacts/schemas/upgrades/__init__.py @@ -0,0 +1 @@ +from dbt.artifacts.schemas.upgrades.upgrade_manifest import upgrade_manifest_json diff --git a/core/dbt/contracts/graph/manifest_upgrade.py b/core/dbt/artifacts/schemas/upgrades/upgrade_manifest.py similarity index 63% rename from core/dbt/contracts/graph/manifest_upgrade.py rename to core/dbt/artifacts/schemas/upgrades/upgrade_manifest.py index fb979ec09a4..7a4f888a463 100644 --- a/core/dbt/contracts/graph/manifest_upgrade.py +++ b/core/dbt/artifacts/schemas/upgrades/upgrade_manifest.py @@ -62,10 +62,72 @@ def drop_v9_and_prior_metrics(manifest: dict) -> None: manifest["disabled"] = filtered_disabled_entries +def _convert_dct_with_filter(v10_dct_with_opt_filter): + """Upgrage the filter object from v10 to v11. + + v10 filters from a serialized manifest looked like: + {..., 'filter': {'where_sql_template': '<filter_value>'}} + whereas v11 filters look like: + {..., 'filter': {'where_filters': [{'where_sql_template': '<filter_value>'}, ...]}} + """ + if v10_dct_with_opt_filter is not None and v10_dct_with_opt_filter.get("filter") is not None: + v10_dct_with_opt_filter["filter"] = {"where_filters": [v10_dct_with_opt_filter["filter"]]} + + +def _convert_metric(v10_metric_dict): + """Upgrades a v10 metric object to a v11 metric object. + + Specifcally the following properties change + 1. metric.filter + 2. metric.type_params.measure.filter + 3. metric.type_params.input_measures[x].filter + 4. metric.type_params.numerator.filter + 5. metric.type_params.denominator.filter + 6. 
metric.type_params.metrics[x].filter" + """ + + # handles top level metric filter + _convert_dct_with_filter(v10_metric_dict) + + type_params = v10_metric_dict.get("type_params") + if type_params is not None: + _convert_dct_with_filter(type_params.get("measure")) + _convert_dct_with_filter(type_params.get("numerator")) + _convert_dct_with_filter(type_params.get("denominator")) + + # handles metric.type_params.input_measures[x].filter + input_measures = type_params.get("input_measures") + if input_measures is not None: + for input_measure in input_measures: + _convert_dct_with_filter(input_measure) + + # handles metric.type_params.metrics[x].filter + metrics = type_params.get("metrics") + if metrics is not None: + for metric in metrics: + _convert_dct_with_filter(metric) + + +def upgrade_v10_metric_filters(manifest: dict): + """Handles metric filters changes from v10 to v11.""" + + metrics = manifest.get("metrics", {}) + for metric in metrics.values(): + _convert_metric(metric) + + disabled_nodes = manifest.get("disabled", {}) + for unique_id, nodes in disabled_nodes.items(): + if unique_id.split(".")[0] == "metric": + for node in nodes: + _convert_metric(node) + + def upgrade_manifest_json(manifest: dict, manifest_schema_version: int) -> dict: # this should remain 9 while the check in `upgrade_schema_version` may change if manifest_schema_version <= 9: drop_v9_and_prior_metrics(manifest=manifest) + elif manifest_schema_version == 10: + upgrade_v10_metric_filters(manifest=manifest) for node_content in manifest.get("nodes", {}).values(): upgrade_node_content(node_content) @@ -83,6 +145,9 @@ def upgrade_manifest_json(manifest: dict, manifest_schema_version: int) -> dict: manifest["groups"] = {} if "group_map" not in manifest: manifest["group_map"] = {} + # add unit_tests key + if "unit_tests" not in manifest: + manifest["unit_tests"] = {} for metric_content in manifest.get("metrics", {}).values(): # handle attr renames + value translation ("expression" -> "derived") metric_content = upgrade_ref_content(metric_content) @@ -104,4 +169,6 @@ def upgrade_manifest_json(manifest: dict, manifest_schema_version: int) -> dict: doc_content["resource_type"] = "doc" if "semantic_models" not in manifest: manifest["semantic_models"] = {} + if "saved_queries" not in manifest: + manifest["saved_queries"] = {} return manifest diff --git a/core/dbt/contracts/graph/utils.py b/core/dbt/artifacts/utils/validation.py similarity index 100% rename from core/dbt/contracts/graph/utils.py rename to core/dbt/artifacts/utils/validation.py diff --git a/core/dbt/cli/context.py b/core/dbt/cli/context.py index b8f541b9ad8..f0be810e1e4 100644 --- a/core/dbt/cli/context.py +++ b/core/dbt/cli/context.py @@ -1,6 +1,7 @@ -import click from typing import Optional +import click + from dbt.cli.main import cli as dbt diff --git a/core/dbt/cli/exceptions.py b/core/dbt/cli/exceptions.py index d88f91c01ac..6442f3d5f1f 100644 --- a/core/dbt/cli/exceptions.py +++ b/core/dbt/cli/exceptions.py @@ -1,6 +1,10 @@ -from typing import Optional, IO +from typing import IO, List, Optional, Union from click.exceptions import ClickException + +from dbt.artifacts.schemas.catalog import CatalogArtifact +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.results import RunExecutionResult from dbt.utils import ExitCodes @@ -22,7 +26,7 @@ def __init__(self, exit_code: ExitCodes) -> None: # the typing of _file is to satisfy the signature of ClickException.show # overriding this method prevents click from printing any exceptions to stdout 
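# ===========================================================================
# Editorial aside, not part of the diff: the v10 -> v11 manifest upgrade added
# in upgrade_manifest.py above rewrites serialized metric filters into the new
# "where_filters" shape. A minimal, self-contained sketch of that conversion,
# using the dict shapes documented in the _convert_dct_with_filter docstring;
# the metric name and filter text are made up for illustration:
def _upgrade_filter(dct):
    # {"filter": {"where_sql_template": ...}} -> {"filter": {"where_filters": [{"where_sql_template": ...}]}}
    if dct is not None and dct.get("filter") is not None:
        dct["filter"] = {"where_filters": [dct["filter"]]}

v10_metric = {"name": "revenue", "filter": {"where_sql_template": "is_paid"}}
_upgrade_filter(v10_metric)
assert v10_metric["filter"] == {"where_filters": [{"where_sql_template": "is_paid"}]}
# ===========================================================================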
- def show(self, _file: Optional[IO] = None) -> None: + def show(self, _file: Optional[IO] = None) -> None: # type: ignore[type-arg] pass @@ -30,7 +34,17 @@ class ResultExit(CliException): """This class wraps any exception that contains results while invoking dbt, or the results of an invocation that did not succeed but did not throw any exceptions.""" - def __init__(self, result) -> None: + def __init__( + self, + result: Union[ + bool, # debug + CatalogArtifact, # docs generate + List[str], # list/ls + Manifest, # parse + None, # clean, deps, init, source + RunExecutionResult, # build, compile, run, seed, snapshot, test, run-operation + ] = None, + ) -> None: super().__init__(ExitCodes.ModelError) self.result = result diff --git a/core/dbt/cli/flags.py b/core/dbt/cli/flags.py index df79ad5941b..2b957ed9465 100644 --- a/core/dbt/cli/flags.py +++ b/core/dbt/cli/flags.py @@ -2,20 +2,26 @@ import sys from dataclasses import dataclass from importlib import import_module -from multiprocessing import get_context +from pathlib import Path from pprint import pformat as pf from typing import Any, Callable, Dict, List, Optional, Set, Union -from click import Context, get_current_context, Parameter -from click.core import Command as ClickCommand, Group, ParameterSource +from click import Context, Parameter, get_current_context +from click.core import Command as ClickCommand +from click.core import Group, ParameterSource + from dbt.cli.exceptions import DbtUsageException from dbt.cli.resolvers import default_log_path, default_project_dir from dbt.cli.types import Command as CliCommand -from dbt.config.profile import read_user_config -from dbt.contracts.project import UserConfig -from dbt.exceptions import DbtInternalError -from dbt.deprecations import renamed_env_var -from dbt.helper_types import WarnErrorOptions +from dbt.config.project import read_project_flags +from dbt.contracts.project import ProjectFlags +from dbt.deprecations import fire_buffered_deprecations, renamed_env_var +from dbt.events import ALL_EVENT_NAMES +from dbt_common import ui +from dbt_common.clients import jinja +from dbt_common.events import functions +from dbt_common.exceptions import DbtInternalError +from dbt_common.helper_types import WarnErrorOptions if os.name != "nt": # https://bugs.python.org/issue41567 @@ -24,7 +30,9 @@ FLAGS_DEFAULTS = { "INDIRECT_SELECTION": "eager", "TARGET_PATH": None, - # Cli args without user_config or env var option. + "DEFER_STATE": None, # necessary because of retry construction of flags + "WARN_ERROR": None, + # Cli args without project_flags or env var option. "FULL_REFRESH": False, "STRICT_MODE": False, "STORE_FAILURES": False, @@ -47,7 +55,10 @@ def convert_config(config_name, config_value): ret = config_value if config_name.lower() == "warn_error_options" and type(config_value) == dict: ret = WarnErrorOptions( - include=config_value.get("include", []), exclude=config_value.get("exclude", []) + include=config_value.get("include", []), + exclude=config_value.get("exclude", []), + silence=config_value.get("silence", []), + valid_error_names=ALL_EVENT_NAMES, ) return ret @@ -57,11 +68,10 @@ def args_to_context(args: List[str]) -> Context: from dbt.cli.main import cli cli_ctx = cli.make_context(cli.name, args) - # Split args if they're a comma seperated string. + # Split args if they're a comma separated string. if len(args) == 1 and "," in args[0]: args = args[0].split(",") sub_command_name, sub_command, args = cli.resolve_command(cli_ctx, args) - # Handle source and docs group. 
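# ===========================================================================
# Editorial aside, not part of the diff: convert_config above now passes a
# third "silence" bucket to WarnErrorOptions. A hedged sketch of the dict shape
# it receives once the YAML value has been parsed; the event names below are
# placeholders, not real dbt event types:
config_value = {
    "include": ["SomeWarningEvent"],
    "exclude": [],
    "silence": ["SomeNoisyEvent"],  # new bucket in this diff; per its name, warnings listed here are silenced
}
include = config_value.get("include", [])
exclude = config_value.get("exclude", [])
silence = config_value.get("silence", [])
# ===========================================================================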
if isinstance(sub_command, Group): sub_command_name, sub_command, args = sub_command.resolve_command(cli_ctx, args) @@ -77,12 +87,13 @@ class Flags: """Primary configuration artifact for running dbt""" def __init__( - self, ctx: Optional[Context] = None, user_config: Optional[UserConfig] = None + self, ctx: Optional[Context] = None, project_flags: Optional[ProjectFlags] = None ) -> None: - # Set the default flags. for key, value in FLAGS_DEFAULTS.items(): object.__setattr__(self, key, value) + # Use to handle duplicate params in _assign_params + flags_defaults_list = list(FLAGS_DEFAULTS.keys()) if ctx is None: ctx = get_current_context() @@ -108,6 +119,7 @@ def _get_params_by_source(ctx: Context, source_type: ParameterSource): def _assign_params( ctx: Context, params_assigned_from_default: set, + params_assigned_from_user: set, deprecated_env_vars: Dict[str, Callable], ): """Recursively adds all click params to flag object""" @@ -121,7 +133,6 @@ def _assign_params( # respected over DBT_PRINT or --print. new_name: Union[str, None] = None if param_name in DEPRECATED_PARAMS: - # Deprecated env vars can only be set via env var. # We use the deprecated option in click to serialize the value # from the env var string. @@ -164,25 +175,56 @@ def _assign_params( old_name=dep_param.envvar, new_name=new_param.envvar, ) + # end deprecated_params # Set the flag value. - is_duplicate = hasattr(self, param_name.upper()) + is_duplicate = ( + hasattr(self, param_name.upper()) + and param_name.upper() not in flags_defaults_list + ) + # First time through, set as though FLAGS_DEFAULTS hasn't been set, so not a duplicate. + # Subsequent pass (to process "parent" params) should be treated as duplicates. + if param_name.upper() in flags_defaults_list: + flags_defaults_list.remove(param_name.upper()) + # Note: the following determines whether parameter came from click default, + # not from FLAGS_DEFAULTS in __init__. is_default = ctx.get_parameter_source(param_name) == ParameterSource.DEFAULT + is_envvar = ctx.get_parameter_source(param_name) == ParameterSource.ENVIRONMENT + flag_name = (new_name or param_name).upper() - if (is_duplicate and not is_default) or not is_duplicate: + # envvar flags are assigned in either parent or child context if there + # isn't an overriding cli command flag. + # If the flag has been encountered as a child cli flag, we don't + # want to overwrite with parent envvar, since the commandline flag takes precedence. + if (is_duplicate and not (is_default or is_envvar)) or not is_duplicate: object.__setattr__(self, flag_name, param_value) # Track default assigned params. - if is_default: + # For flags that are accepted at both 'parent' and 'child' levels, + # we need to track user-provided and default values across both, + # to support detection of mutually exclusive flags later on. 
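# ===========================================================================
# Editorial aside, not part of the diff: the precedence logic above keys off
# click's ParameterSource. A small, hypothetical command illustrating the three
# sources Flags distinguishes (click default, environment variable, CLI flag);
# the option and env var names below are made up:
import click

@click.command()
@click.option("--log-level", envvar="EXAMPLE_LOG_LEVEL", default="info")
def example(log_level):
    source = click.get_current_context().get_parameter_source("log_level")
    # source is ParameterSource.DEFAULT, .ENVIRONMENT, or .COMMANDLINE depending on
    # whether the value came from the click default, the env var, or --log-level
    click.echo(f"log_level={log_level} via {source}")

if __name__ == "__main__":
    example()
# ===========================================================================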
+ if not is_default: + params_assigned_from_user.add(param_name) + if param_name in params_assigned_from_default: + params_assigned_from_default.remove(param_name) + if is_default and param_name not in params_assigned_from_user: params_assigned_from_default.add(param_name) if ctx.parent: - _assign_params(ctx.parent, params_assigned_from_default, deprecated_env_vars) + _assign_params( + ctx.parent, + params_assigned_from_default, + params_assigned_from_user, + deprecated_env_vars, + ) + params_assigned_from_user = set() # type: Set[str] params_assigned_from_default = set() # type: Set[str] deprecated_env_vars: Dict[str, Callable] = {} - _assign_params(ctx, params_assigned_from_default, deprecated_env_vars) + _assign_params( + ctx, params_assigned_from_default, params_assigned_from_user, deprecated_env_vars + ) # Set deprecated_env_var_warnings to be fired later after events have been init. object.__setattr__( @@ -199,33 +241,48 @@ def _assign_params( invoked_subcommand.ignore_unknown_options = True invoked_subcommand_ctx = invoked_subcommand.make_context(None, sys.argv) _assign_params( - invoked_subcommand_ctx, params_assigned_from_default, deprecated_env_vars + invoked_subcommand_ctx, + params_assigned_from_default, + params_assigned_from_user, + deprecated_env_vars, ) - if not user_config: + if not project_flags: + project_dir = getattr(self, "PROJECT_DIR", str(default_project_dir())) profiles_dir = getattr(self, "PROFILES_DIR", None) - user_config = read_user_config(profiles_dir) if profiles_dir else None + if profiles_dir and project_dir: + project_flags = read_project_flags(project_dir, profiles_dir) + else: + project_flags = None # Add entire invocation command to flags object.__setattr__(self, "INVOCATION_COMMAND", "dbt " + " ".join(sys.argv[1:])) - # Overwrite default assignments with user config if available. - if user_config: + if project_flags: + # Overwrite default assignments with project flags if available. param_assigned_from_default_copy = params_assigned_from_default.copy() for param_assigned_from_default in params_assigned_from_default: - user_config_param_value = getattr(user_config, param_assigned_from_default, None) - if user_config_param_value is not None: + project_flags_param_value = getattr( + project_flags, param_assigned_from_default, None + ) + if project_flags_param_value is not None: object.__setattr__( self, param_assigned_from_default.upper(), - convert_config(param_assigned_from_default, user_config_param_value), + convert_config(param_assigned_from_default, project_flags_param_value), ) param_assigned_from_default_copy.remove(param_assigned_from_default) params_assigned_from_default = param_assigned_from_default_copy + # Add project-level flags that are not available as CLI options / env vars + for ( + project_level_flag_name, + project_level_flag_value, + ) in project_flags.project_only_flags.items(): + object.__setattr__(self, project_level_flag_name.upper(), project_level_flag_value) + # Set hard coded flags. object.__setattr__(self, "WHICH", invoked_subcommand_name or ctx.info_name) - object.__setattr__(self, "MP_CONTEXT", get_context("spawn")) # Apply the lead/follow relationship between some parameters. self._override_if_set("USE_COLORS", "USE_COLORS_FILE", params_assigned_from_default) @@ -236,9 +293,11 @@ def _assign_params( # Starting in v1.5, if `log-path` is set in `dbt_project.yml`, it will raise a deprecation warning, # with the possibility of removing it in a future release. 
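# ===========================================================================
# Editorial aside, not part of the diff: besides being built from a click
# Context, a Flags object can be reconstructed from a plain dict via the
# from_dict classmethod further down in this file (useful when a prior
# invocation's arguments need to be replayed). A hedged usage sketch; the
# command and args dict are made up for illustration:
from dbt.cli.flags import Flags
from dbt.cli.types import Command as CliCommand

flags = Flags.from_dict(CliCommand.RUN, {"full_refresh": True})
# ===========================================================================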
if getattr(self, "LOG_PATH", None) is None: - project_dir = getattr(self, "PROJECT_DIR", default_project_dir()) + project_dir = getattr(self, "PROJECT_DIR", str(default_project_dir())) version_check = getattr(self, "VERSION_CHECK", True) - object.__setattr__(self, "LOG_PATH", default_log_path(project_dir, version_check)) + object.__setattr__( + self, "LOG_PATH", default_log_path(Path(project_dir), version_check) + ) # Support console DO NOT TRACK initiative. if os.getenv("DO_NOT_TRACK", "").lower() in ("1", "t", "true", "y", "yes"): @@ -249,6 +308,10 @@ def _assign_params( params_assigned_from_default, ["WARN_ERROR", "WARN_ERROR_OPTIONS"] ) + # Handle arguments mutually exclusive with INLINE + self._assert_mutually_exclusive(params_assigned_from_default, ["SELECT", "INLINE"]) + self._assert_mutually_exclusive(params_assigned_from_default, ["SELECTOR", "INLINE"]) + # Support lower cased access for legacy code. params = set( x for x in dir(self) if not callable(getattr(self, x)) and not x.startswith("__") @@ -256,6 +319,8 @@ def _assign_params( for param in params: object.__setattr__(self, param.lower(), getattr(self, param)) + self.set_common_global_flags() + def __str__(self) -> str: return str(pf(self.__dict__)) @@ -273,7 +338,9 @@ def _assert_mutually_exclusive( """ set_flag = None for flag in group: - flag_set_by_user = flag.lower() not in params_assigned_from_default + flag_set_by_user = ( + hasattr(self, flag) and flag.lower() not in params_assigned_from_default + ) if flag_set_by_user and set_flag: raise DbtUsageException( f"{flag.lower()}: not allowed with argument {set_flag.lower()}" @@ -288,6 +355,8 @@ def fire_deprecations(self): # not get pickled when written to disk as json. object.__delattr__(self, "deprecated_env_var_warnings") + fire_buffered_deprecations() + @classmethod def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags": command_arg_list = command_params(command, args_dict) @@ -296,6 +365,27 @@ def from_dict(cls, command: CliCommand, args_dict: Dict[str, Any]) -> "Flags": flags.fire_deprecations() return flags + def set_common_global_flags(self): + # Set globals for common.ui + if getattr(self, "PRINTER_WIDTH", None) is not None: + ui.PRINTER_WIDTH = getattr(self, "PRINTER_WIDTH") + if getattr(self, "USE_COLORS", None) is not None: + ui.USE_COLOR = getattr(self, "USE_COLORS") + + # Set globals for common.events.functions + functions.WARN_ERROR = getattr(self, "WARN_ERROR", False) + if getattr(self, "WARN_ERROR_OPTIONS", None) is not None: + functions.WARN_ERROR_OPTIONS = getattr(self, "WARN_ERROR_OPTIONS") + + # Set globals for common.jinja + if getattr(self, "MACRO_DEBUGGING", None) is not None: + jinja.MACRO_DEBUGGING = getattr(self, "MACRO_DEBUGGING") + + # This is here to prevent mypy from complaining about all of the + # attributes which we added dynamically. 
+ def __getattr__(self, name: str) -> Any: + return super().__getattribute__(name) # type: ignore + CommandParams = List[str] @@ -316,10 +406,8 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar default_args = set([x.lower() for x in FLAGS_DEFAULTS.keys()]) res = command.to_list() - for k, v in args_dict.items(): k = k.lower() - # if a "which" value exists in the args dict, it should match the command provided if k == WHICH_KEY: if v != command.value: @@ -329,7 +417,9 @@ def command_params(command: CliCommand, args_dict: Dict[str, Any]) -> CommandPar continue # param was assigned from defaults and should not be included - if k not in (cmd_args | prnt_args) - default_args: + if k not in (cmd_args | prnt_args) or ( + k in default_args and v == FLAGS_DEFAULTS[k.upper()] + ): continue # if the param is in parent args, it should come before the arg name @@ -342,9 +432,21 @@ def add_fn(x): spinal_cased = k.replace("_", "-") + # MultiOption flags come back as lists, but we want to pass them as space separated strings + if isinstance(v, list): + if len(v) > 0: + v = " ".join(v) + else: + continue + if k == "macro" and command == CliCommand.RUN_OPERATION: add_fn(v) - elif v in (None, False): + # None is a Singleton, False is a Flyweight, only one instance of each. + elif (v is None or v is False) and k not in ( + # These are None by default but they do not support --no-{flag} + "defer_state", + "log_format", + ): add_fn(f"--no-{spinal_cased}") elif v is True: add_fn(f"--{spinal_cased}") diff --git a/core/dbt/cli/main.py b/core/dbt/cli/main.py index 390cde0adc4..ca79d5eb073 100644 --- a/core/dbt/cli/main.py +++ b/core/dbt/cli/main.py @@ -1,44 +1,20 @@ +import functools from copy import copy from dataclasses import dataclass from typing import Callable, List, Optional, Union import click -from click.exceptions import ( - Exit as ClickExit, - BadOptionUsage, - NoSuchOption, - UsageError, -) - -from dbt.cli import requires, params as p -from dbt.cli.exceptions import ( - DbtInternalException, - DbtUsageException, -) +from click.exceptions import BadOptionUsage +from click.exceptions import Exit as ClickExit +from click.exceptions import NoSuchOption, UsageError + +from dbt.artifacts.schemas.catalog import CatalogArtifact +from dbt.artifacts.schemas.run import RunExecutionResult +from dbt.cli import params as p +from dbt.cli import requires +from dbt.cli.exceptions import DbtInternalException, DbtUsageException from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.results import ( - CatalogArtifact, - RunExecutionResult, -) -from dbt.events.base_types import EventMsg -from dbt.task.build import BuildTask -from dbt.task.clean import CleanTask -from dbt.task.clone import CloneTask -from dbt.task.compile import CompileTask -from dbt.task.debug import DebugTask -from dbt.task.deps import DepsTask -from dbt.task.freshness import FreshnessTask -from dbt.task.generate import GenerateTask -from dbt.task.init import InitTask -from dbt.task.list import ListTask -from dbt.task.retry import RetryTask -from dbt.task.run import RunTask -from dbt.task.run_operation import RunOperationTask -from dbt.task.seed import SeedTask -from dbt.task.serve import ServeTask -from dbt.task.show import ShowTask -from dbt.task.snapshot import SnapshotTask -from dbt.task.test import TestTask +from dbt_common.events.base_types import EventMsg @dataclass @@ -64,7 +40,7 @@ def __init__( self, manifest: Optional[Manifest] = None, callbacks: Optional[List[Callable[[EventMsg], None]]] = None, 
- ): + ) -> None: self.manifest = manifest if callbacks is None: @@ -73,7 +49,7 @@ def __init__( def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult: try: - dbt_ctx = cli.make_context(cli.name, args) + dbt_ctx = cli.make_context(cli.name, args.copy()) dbt_ctx.obj = { "manifest": self.manifest, "callbacks": self.callbacks, @@ -118,6 +94,56 @@ def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult: ) +# approach from https://github.com/pallets/click/issues/108#issuecomment-280489786 +def global_flags(func): + @p.cache_selected_only + @p.debug + @p.defer + @p.deprecated_defer + @p.defer_state + @p.deprecated_favor_state + @p.deprecated_print + @p.deprecated_state + @p.fail_fast + @p.favor_state + @p.indirect_selection + @p.log_cache_events + @p.log_file_max_bytes + @p.log_format + @p.log_format_file + @p.log_level + @p.log_level_file + @p.log_path + @p.macro_debugging + @p.partial_parse + @p.partial_parse_file_path + @p.partial_parse_file_diff + @p.populate_cache + @p.print + @p.printer_width + @p.profile + @p.quiet + @p.record_timing_info + @p.send_anonymous_usage_stats + @p.single_threaded + @p.state + @p.static_parser + @p.target + @p.use_colors + @p.use_colors_file + @p.use_experimental_parser + @p.version + @p.version_check + @p.warn_error + @p.warn_error_options + @p.write_json + @functools.wraps(func) + def wrapper(*args, **kwargs): + return func(*args, **kwargs) + + return wrapper + + # dbt @click.group( context_settings={"help_option_names": ["-h", "--help"]}, @@ -126,37 +152,8 @@ def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult: epilog="Specify one of these sub-commands and you can find more help from there.", ) @click.pass_context -@p.cache_selected_only -@p.debug -@p.deprecated_print -@p.enable_legacy_logger -@p.fail_fast -@p.log_cache_events -@p.log_file_max_bytes -@p.log_format -@p.log_format_file -@p.log_level -@p.log_level_file -@p.log_path -@p.macro_debugging -@p.partial_parse -@p.partial_parse_file_path -@p.populate_cache -@p.print -@p.printer_width -@p.quiet -@p.record_timing_info -@p.send_anonymous_usage_stats -@p.single_threaded -@p.static_parser -@p.use_colors -@p.use_colors_file -@p.use_experimental_parser -@p.version -@p.version_check -@p.warn_error -@p.warn_error_options -@p.write_json +@global_flags +@p.show_resource_report def cli(ctx, **kwargs): """An ELT tool for managing your SQL transformations and data models. 
For more documentation on these commands, visit: docs.getdbt.com @@ -166,30 +163,25 @@ def cli(ctx, **kwargs): # dbt build @cli.command("build") @click.pass_context -@p.defer -@p.deprecated_defer +@global_flags +@p.empty +@p.event_time_start +@p.event_time_end @p.exclude -@p.fail_fast -@p.favor_state -@p.deprecated_favor_state +@p.export_saved_queries @p.full_refresh -@p.indirect_selection -@p.profile +@p.deprecated_include_saved_query @p.profiles_dir @p.project_dir @p.resource_type +@p.exclude_resource_type @p.select @p.selector @p.show -@p.state -@p.defer_state -@p.deprecated_state @p.store_failures -@p.target @p.target_path @p.threads @p.vars -@p.version_check @requires.postflight @requires.preflight @requires.profile @@ -198,6 +190,8 @@ def cli(ctx, **kwargs): @requires.manifest def build(ctx, **kwargs): """Run all seeds, models, snapshots, and tests in DAG order""" + from dbt.task.build import BuildTask + task = BuildTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -212,10 +206,10 @@ def build(ctx, **kwargs): # dbt clean @cli.command("clean") @click.pass_context -@p.profile +@global_flags +@p.clean_project_files_only @p.profiles_dir @p.project_dir -@p.target @p.target_path @p.vars @requires.postflight @@ -224,16 +218,18 @@ def build(ctx, **kwargs): @requires.project def clean(ctx, **kwargs): """Delete all folders in the clean-targets list (usually the dbt_packages and target directories.)""" - task = CleanTask(ctx.obj["flags"], ctx.obj["project"]) + from dbt.task.clean import CleanTask - results = task.run() - success = task.interpret_results(results) + with CleanTask(ctx.obj["flags"], ctx.obj["project"]) as task: + results = task.run() + success = task.interpret_results(results) return results, success # dbt docs @cli.group() @click.pass_context +@global_flags def docs(ctx, **kwargs): """Generate or serve the documentation website for your project""" @@ -241,26 +237,18 @@ def docs(ctx, **kwargs): # dbt docs generate @docs.command("generate") @click.pass_context +@global_flags @p.compile_docs -@p.defer -@p.deprecated_defer @p.exclude -@p.favor_state -@p.deprecated_favor_state -@p.profile @p.profiles_dir @p.project_dir @p.select @p.selector @p.empty_catalog -@p.state -@p.defer_state -@p.deprecated_state -@p.target +@p.static @p.target_path @p.threads @p.vars -@p.version_check @requires.postflight @requires.preflight @requires.profile @@ -269,6 +257,8 @@ def docs(ctx, **kwargs): @requires.manifest(write=False) def docs_generate(ctx, **kwargs): """Generate the documentation website for your project""" + from dbt.task.docs.generate import GenerateTask + task = GenerateTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -283,12 +273,12 @@ def docs_generate(ctx, **kwargs): # dbt docs serve @docs.command("serve") @click.pass_context +@global_flags @p.browser +@p.host @p.port -@p.profile @p.profiles_dir @p.project_dir -@p.target @p.target_path @p.vars @requires.postflight @@ -298,6 +288,8 @@ def docs_generate(ctx, **kwargs): @requires.runtime_config def docs_serve(ctx, **kwargs): """Serve the documentation website for your project""" + from dbt.task.docs.serve import ServeTask + task = ServeTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -311,29 +303,21 @@ def docs_serve(ctx, **kwargs): # dbt compile @cli.command("compile") @click.pass_context -@p.defer -@p.deprecated_defer +@global_flags @p.exclude -@p.favor_state -@p.deprecated_favor_state @p.full_refresh @p.show_output_format -@p.indirect_selection @p.introspect -@p.profile @p.profiles_dir @p.project_dir +@p.empty @p.select 
@p.selector @p.inline -@p.state -@p.defer_state -@p.deprecated_state -@p.target +@p.compile_inject_ephemeral_ctes @p.target_path @p.threads @p.vars -@p.version_check @requires.postflight @requires.preflight @requires.profile @@ -343,6 +327,8 @@ def docs_serve(ctx, **kwargs): def compile(ctx, **kwargs): """Generates executable SQL from source, model, test, and analysis files. Compiled SQL files are written to the target/ directory.""" + from dbt.task.compile import CompileTask + task = CompileTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -357,30 +343,20 @@ def compile(ctx, **kwargs): # dbt show @cli.command("show") @click.pass_context -@p.defer -@p.deprecated_defer +@global_flags @p.exclude -@p.favor_state -@p.deprecated_favor_state @p.full_refresh @p.show_output_format @p.show_limit -@p.indirect_selection @p.introspect -@p.profile @p.profiles_dir @p.project_dir @p.select @p.selector @p.inline -@p.state -@p.defer_state -@p.deprecated_state -@p.target @p.target_path @p.threads @p.vars -@p.version_check @requires.postflight @requires.preflight @requires.profile @@ -390,6 +366,8 @@ def compile(ctx, **kwargs): def show(ctx, **kwargs): """Generates executable SQL for a named resource or inline query, runs that SQL, and returns a preview of the results. Does not materialize anything to the warehouse.""" + from dbt.task.show import ShowTask + task = ShowTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -404,22 +382,20 @@ def show(ctx, **kwargs): # dbt debug @cli.command("debug") @click.pass_context +@global_flags @p.debug_connection @p.config_dir -@p.profile @p.profiles_dir_exists_false @p.project_dir -@p.target @p.vars -@p.version_check @requires.postflight @requires.preflight def debug(ctx, **kwargs): """Show information on the current dbt environment and check dependencies, then test the database connection. Not to be confused with the --debug option which increases verbosity.""" + from dbt.task.debug import DebugTask task = DebugTask( ctx.obj["flags"], - None, ) results = task.run() @@ -430,63 +406,80 @@ def debug(ctx, **kwargs): # dbt deps @cli.command("deps") @click.pass_context -@p.profile +@global_flags @p.profiles_dir_exists_false @p.project_dir -@p.target @p.vars +@p.source +@p.lock +@p.upgrade +@p.add_package @requires.postflight @requires.preflight @requires.unset_profile @requires.project def deps(ctx, **kwargs): - """Pull the most recent version of the dependencies listed in packages.yml""" - task = DepsTask(ctx.obj["flags"], ctx.obj["project"]) - results = task.run() - success = task.interpret_results(results) + """Install dbt packages specified. + In the following case, a new `package-lock.yml` will be generated and the packages are installed: + - user updated the packages.yml + - user specify the flag --update, which means for packages that are specified as a + range, dbt-core will try to install the newer version + Otherwise, deps will use `package-lock.yml` as source of truth to install packages. + + There is a way to add new packages by providing an `--add-package` flag to deps command + which will allow user to specify a package they want to add in the format of packagename@version. 
+ """ + from dbt.task.deps import DepsTask + + flags = ctx.obj["flags"] + if flags.ADD_PACKAGE: + if not flags.ADD_PACKAGE["version"] and flags.SOURCE != "local": + raise BadOptionUsage( + message=f"Version is required in --add-package when a package when source is {flags.SOURCE}", + option_name="--add-package", + ) + with DepsTask(flags, ctx.obj["project"]) as task: + results = task.run() + success = task.interpret_results(results) return results, success # dbt init @cli.command("init") @click.pass_context +@global_flags # for backwards compatibility, accept 'project_name' as an optional positional argument @click.argument("project_name", required=False) -@p.profile @p.profiles_dir_exists_false @p.project_dir @p.skip_profile_setup -@p.target @p.vars @requires.postflight @requires.preflight def init(ctx, **kwargs): """Initialize a new dbt project.""" - task = InitTask(ctx.obj["flags"], None) + from dbt.task.init import InitTask - results = task.run() - success = task.interpret_results(results) + with InitTask(ctx.obj["flags"]) as task: + results = task.run() + success = task.interpret_results(results) return results, success # dbt list @cli.command("list") @click.pass_context +@global_flags @p.exclude -@p.indirect_selection @p.models @p.output @p.output_keys -@p.profile @p.profiles_dir @p.project_dir @p.resource_type +@p.exclude_resource_type @p.raw_select @p.selector -@p.state -@p.defer_state -@p.deprecated_state -@p.target @p.target_path @p.vars @requires.postflight @@ -497,6 +490,8 @@ def init(ctx, **kwargs): @requires.manifest def list(ctx, **kwargs): """List the resources in your project""" + from dbt.task.list import ListTask + task = ListTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -517,14 +512,12 @@ def list(ctx, **kwargs): # dbt parse @cli.command("parse") @click.pass_context -@p.profile +@global_flags @p.profiles_dir @p.project_dir -@p.target @p.target_path @p.threads @p.vars -@p.version_check @requires.postflight @requires.preflight @requires.profile @@ -534,33 +527,25 @@ def list(ctx, **kwargs): def parse(ctx, **kwargs): """Parses the project and provides information on performance""" # manifest generation and writing happens in @requires.manifest - return ctx.obj["manifest"], True # dbt run @cli.command("run") @click.pass_context -@p.defer -@p.deprecated_defer -@p.favor_state -@p.deprecated_favor_state +@global_flags @p.exclude -@p.fail_fast @p.full_refresh -@p.profile @p.profiles_dir @p.project_dir +@p.empty +@p.event_time_start +@p.event_time_end @p.select @p.selector -@p.state -@p.defer_state -@p.deprecated_state -@p.target @p.target_path @p.threads @p.vars -@p.version_check @requires.postflight @requires.preflight @requires.profile @@ -569,6 +554,8 @@ def parse(ctx, **kwargs): @requires.manifest def run(ctx, **kwargs): """Compile SQL and execute against the current target database.""" + from dbt.task.run import RunTask + task = RunTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -583,26 +570,26 @@ def run(ctx, **kwargs): # dbt retry @cli.command("retry") @click.pass_context +@global_flags @p.project_dir @p.profiles_dir @p.vars -@p.profile -@p.target -@p.state +@p.target_path @p.threads -@p.fail_fast +@p.full_refresh @requires.postflight @requires.preflight @requires.profile @requires.project @requires.runtime_config -@requires.manifest def retry(ctx, **kwargs): """Retry the nodes that failed in the previous run.""" + from dbt.task.retry import RetryTask + + # Retry will parse manifest inside the task after we consolidate the flags task = RetryTask( 
ctx.obj["flags"], ctx.obj["runtime_config"], - ctx.obj["manifest"], ) results = task.run() @@ -613,21 +600,18 @@ def retry(ctx, **kwargs): # dbt clone @cli.command("clone") @click.pass_context -@p.defer_state +@global_flags @p.exclude @p.full_refresh -@p.profile @p.profiles_dir @p.project_dir @p.resource_type +@p.exclude_resource_type @p.select @p.selector -@p.state # required -@p.target @p.target_path @p.threads @p.vars -@p.version_check @requires.preflight @requires.profile @requires.project @@ -636,6 +620,8 @@ def retry(ctx, **kwargs): @requires.postflight def clone(ctx, **kwargs): """Create clones of selected nodes based on their location in the manifest provided to --state.""" + from dbt.task.clone import CloneTask + task = CloneTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -650,12 +636,11 @@ def clone(ctx, **kwargs): # dbt run operation @cli.command("run-operation") @click.pass_context +@global_flags @click.argument("macro") @p.args -@p.profile @p.profiles_dir @p.project_dir -@p.target @p.target_path @p.threads @p.vars @@ -667,6 +652,8 @@ def clone(ctx, **kwargs): @requires.manifest def run_operation(ctx, **kwargs): """Run the named macro with any supplied arguments.""" + from dbt.task.run_operation import RunOperationTask + task = RunOperationTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -681,22 +668,17 @@ def run_operation(ctx, **kwargs): # dbt seed @cli.command("seed") @click.pass_context +@global_flags @p.exclude @p.full_refresh -@p.profile @p.profiles_dir @p.project_dir @p.select @p.selector @p.show -@p.state -@p.defer_state -@p.deprecated_state -@p.target @p.target_path @p.threads @p.vars -@p.version_check @requires.postflight @requires.preflight @requires.profile @@ -705,6 +687,8 @@ def run_operation(ctx, **kwargs): @requires.manifest def seed(ctx, **kwargs): """Load data from csv files into your data warehouse.""" + from dbt.task.seed import SeedTask + task = SeedTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -718,20 +702,12 @@ def seed(ctx, **kwargs): # dbt snapshot @cli.command("snapshot") @click.pass_context -@p.defer -@p.deprecated_defer +@global_flags @p.exclude -@p.favor_state -@p.deprecated_favor_state -@p.profile @p.profiles_dir @p.project_dir @p.select @p.selector -@p.state -@p.defer_state -@p.deprecated_state -@p.target @p.target_path @p.threads @p.vars @@ -743,6 +719,8 @@ def seed(ctx, **kwargs): @requires.manifest def snapshot(ctx, **kwargs): """Execute snapshots defined in your project""" + from dbt.task.snapshot import SnapshotTask + task = SnapshotTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -757,6 +735,7 @@ def snapshot(ctx, **kwargs): # dbt source @cli.group() @click.pass_context +@global_flags def source(ctx, **kwargs): """Manage your project's sources""" @@ -764,17 +743,13 @@ def source(ctx, **kwargs): # dbt source freshness @source.command("freshness") @click.pass_context +@global_flags @p.exclude @p.output_path # TODO: Is this ok to re-use? We have three different output params, how much can we consolidate? 
-@p.profile @p.profiles_dir @p.project_dir @p.select @p.selector -@p.state -@p.defer_state -@p.deprecated_state -@p.target @p.target_path @p.threads @p.vars @@ -786,6 +761,8 @@ def source(ctx, **kwargs): @requires.manifest def freshness(ctx, **kwargs): """check the current freshness of the project's sources""" + from dbt.task.freshness import FreshnessTask + task = FreshnessTask( ctx.obj["flags"], ctx.obj["runtime_config"], @@ -806,27 +783,18 @@ def freshness(ctx, **kwargs): # dbt test @cli.command("test") @click.pass_context -@p.defer -@p.deprecated_defer +@global_flags @p.exclude -@p.fail_fast -@p.favor_state -@p.deprecated_favor_state -@p.indirect_selection -@p.profile +@p.resource_type +@p.exclude_resource_type @p.profiles_dir @p.project_dir @p.select @p.selector -@p.state -@p.defer_state -@p.deprecated_state @p.store_failures -@p.target @p.target_path @p.threads @p.vars -@p.version_check @requires.postflight @requires.preflight @requires.profile @@ -835,6 +803,8 @@ def freshness(ctx, **kwargs): @requires.manifest def test(ctx, **kwargs): """Runs tests on data in deployed models. Run this after `dbt run`""" + from dbt.task.test import TestTask + task = TestTask( ctx.obj["flags"], ctx.obj["runtime_config"], diff --git a/core/dbt/cli/option_types.py b/core/dbt/cli/option_types.py index 34d7314e867..2d0efedbb9e 100644 --- a/core/dbt/cli/option_types.py +++ b/core/dbt/cli/option_types.py @@ -1,9 +1,10 @@ -from click import ParamType, Choice +from click import Choice, ParamType -from dbt.config.utils import parse_cli_yaml_string -from dbt.exceptions import ValidationError, DbtValidationError, OptionNotYamlDictError - -from dbt.helper_types import WarnErrorOptions +from dbt.config.utils import normalize_warn_error_options, parse_cli_yaml_string +from dbt.events import ALL_EVENT_NAMES +from dbt.exceptions import OptionNotYamlDictError, ValidationError +from dbt_common.exceptions import DbtValidationError +from dbt_common.helper_types import WarnErrorOptions class YAML(ParamType): @@ -22,6 +23,26 @@ def convert(self, value, param, ctx): self.fail(f"String '{value}' is not valid YAML", param, ctx) +class Package(ParamType): + """The Click STRING type. Converts string into dict with package name and version. + Example package: + package-name@1.0.0 + package-name + """ + + name = "NewPackage" + + def convert(self, value, param, ctx): + # assume non-string values are a problem + if not isinstance(value, str): + self.fail(f"Cannot load Package from type {type(value)}", param, ctx) + try: + package_name, package_version = value.split("@") + return {"name": package_name, "version": package_version} + except ValueError: + return {"name": value, "version": None} + + class WarnErrorOptionsType(YAML): """The Click WarnErrorOptions type. 
Converts YAML strings into objects.""" @@ -30,9 +51,13 @@ class WarnErrorOptionsType(YAML): def convert(self, value, param, ctx): # this function is being used by param in click include_exclude = super().convert(value, param, ctx) + normalize_warn_error_options(include_exclude) return WarnErrorOptions( - include=include_exclude.get("include", []), exclude=include_exclude.get("exclude", []) + include=include_exclude.get("include", []), + exclude=include_exclude.get("exclude", []), + silence=include_exclude.get("silence", []), + valid_error_names=ALL_EVENT_NAMES, ) @@ -56,7 +81,10 @@ class ChoiceTuple(Choice): name = "CHOICE_TUPLE" def convert(self, value, param, ctx): - for value_item in value: - super().convert(value_item, param, ctx) + if not isinstance(value, str): + for value_item in value: + super().convert(value_item, param, ctx) + else: + super().convert(value, param, ctx) return value diff --git a/core/dbt/cli/options.py b/core/dbt/cli/options.py index 3a42dddda80..c73a4bcd374 100644 --- a/core/dbt/cli/options.py +++ b/core/dbt/cli/options.py @@ -1,20 +1,24 @@ -import click import inspect import typing as t + +import click from click import Context +from click.parser import OptionParser, ParsingState + from dbt.cli.option_types import ChoiceTuple # Implementation from: https://stackoverflow.com/a/48394004 # Note MultiOption options must be specified with type=tuple or type=ChoiceTuple (https://github.com/pallets/click/issues/2012) class MultiOption(click.Option): - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: self.save_other_options = kwargs.pop("save_other_options", True) nargs = kwargs.pop("nargs", -1) assert nargs == -1, "nargs, if set, must be -1 not {}".format(nargs) super(MultiOption, self).__init__(*args, **kwargs) - self._previous_parser_process = None - self._eat_all_parser = None + # this makes mypy happy, setting these to None causes mypy failures + self._previous_parser_process = lambda *args, **kwargs: None + self._eat_all_parser = lambda *args, **kwargs: None # validate that multiple=True multiple = kwargs.pop("multiple", None) @@ -29,34 +33,35 @@ def __init__(self, *args, **kwargs): else: assert isinstance(option_type, ChoiceTuple), msg - def add_to_parser(self, parser, ctx): - def parser_process(value, state): + def add_to_parser(self, parser: OptionParser, ctx: Context): + def parser_process(value: str, state: ParsingState): # method to hook to the parser.process done = False - value = [value] + value_list = str.split(value, " ") if self.save_other_options: # grab everything up to the next option while state.rargs and not done: - for prefix in self._eat_all_parser.prefixes: + for prefix in self._eat_all_parser.prefixes: # type: ignore[attr-defined] if state.rargs[0].startswith(prefix): done = True if not done: - value.append(state.rargs.pop(0)) + value_list.append(state.rargs.pop(0)) else: # grab everything remaining - value += state.rargs + value_list += state.rargs state.rargs[:] = [] - value = tuple(value) + value_tuple = tuple(value_list) # call the actual process - self._previous_parser_process(value, state) + self._previous_parser_process(value_tuple, state) retval = super(MultiOption, self).add_to_parser(parser, ctx) for name in self.opts: our_parser = parser._long_opt.get(name) or parser._short_opt.get(name) if our_parser: - self._eat_all_parser = our_parser + self._eat_all_parser = our_parser # type: ignore[assignment] self._previous_parser_process = our_parser.process - our_parser.process = parser_process + # mypy 
doesn't like assignment to a method, see https://github.com/python/mypy/issues/708 + our_parser.process = parser_process # type: ignore[method-assign] + break return retval diff --git a/core/dbt/cli/params.py b/core/dbt/cli/params.py index e75f5f6419b..425009d76ee 100644 --- a/core/dbt/cli/params.py +++ b/core/dbt/cli/params.py @@ -1,11 +1,18 @@ from pathlib import Path import click + +from dbt.cli.option_types import YAML, ChoiceTuple, Package, WarnErrorOptionsType from dbt.cli.options import MultiOption -from dbt.cli.option_types import YAML, ChoiceTuple, WarnErrorOptionsType -from dbt.cli.resolvers import default_project_dir, default_profiles_dir +from dbt.cli.resolvers import default_profiles_dir, default_project_dir from dbt.version import get_version_information +add_package = click.option( + "--add-package", + help="Add a package to current package spec, specify it as package-name@version. Change the source with --source flag.", + envvar=None, + type=Package(), +) args = click.option( "--args", envvar=None, @@ -40,6 +47,14 @@ default=True, ) +compile_inject_ephemeral_ctes = click.option( + "--inject-ephemeral-ctes/--no-inject-ephemeral-ctes", + envvar=None, + help="Internal flag controlling injection of referenced ephemeral models' CTEs during `compile`.", + hidden=True, + default=True, +) + config_dir = click.option( "--config-dir", envvar=None, @@ -69,10 +84,27 @@ hidden=True, ) -enable_legacy_logger = click.option( - "--enable-legacy-logger/--no-enable-legacy-logger", - envvar="DBT_ENABLE_LEGACY_LOGGER", - hidden=True, +empty = click.option( + "--empty/--no-empty", + envvar="DBT_EMPTY", + help="If specified, limit input refs and sources to zero rows.", + is_flag=True, +) + +event_time_end = click.option( + "--event-time-end", + envvar="DBT_EVENT_TIME_END", + help="If specified, the end datetime dbt uses to filter microbatch model inputs (exclusive).", + type=click.DateTime(), + default=None, +) + +event_time_start = click.option( + "--event-time-start", + envvar="DBT_EVENT_TIME_START", + help="If specified, the start datetime dbt uses to filter microbatch model inputs (inclusive).", + type=click.DateTime(), + default=None, ) exclude = click.option( @@ -84,6 +116,14 @@ help="Specify the nodes to exclude.", ) +export_saved_queries = click.option( + "--export-saved-queries/--no-export-saved-queries", + envvar="DBT_EXPORT_SAVED_QUERIES", + help="Export saved queries within the 'build' command, otherwise no-op", + is_flag=True, + hidden=True, +) + fail_fast = click.option( "--fail-fast/--no-fail-fast", "-x/ ", @@ -111,6 +151,14 @@ is_flag=True, ) +host = click.option( + "--host", + envvar="DBT_HOST", + help="host to serve dbt docs on", + type=click.STRING, + default="127.0.0.1", +) + indirect_selection = click.option( "--indirect-selection", envvar="DBT_INDIRECT_SELECTION", @@ -119,6 +167,13 @@ default="eager", ) +lock = click.option( + "--lock", + envvar=None, + help="Generate the package-lock.yml file without installing the packages.", + is_flag=True, +) + log_cache_events = click.option( "--log-cache-events/--no-log-cache-events", help="Enable verbose logging for relational cache events to help when debugging.", @@ -257,6 +312,14 @@ type=click.Path(exists=True, dir_okay=False, resolve_path=True), ) +partial_parse_file_diff = click.option( + "--partial-parse-file-diff/--no-partial-parse-file-diff", + envvar="DBT_PARTIAL_PARSE_FILE_DIFF", + help="Internal flag for whether to compute a file diff during partial parsing.", + hidden=True, + default=True, +) + populate_cache = click.option(
"--populate-cache/--no-populate-cache", envvar="DBT_POPULATE_CACHE", @@ -298,8 +361,8 @@ profile = click.option( "--profile", - envvar=None, - help="Which profile to load. Overrides setting in dbt_project.yml.", + envvar="DBT_PROFILE", + help="Which existing profile to load. Overrides setting in dbt_project.yml.", ) profiles_dir = click.option( @@ -347,15 +410,18 @@ resource_type = click.option( "--resource-types", "--resource-type", - envvar=None, + envvar="DBT_RESOURCE_TYPES", help="Restricts the types of resources that dbt will include", type=ChoiceTuple( [ "metric", + "semantic_model", + "saved_query", "source", "analysis", "model", "test", + "unit_test", "exposure", "snapshot", "seed", @@ -369,6 +435,42 @@ default=(), ) +exclude_resource_type = click.option( + "--exclude-resource-types", + "--exclude-resource-type", + envvar="DBT_EXCLUDE_RESOURCE_TYPES", + help="Specify the types of resources that dbt will exclude", + type=ChoiceTuple( + [ + "metric", + "semantic_model", + "saved_query", + "source", + "analysis", + "model", + "test", + "unit_test", + "exposure", + "snapshot", + "seed", + "default", + ], + case_sensitive=False, + ), + cls=MultiOption, + multiple=True, + default=(), +) + +# Renamed to --export-saved-queries +deprecated_include_saved_query = click.option( + "--include-saved-query/--no-include-saved-query", + envvar="DBT_INCLUDE_SAVED_QUERY", + help="Include saved queries in the list of resources to be selected for build command", + is_flag=True, + hidden=True, +) + model_decls = ("-m", "--models", "--model") select_decls = ("-s", "--select") select_attrs = { @@ -406,6 +508,13 @@ default=True, ) +clean_project_files_only = click.option( + "--clean-project-files-only / --no-clean-project-files-only", + envvar="DBT_CLEAN_PROJECT_FILES_ONLY", + help="If disabled, dbt clean will delete all paths specified in clean-paths, even if they're outside the dbt project.", + default=True, +) + show = click.option( "--show", envvar=None, @@ -441,6 +550,21 @@ is_flag=True, ) +source = click.option( + "--source", + envvar=None, + help="Source to download page from, must be one of hub, git, or local. 
Defaults to hub.", + type=click.Choice(["hub", "git", "local"], case_sensitive=True), + default="hub", +) + +static = click.option( + "--static", + help="Generate an additional static_index.html with manifest and catalog built-in.", + default=False, + is_flag=True, +) + state = click.option( "--state", envvar="DBT_STATE", @@ -498,7 +622,7 @@ target = click.option( "--target", "-t", - envvar=None, + envvar="DBT_TARGET", help="Which target to load for the given profile", ) @@ -509,6 +633,13 @@ type=click.Path(), ) +upgrade = click.option( + "--upgrade", + envvar=None, + help="Upgrade packages to the latest version.", + is_flag=True, +) + debug_connection = click.option( "--connection", envvar=None, @@ -590,3 +721,10 @@ def _version_callback(ctx, _param, value): help="Whether or not to write the manifest.json and run_results.json files to the target directory", default=True, ) + +show_resource_report = click.option( + "--show-resource-report/--no-show-resource-report", + default=False, + envvar="DBT_SHOW_RESOURCE_REPORT", + hidden=True, +) diff --git a/core/dbt/cli/requires.py b/core/dbt/cli/requires.py index e8d9e69cc80..0c0b1900827 100644 --- a/core/dbt/cli/requires.py +++ b/core/dbt/cli/requires.py @@ -1,34 +1,54 @@ +import importlib.util +import os +import time +import traceback +from functools import update_wrapper +from typing import Optional + +from click import Context + import dbt.tracking -from dbt.version import installed as installed_version -from dbt.adapters.factory import adapter_management, register_adapter -from dbt.flags import set_flags, get_flag_dict -from dbt.cli.exceptions import ( - ExceptionExit, - ResultExit, -) +from dbt.adapters.factory import adapter_management, get_adapter, register_adapter +from dbt.cli.exceptions import ExceptionExit, ResultExit from dbt.cli.flags import Flags from dbt.config import RuntimeConfig -from dbt.config.runtime import load_project, load_profile, UnsetProfile -from dbt.events.functions import fire_event, LOG_VERSION, set_invocation_id, setup_event_logger +from dbt.config.runtime import UnsetProfile, load_profile, load_project +from dbt.context.providers import generate_runtime_macro_context +from dbt.context.query_header import generate_query_header_context +from dbt.events.logging import setup_event_logger from dbt.events.types import ( CommandCompleted, - MainReportVersion, + MainEncounteredError, MainReportArgs, + MainReportVersion, + MainStackTrace, MainTrackingUserState, + ResourceReport, ) -from dbt.events.helpers import get_json_string_utcnow -from dbt.events.types import MainEncounteredError, MainStackTrace -from dbt.exceptions import Exception as DbtException, DbtProjectError, FailFastError -from dbt.parser.manifest import ManifestLoader, write_manifest +from dbt.exceptions import DbtProjectError, FailFastError +from dbt.flags import get_flag_dict, set_flags +from dbt.mp_context import get_mp_context +from dbt.parser.manifest import parse_manifest +from dbt.plugins import set_up_plugin_manager from dbt.profiler import profiler from dbt.tracking import active_user, initialize_from_flags, track_run -from dbt.utils import cast_dict_to_dict_of_strings -from dbt.plugins import set_up_plugin_manager, get_plugin_manager - -from click import Context -from functools import update_wrapper -import time -import traceback +from dbt.utils import try_get_max_rss_kb +from dbt.version import installed as installed_version +from dbt_common.clients.system import get_env +from dbt_common.context import get_invocation_context, 
set_invocation_context +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import LOG_VERSION, fire_event +from dbt_common.events.helpers import get_json_string_utcnow +from dbt_common.exceptions import DbtBaseException as DbtException +from dbt_common.invocation import reset_invocation_id +from dbt_common.record import ( + Recorder, + RecorderMode, + get_record_mode_from_env, + get_record_types_from_dict, + get_record_types_from_env, +) +from dbt_common.utils import cast_dict_to_dict_of_strings def preflight(func): @@ -37,14 +57,25 @@ def wrapper(*args, **kwargs): assert isinstance(ctx, Context) ctx.obj = ctx.obj or {} + set_invocation_context({}) + + # Record/Replay + setup_record_replay() + + # Must be set after record/replay is set up so that the env can be + # recorded or replayed if needed. + get_invocation_context()._env = get_env() + # Flags flags = Flags(ctx) ctx.obj["flags"] = flags set_flags(flags) + # Reset invocation_id for each 'invocation' of a dbt command (can happen multiple times in a single process) + reset_invocation_id() + # Logging callbacks = ctx.obj.get("callbacks", []) - set_invocation_id() setup_event_logger(flags=flags, callbacks=callbacks) # Tracking @@ -74,6 +105,41 @@ def wrapper(*args, **kwargs): return update_wrapper(wrapper, func) +def setup_record_replay(): + rec_mode = get_record_mode_from_env() + rec_types = get_record_types_from_env() + + recorder: Optional[Recorder] = None + if rec_mode == RecorderMode.REPLAY: + previous_recording_path = os.environ.get("DBT_RECORDER_FILE_PATH") + recorder = Recorder( + RecorderMode.REPLAY, types=rec_types, previous_recording_path=previous_recording_path + ) + elif rec_mode == RecorderMode.DIFF: + previous_recording_path = os.environ.get("DBT_RECORDER_FILE_PATH") + # ensure types match the previous recording + types = get_record_types_from_dict(previous_recording_path) + recorder = Recorder( + RecorderMode.DIFF, types=types, previous_recording_path=previous_recording_path + ) + elif rec_mode == RecorderMode.RECORD: + recorder = Recorder(RecorderMode.RECORD, types=rec_types) + + get_invocation_context().recorder = recorder + + +def tear_down_record_replay(): + recorder = get_invocation_context().recorder + if recorder is not None: + if recorder.mode == RecorderMode.RECORD: + recorder.write() + if recorder.mode == RecorderMode.DIFF: + recorder.write() + recorder.write_diffs(diff_file_name="recording_diffs.json") + elif recorder.mode == RecorderMode.REPLAY: + recorder.write_diffs("replay_diffs.json") + + def postflight(func): """The decorator that handles all exception handling for the click commands. This decorator must be used before any other decorators that may throw an exception.""" @@ -96,6 +162,30 @@ def wrapper(*args, **kwargs): fire_event(MainStackTrace(stack_trace=traceback.format_exc())) raise ExceptionExit(e) finally: + # Fire ResourceReport, but only on systems which support the resource + # module. (Skip it on Windows). 
+ if importlib.util.find_spec("resource") is not None: + import resource + + rusage = resource.getrusage(resource.RUSAGE_SELF) + fire_event( + ResourceReport( + command_name=ctx.command.name, + command_success=success, + command_wall_clock_time=time.perf_counter() - start_func, + process_user_time=rusage.ru_utime, + process_kernel_time=rusage.ru_stime, + process_mem_max_rss=try_get_max_rss_kb() or rusage.ru_maxrss, + process_in_blocks=rusage.ru_inblock, + process_out_blocks=rusage.ru_oublock, + ), + ( + EventLevel.INFO + if "flags" in ctx.obj and ctx.obj["flags"].SHOW_RESOURCE_REPORT + else None + ), + ) + fire_event( CommandCompleted( command=ctx.command_path, @@ -105,6 +195,8 @@ def wrapper(*args, **kwargs): ) ) + tear_down_record_replay() + if not success: raise ResultExit(result) @@ -239,23 +331,21 @@ def wrapper(*args, **kwargs): raise DbtProjectError("profile, project, and runtime_config required for manifest") runtime_config = ctx.obj["runtime_config"] - register_adapter(runtime_config) - # a manifest has already been set on the context, so don't overwrite it + # if a manifest has already been set on the context, don't overwrite it if ctx.obj.get("manifest") is None: - manifest = ManifestLoader.get_full_manifest( - runtime_config, - write_perf_info=write_perf_info, + ctx.obj["manifest"] = parse_manifest( + runtime_config, write_perf_info, write, ctx.obj["flags"].write_json ) - - ctx.obj["manifest"] = manifest - if write and ctx.obj["flags"].write_json: - write_manifest(manifest, runtime_config.project_target_path) - pm = get_plugin_manager(runtime_config.project_name) - plugin_artifacts = pm.get_manifest_artifacts(manifest) - for path, plugin_artifact in plugin_artifacts.items(): - plugin_artifact.write(path) - + else: + register_adapter(runtime_config, get_mp_context()) + adapter = get_adapter(runtime_config) + adapter.set_macro_context_generator(generate_runtime_macro_context) + adapter.set_macro_resolver(ctx.obj["manifest"]) + query_header_context = generate_query_header_context( + adapter.config, ctx.obj["manifest"] + ) + adapter.connections.set_query_header(query_header_context) return func(*args, **kwargs) return update_wrapper(wrapper, func) diff --git a/core/dbt/cli/resolvers.py b/core/dbt/cli/resolvers.py index 48ba92c365a..6d495501c39 100644 --- a/core/dbt/cli/resolvers.py +++ b/core/dbt/cli/resolvers.py @@ -1,4 +1,5 @@ from pathlib import Path + from dbt.config.project import PartialProject from dbt.exceptions import DbtProjectError diff --git a/core/dbt/cli/types.py b/core/dbt/cli/types.py index 14028a69451..1da078df902 100644 --- a/core/dbt/cli/types.py +++ b/core/dbt/cli/types.py @@ -1,7 +1,7 @@ from enum import Enum from typing import List -from dbt.exceptions import DbtInternalError +from dbt_common.exceptions import DbtInternalError class Command(Enum): diff --git a/core/dbt/clients/_jinja_blocks.py b/core/dbt/clients/_jinja_blocks.py deleted file mode 100644 index 1ada0a6234d..00000000000 --- a/core/dbt/clients/_jinja_blocks.py +++ /dev/null @@ -1,360 +0,0 @@ -import re -from collections import namedtuple - -from dbt.exceptions import ( - BlockDefinitionNotAtTopError, - DbtInternalError, - MissingCloseTagError, - MissingControlFlowStartTagError, - NestedTagsError, - UnexpectedControlFlowEndTagError, - UnexpectedMacroEOFError, -) - - -def regex(pat): - return re.compile(pat, re.DOTALL | re.MULTILINE) - - -class BlockData: - """raw plaintext data from the top level of the file.""" - - def __init__(self, contents): - self.block_type_name = "__dbt__data" - 
self.contents = contents - self.full_block = contents - - -class BlockTag: - def __init__(self, block_type_name, block_name, contents=None, full_block=None, **kw): - self.block_type_name = block_type_name - self.block_name = block_name - self.contents = contents - self.full_block = full_block - - def __str__(self): - return "BlockTag({!r}, {!r})".format(self.block_type_name, self.block_name) - - def __repr__(self): - return str(self) - - @property - def end_block_type_name(self): - return "end{}".format(self.block_type_name) - - def end_pat(self): - # we don't want to use string formatting here because jinja uses most - # of the string formatting operators in its syntax... - pattern = "".join( - ( - r"(?P<endblock>((?:\s*\{\%\-|\{\%)\s*", - self.end_block_type_name, - r"\s*(?:\-\%\}\s*|\%\})))", - ) - ) - return regex(pattern) - - -Tag = namedtuple("Tag", "block_type_name block_name start end") - - -_NAME_PATTERN = r"[A-Za-z_][A-Za-z_0-9]*" - -COMMENT_START_PATTERN = regex(r"(?:(?P<comment_start>(\s*\{\#)))") -COMMENT_END_PATTERN = regex(r"(.*?)(\s*\#\})") -RAW_START_PATTERN = regex(r"(?:\s*\{\%\-|\{\%)\s*(?P<raw_start>(raw))\s*(?:\-\%\}\s*|\%\})") -EXPR_START_PATTERN = regex(r"(?P<expr_start>(\{\{\s*))") -EXPR_END_PATTERN = regex(r"(?P<expr_end>(\s*\}\}))") - -BLOCK_START_PATTERN = regex( - "".join( - ( - r"(?:\s*\{\%\-|\{\%)\s*", - r"(?P<block_type_name>({}))".format(_NAME_PATTERN), - # some blocks have a 'block name'. - r"(?:\s+(?P<block_name>({})))?".format(_NAME_PATTERN), - ) - ) -) - - -RAW_BLOCK_PATTERN = regex( - "".join( - ( - r"(?:\s*\{\%\-|\{\%)\s*raw\s*(?:\-\%\}\s*|\%\})", - r"(?:.*?)", - r"(?:\s*\{\%\-|\{\%)\s*endraw\s*(?:\-\%\}\s*|\%\})", - ) - ) -) - -TAG_CLOSE_PATTERN = regex(r"(?:(?P<tag_close>(\-\%\}\s*|\%\})))") - -# stolen from jinja's lexer. Note that we've consumed all prefix whitespace by -# the time we want to use this. -STRING_PATTERN = regex(r"(?P<string>('([^'\\]*(?:\\.[^'\\]*)*)'|" r'"([^"\\]*(?:\\.[^"\\]*)*)"))') - -QUOTE_START_PATTERN = regex(r"""(?P<quote>(['"]))""") - - -class TagIterator: - def __init__(self, data): - self.data = data - self.blocks = [] - self._parenthesis_stack = [] - self.pos = 0 - - def linepos(self, end=None) -> str: - """Given an absolute position in the input data, return a pair of - line number + relative position to the start of the line. - """ - end_val: int = self.pos if end is None else end - data = self.data[:end_val] - # if not found, rfind returns -1, and -1+1=0, which is perfect! - last_line_start = data.rfind("\n") + 1 - # it's easy to forget this, but line numbers are 1-indexed - line_number = data.count("\n") + 1 - return f"{line_number}:{end_val - last_line_start}" - - def advance(self, new_position): - self.pos = new_position - - def rewind(self, amount=1): - self.pos -= amount - - def _search(self, pattern): - return pattern.search(self.data, self.pos) - - def _match(self, pattern): - return pattern.match(self.data, self.pos) - - def _first_match(self, *patterns, **kwargs): - matches = [] - for pattern in patterns: - # default to 'search', but sometimes we want to 'match'. - if kwargs.get("method", "search") == "search": - match = self._search(pattern) - else: - match = self._match(pattern) - if match: - matches.append(match) - if not matches: - return None - # if there are multiple matches, pick the least greedy match - # TODO: do I need to account for m.start(), or is this ok? 
- return min(matches, key=lambda m: m.end()) - - def _expect_match(self, expected_name, *patterns, **kwargs): - match = self._first_match(*patterns, **kwargs) - if match is None: - raise UnexpectedMacroEOFError(expected_name, self.data[self.pos :]) - return match - - def handle_expr(self, match): - """Handle an expression. At this point we're at a string like: - {{ 1 + 2 }} - ^ right here - - And the match contains "{{ " - - We expect to find a `}}`, but we might find one in a string before - that. Imagine the case of `{{ 2 * "}}" }}`... - - You're not allowed to have blocks or comments inside an expr so it is - pretty straightforward, I hope: only strings can get in the way. - """ - self.advance(match.end()) - while True: - match = self._expect_match("}}", EXPR_END_PATTERN, QUOTE_START_PATTERN) - if match.groupdict().get("expr_end") is not None: - break - else: - # it's a quote. we haven't advanced for this match yet, so - # just slurp up the whole string, no need to rewind. - match = self._expect_match("string", STRING_PATTERN) - self.advance(match.end()) - - self.advance(match.end()) - - def handle_comment(self, match): - self.advance(match.end()) - match = self._expect_match("#}", COMMENT_END_PATTERN) - self.advance(match.end()) - - def _expect_block_close(self): - """Search for the tag close marker. - To the right of the type name, there are a few possiblities: - - a name (handled by the regex's 'block_name') - - any number of: `=`, `(`, `)`, strings, etc (arguments) - - nothing - - followed eventually by a %} - - So the only characters we actually have to worry about in this context - are quote and `%}` - nothing else can hide the %} and be valid jinja. - """ - while True: - end_match = self._expect_match( - 'tag close ("%}")', QUOTE_START_PATTERN, TAG_CLOSE_PATTERN - ) - self.advance(end_match.end()) - if end_match.groupdict().get("tag_close") is not None: - return - # must be a string. Rewind to its start and advance past it. - self.rewind() - string_match = self._expect_match("string", STRING_PATTERN) - self.advance(string_match.end()) - - def handle_raw(self): - # raw blocks are super special, they are a single complete regex - match = self._expect_match("{% raw %}...{% endraw %}", RAW_BLOCK_PATTERN) - self.advance(match.end()) - return match.end() - - def handle_tag(self, match): - """The tag could be one of a few things: - - {% mytag %} - {% mytag x = y %} - {% mytag x = "y" %} - {% mytag x.y() %} - {% mytag foo("a", "b", c="d") %} - - But the key here is that it's always going to be `{% mytag`! 
- """ - groups = match.groupdict() - # always a value - block_type_name = groups["block_type_name"] - # might be None - block_name = groups.get("block_name") - start_pos = self.pos - if block_type_name == "raw": - match = self._expect_match("{% raw %}...{% endraw %}", RAW_BLOCK_PATTERN) - self.advance(match.end()) - else: - self.advance(match.end()) - self._expect_block_close() - return Tag( - block_type_name=block_type_name, block_name=block_name, start=start_pos, end=self.pos - ) - - def find_tags(self): - while True: - match = self._first_match( - BLOCK_START_PATTERN, COMMENT_START_PATTERN, EXPR_START_PATTERN - ) - if match is None: - break - - self.advance(match.start()) - # start = self.pos - - groups = match.groupdict() - comment_start = groups.get("comment_start") - expr_start = groups.get("expr_start") - block_type_name = groups.get("block_type_name") - - if comment_start is not None: - self.handle_comment(match) - elif expr_start is not None: - self.handle_expr(match) - elif block_type_name is not None: - yield self.handle_tag(match) - else: - raise DbtInternalError( - "Invalid regex match in next_block, expected block start, " - "expr start, or comment start" - ) - - def __iter__(self): - return self.find_tags() - - -_CONTROL_FLOW_TAGS = { - "if": "endif", - "for": "endfor", -} - -_CONTROL_FLOW_END_TAGS = {v: k for k, v in _CONTROL_FLOW_TAGS.items()} - - -class BlockIterator: - def __init__(self, data): - self.tag_parser = TagIterator(data) - self.current = None - self.stack = [] - self.last_position = 0 - - @property - def current_end(self): - if self.current is None: - return 0 - else: - return self.current.end - - @property - def data(self): - return self.tag_parser.data - - def is_current_end(self, tag): - return ( - tag.block_type_name.startswith("end") - and self.current is not None - and tag.block_type_name[3:] == self.current.block_type_name - ) - - def find_blocks(self, allowed_blocks=None, collect_raw_data=True): - """Find all top-level blocks in the data.""" - if allowed_blocks is None: - allowed_blocks = {"snapshot", "macro", "materialization", "docs"} - - for tag in self.tag_parser.find_tags(): - if tag.block_type_name in _CONTROL_FLOW_TAGS: - self.stack.append(tag.block_type_name) - elif tag.block_type_name in _CONTROL_FLOW_END_TAGS: - found = None - if self.stack: - found = self.stack.pop() - else: - expected = _CONTROL_FLOW_END_TAGS[tag.block_type_name] - raise UnexpectedControlFlowEndTagError(tag, expected, self.tag_parser) - expected = _CONTROL_FLOW_TAGS[found] - if expected != tag.block_type_name: - raise MissingControlFlowStartTagError(tag, expected, self.tag_parser) - - if tag.block_type_name in allowed_blocks: - if self.stack: - raise BlockDefinitionNotAtTopError(self.tag_parser, tag.start) - if self.current is not None: - raise NestedTagsError(outer=self.current, inner=tag) - if collect_raw_data: - raw_data = self.data[self.last_position : tag.start] - self.last_position = tag.start - if raw_data: - yield BlockData(raw_data) - self.current = tag - - elif self.is_current_end(tag): - self.last_position = tag.end - assert self.current is not None - yield BlockTag( - block_type_name=self.current.block_type_name, - block_name=self.current.block_name, - contents=self.data[self.current.end : tag.start], - full_block=self.data[self.current.start : tag.end], - ) - self.current = None - - if self.current: - linecount = self.data[: self.current.end].count("\n") + 1 - raise MissingCloseTagError(self.current.block_type_name, linecount) - - if collect_raw_data: - 
raw_data = self.data[self.last_position :] - if raw_data: - yield BlockData(raw_data) - - def lex_for_blocks(self, allowed_blocks=None, collect_raw_data=True): - return list( - self.find_blocks(allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data) - ) diff --git a/core/dbt/clients/agate_helper.py b/core/dbt/clients/agate_helper.py deleted file mode 100644 index 1d69a2bd17f..00000000000 --- a/core/dbt/clients/agate_helper.py +++ /dev/null @@ -1,220 +0,0 @@ -from codecs import BOM_UTF8 - -import agate -import datetime -import isodate -import json -import dbt.utils -from typing import Iterable, List, Dict, Union, Optional, Any - -from dbt.exceptions import DbtRuntimeError - - -BOM = BOM_UTF8.decode("utf-8") # '\ufeff' - - -class Number(agate.data_types.Number): - # undo the change in https://github.com/wireservice/agate/pull/733 - # i.e. do not cast True and False to numeric 1 and 0 - def cast(self, d): - if type(d) == bool: - raise agate.exceptions.CastError("Do not cast True to 1 or False to 0.") - else: - return super().cast(d) - - -class ISODateTime(agate.data_types.DateTime): - def cast(self, d): - # this is agate.data_types.DateTime.cast with the "clever" bits removed - # so we only handle ISO8601 stuff - if isinstance(d, datetime.datetime) or d is None: - return d - elif isinstance(d, datetime.date): - return datetime.datetime.combine(d, datetime.time(0, 0, 0)) - elif isinstance(d, str): - d = d.strip() - if d.lower() in self.null_values: - return None - try: - return isodate.parse_datetime(d) - except: # noqa - pass - - raise agate.exceptions.CastError('Can not parse value "%s" as datetime.' % d) - - -def build_type_tester( - text_columns: Iterable[str], string_null_values: Optional[Iterable[str]] = ("null", "") -) -> agate.TypeTester: - - types = [ - Number(null_values=("null", "")), - agate.data_types.Date(null_values=("null", ""), date_format="%Y-%m-%d"), - agate.data_types.DateTime(null_values=("null", ""), datetime_format="%Y-%m-%d %H:%M:%S"), - ISODateTime(null_values=("null", "")), - agate.data_types.Boolean( - true_values=("true",), false_values=("false",), null_values=("null", "") - ), - agate.data_types.Text(null_values=string_null_values), - ] - force = {k: agate.data_types.Text(null_values=string_null_values) for k in text_columns} - return agate.TypeTester(force=force, types=types) - - -DEFAULT_TYPE_TESTER = build_type_tester(()) - - -def table_from_rows( - rows: List[Any], - column_names: Iterable[str], - text_only_columns: Optional[Iterable[str]] = None, -) -> agate.Table: - if text_only_columns is None: - column_types = DEFAULT_TYPE_TESTER - else: - # If text_only_columns are present, prevent coercing empty string or - # literal 'null' strings to a None representation. - column_types = build_type_tester(text_only_columns, string_null_values=()) - - return agate.Table(rows, column_names, column_types=column_types) - - -def table_from_data(data, column_names: Iterable[str]) -> agate.Table: - "Convert a list of dictionaries into an Agate table" - - # The agate table is generated from a list of dicts, so the column order - # from `data` is not preserved. 
We can use `select` to reorder the columns - # - # If there is no data, create an empty table with the specified columns - - if len(data) == 0: - return agate.Table([], column_names=column_names) - else: - table = agate.Table.from_object(data, column_types=DEFAULT_TYPE_TESTER) - return table.select(column_names) - - -def table_from_data_flat(data, column_names: Iterable[str]) -> agate.Table: - """ - Convert a list of dictionaries into an Agate table. This method does not - coerce string values into more specific types (eg. '005' will not be - coerced to '5'). Additionally, this method does not coerce values to - None (eg. '' or 'null' will retain their string literal representations). - """ - - rows = [] - text_only_columns = set() - for _row in data: - row = [] - for col_name in column_names: - value = _row[col_name] - if isinstance(value, (dict, list, tuple)): - # Represent container types as json strings - value = json.dumps(value, cls=dbt.utils.JSONEncoder) - text_only_columns.add(col_name) - elif isinstance(value, str): - text_only_columns.add(col_name) - row.append(value) - - rows.append(row) - - return table_from_rows( - rows=rows, column_names=column_names, text_only_columns=text_only_columns - ) - - -def empty_table(): - "Returns an empty Agate table. To be used in place of None" - - return agate.Table(rows=[]) - - -def as_matrix(table): - "Return an agate table as a matrix of data sans columns" - - return [r.values() for r in table.rows.values()] - - -def from_csv(abspath, text_columns): - type_tester = build_type_tester(text_columns=text_columns) - with open(abspath, encoding="utf-8") as fp: - if fp.read(1) != BOM: - fp.seek(0) - return agate.Table.from_csv(fp, column_types=type_tester) - - -class _NullMarker: - pass - - -NullableAgateType = Union[agate.data_types.DataType, _NullMarker] - - -class ColumnTypeBuilder(Dict[str, NullableAgateType]): - def __init__(self): - super().__init__() - - def __setitem__(self, key, value): - if key not in self: - super().__setitem__(key, value) - return - - existing_type = self[key] - if isinstance(existing_type, _NullMarker): - # overwrite - super().__setitem__(key, value) - elif isinstance(value, _NullMarker): - # use the existing value - return - elif not isinstance(value, type(existing_type)): - # actual type mismatch! - raise DbtRuntimeError( - f"Tables contain columns with the same names ({key}), " - f"but different types ({value} vs {existing_type})" - ) - - def finalize(self) -> Dict[str, agate.data_types.DataType]: - result: Dict[str, agate.data_types.DataType] = {} - for key, value in self.items(): - if isinstance(value, _NullMarker): - # this is what agate would do. - result[key] = agate.data_types.Number() - else: - result[key] = value - return result - - -def _merged_column_types(tables: List[agate.Table]) -> Dict[str, agate.data_types.DataType]: - # this is a lot like agate.Table.merge, but with handling for all-null - # rows being "any type". 
- new_columns: ColumnTypeBuilder = ColumnTypeBuilder() - for table in tables: - for i in range(len(table.columns)): - column_name: str = table.column_names[i] - column_type: NullableAgateType = table.column_types[i] - # avoid over-sensitive type inference - if all(x is None for x in table.columns[column_name]): - column_type = _NullMarker() - new_columns[column_name] = column_type - - return new_columns.finalize() - - -def merge_tables(tables: List[agate.Table]) -> agate.Table: - """This is similar to agate.Table.merge, but it handles rows of all 'null' - values more gracefully during merges. - """ - new_columns = _merged_column_types(tables) - column_names = tuple(new_columns.keys()) - column_types = tuple(new_columns.values()) - - rows: List[agate.Row] = [] - for table in tables: - if table.column_names == column_names and table.column_types == column_types: - rows.extend(table.rows) - else: - for row in table.rows: - data = [row.get(name, None) for name in column_names] - rows.append(agate.Row(data, column_names)) - # _is_fork to tell agate that we already made things into `Row`s. - return agate.Table(rows, column_names, column_types, _is_fork=True) diff --git a/core/dbt/clients/git.py b/core/dbt/clients/git.py index d6cb3f3870c..33beb93a6ba 100644 --- a/core/dbt/clients/git.py +++ b/core/dbt/clients/git.py @@ -1,25 +1,26 @@ -import re import os.path +import re + +from packaging import version -from dbt.clients.system import run_cmd, rmdir -from dbt.events.functions import fire_event from dbt.events.types import ( - GitSparseCheckoutSubdirectory, + GitNothingToDo, + GitProgressCheckedOutAt, GitProgressCheckoutRevision, - GitProgressUpdatingExistingDependency, GitProgressPullingNewDependency, - GitNothingToDo, GitProgressUpdatedCheckoutRange, - GitProgressCheckedOutAt, + GitProgressUpdatingExistingDependency, + GitSparseCheckoutSubdirectory, ) from dbt.exceptions import ( CommandResultError, + DbtRuntimeError, GitCheckoutError, GitCloningError, UnknownGitCloningProblemError, - DbtRuntimeError, ) -from packaging import version +from dbt_common.clients.system import rmdir, run_cmd +from dbt_common.events.functions import fire_event def _is_commit(revision: str) -> bool: @@ -111,7 +112,7 @@ def checkout(cwd, repo, revision=None): def get_current_sha(cwd): out, err = run_cmd(cwd, ["git", "rev-parse", "HEAD"], env={"LC_ALL": "C"}) - return out.decode("utf-8") + return out.decode("utf-8").strip() def remove_remote(cwd): diff --git a/core/dbt/clients/jinja.py b/core/dbt/clients/jinja.py index ca7814792b1..4aa6f013916 100644 --- a/core/dbt/clients/jinja.py +++ b/core/dbt/clients/jinja.py @@ -1,13 +1,7 @@ -import codecs -import linecache -import os import re -import tempfile import threading -from ast import literal_eval from contextlib import contextmanager -from itertools import chain, islice -from typing import List, Union, Set, Optional, Dict, Any, Iterator, Type, NoReturn, Tuple, Callable +from typing import Any, Dict, List, NoReturn, Optional, Tuple, Union import jinja2 import jinja2.ext @@ -16,249 +10,24 @@ import jinja2.parser import jinja2.sandbox -from dbt.utils import ( - get_dbt_macro_name, - get_docs_macro_name, - get_materialization_macro_name, - get_test_macro_name, - deep_map_render, -) - -from dbt.clients._jinja_blocks import BlockIterator, BlockData, BlockTag from dbt.contracts.graph.nodes import GenericTestNode - from dbt.exceptions import ( - CaughtMacroError, - CaughtMacroErrorWithNodeError, - CompilationError, DbtInternalError, - MaterializationArgError, - 
JinjaRenderingError, - MacroReturn, MaterializtionMacroNotUsedError, NoSupportedLanguagesFoundError, - UndefinedCompilationError, - UndefinedMacroError, ) -from dbt.flags import get_flags from dbt.node_types import ModelLanguage - +from dbt_common.clients.jinja import ( + CallableMacroGenerator, + MacroProtocol, + get_template, + render_template, +) +from dbt_common.utils import deep_map_render SUPPORTED_LANG_ARG = jinja2.nodes.Name("supported_languages", "param") -def _linecache_inject(source, write): - if write: - # this is the only reliable way to accomplish this. Obviously, it's - # really darn noisy and will fill your temporary directory - tmp_file = tempfile.NamedTemporaryFile( - prefix="dbt-macro-compiled-", - suffix=".py", - delete=False, - mode="w+", - encoding="utf-8", - ) - tmp_file.write(source) - filename = tmp_file.name - else: - # `codecs.encode` actually takes a `bytes` as the first argument if - # the second argument is 'hex' - mypy does not know this. - rnd = codecs.encode(os.urandom(12), "hex") # type: ignore - filename = rnd.decode("ascii") - - # put ourselves in the cache - cache_entry = (len(source), None, [line + "\n" for line in source.splitlines()], filename) - # linecache does in fact have an attribute `cache`, thanks - linecache.cache[filename] = cache_entry # type: ignore - return filename - - -class MacroFuzzParser(jinja2.parser.Parser): - def parse_macro(self): - node = jinja2.nodes.Macro(lineno=next(self.stream).lineno) - - # modified to fuzz macros defined in the same file. this way - # dbt can understand the stack of macros being called. - # - @cmcarthur - node.name = get_dbt_macro_name(self.parse_assign_target(name_only=True).name) - - self.parse_signature(node) - node.body = self.parse_statements(("name:endmacro",), drop_needle=True) - return node - - -class MacroFuzzEnvironment(jinja2.sandbox.SandboxedEnvironment): - def _parse(self, source, name, filename): - return MacroFuzzParser(self, source, name, filename).parse() - - def _compile(self, source, filename): - """Override jinja's compilation to stash the rendered source inside - the python linecache for debugging when the appropriate environment - variable is set. - - If the value is 'write', also write the files to disk. - WARNING: This can write a ton of data if you aren't careful. - """ - macro_debugging = get_flags().MACRO_DEBUGGING - if filename == "<template>" and macro_debugging: - write = macro_debugging == "write" - filename = _linecache_inject(source, write) - - return super()._compile(source, filename) # type: ignore - - -class NativeSandboxEnvironment(MacroFuzzEnvironment): - code_generator_class = jinja2.nativetypes.NativeCodeGenerator - - -class TextMarker(str): - """A special native-env marker that indicates a value is text and is - not to be evaluated. Use this to prevent your numbery-strings from becoming - numbers! - """ - - -class NativeMarker(str): - """A special native-env marker that indicates the field should be passed to - literal_eval. - """ - - -class BoolMarker(NativeMarker): - pass - - -class NumberMarker(NativeMarker): - pass - - -def _is_number(value) -> bool: - return isinstance(value, (int, float)) and not isinstance(value, bool) - - -def quoted_native_concat(nodes): - """This is almost native_concat from the NativeTemplate, except in the - special case of a single argument that is a quoted string and returns a - string, the quotes are re-inserted. 
- """ - head = list(islice(nodes, 2)) - - if not head: - return "" - - if len(head) == 1: - raw = head[0] - if isinstance(raw, TextMarker): - return str(raw) - elif not isinstance(raw, NativeMarker): - # return non-strings as-is - return raw - else: - # multiple nodes become a string. - return "".join([str(v) for v in chain(head, nodes)]) - - try: - result = literal_eval(raw) - except (ValueError, SyntaxError, MemoryError): - result = raw - if isinstance(raw, BoolMarker) and not isinstance(result, bool): - raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'bool'") - if isinstance(raw, NumberMarker) and not _is_number(result): - raise JinjaRenderingError(f"Could not convert value '{raw!s}' into type 'number'") - - return result - - -class NativeSandboxTemplate(jinja2.nativetypes.NativeTemplate): # mypy: ignore - environment_class = NativeSandboxEnvironment # type: ignore - - def render(self, *args, **kwargs): - """Render the template to produce a native Python type. If the - result is a single node, its value is returned. Otherwise, the - nodes are concatenated as strings. If the result can be parsed - with :func:`ast.literal_eval`, the parsed value is returned. - Otherwise, the string is returned. - """ - vars = dict(*args, **kwargs) - - try: - return quoted_native_concat(self.root_render_func(self.new_context(vars))) - except Exception: - return self.environment.handle_exception() - - -NativeSandboxEnvironment.template_class = NativeSandboxTemplate # type: ignore - - -class TemplateCache: - def __init__(self): - self.file_cache: Dict[str, jinja2.Template] = {} - - def get_node_template(self, node) -> jinja2.Template: - key = node.macro_sql - - if key in self.file_cache: - return self.file_cache[key] - - template = get_template( - string=node.macro_sql, - ctx={}, - node=node, - ) - - self.file_cache[key] = template - return template - - def clear(self): - self.file_cache.clear() - - -template_cache = TemplateCache() - - -class BaseMacroGenerator: - def __init__(self, context: Optional[Dict[str, Any]] = None) -> None: - self.context: Optional[Dict[str, Any]] = context - - def get_template(self): - raise NotImplementedError("get_template not implemented!") - - def get_name(self) -> str: - raise NotImplementedError("get_name not implemented!") - - def get_macro(self): - name = self.get_name() - template = self.get_template() - # make the module. previously we set both vars and local, but that's - # redundant: They both end up in the same place - # make_module is in jinja2.environment. 
It returns a TemplateModule - module = template.make_module(vars=self.context, shared=False) - macro = module.__dict__[get_dbt_macro_name(name)] - module.__dict__.update(self.context) - return macro - - @contextmanager - def exception_handler(self) -> Iterator[None]: - try: - yield - except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e: - raise CaughtMacroError(e) - - def call_macro(self, *args, **kwargs): - # called from __call__ methods - if self.context is None: - raise DbtInternalError("Context is still None in call_macro!") - assert self.context is not None - - macro = self.get_macro() - - with self.exception_handler(): - try: - return macro(*args, **kwargs) - except MacroReturn as e: - return e.value - - class MacroStack(threading.local): def __init__(self): super().__init__() @@ -277,35 +46,18 @@ def pop(self, name): raise DbtInternalError(f"popped {got}, expected {name}") -class MacroGenerator(BaseMacroGenerator): +class MacroGenerator(CallableMacroGenerator): def __init__( self, - macro, + macro: MacroProtocol, context: Optional[Dict[str, Any]] = None, node: Optional[Any] = None, stack: Optional[MacroStack] = None, ) -> None: - super().__init__(context) - self.macro = macro + super().__init__(macro, context) self.node = node self.stack = stack - def get_template(self): - return template_cache.get_node_template(self.macro) - - def get_name(self) -> str: - return self.macro.name - - @contextmanager - def exception_handler(self) -> Iterator[None]: - try: - yield - except (TypeError, jinja2.exceptions.TemplateRuntimeError) as e: - raise CaughtMacroErrorWithNodeError(exc=e, node=self.macro) - except CompilationError as e: - e.stack.append(self.macro) - raise e - # This adds the macro's unique id to the node's 'depends_on' @contextmanager def track_call(self): @@ -330,232 +82,24 @@ def __call__(self, *args, **kwargs): return self.call_macro(*args, **kwargs) -class QueryStringGenerator(BaseMacroGenerator): - def __init__(self, template_str: str, context: Dict[str, Any]) -> None: - super().__init__(context) - self.template_str: str = template_str - env = get_environment() - self.template = env.from_string( - self.template_str, - globals=self.context, +class UnitTestMacroGenerator(MacroGenerator): + # this makes UnitTestMacroGenerator objects callable like functions + def __init__( + self, + macro_generator: MacroGenerator, + call_return_value: Any, + ) -> None: + super().__init__( + macro_generator.macro, + macro_generator.context, + macro_generator.node, + macro_generator.stack, ) + self.call_return_value = call_return_value - def get_name(self) -> str: - return "query_comment_macro" - - def get_template(self): - """Don't use the template cache, we don't have a node""" - return self.template - - def __call__(self, connection_name: str, node) -> str: - return str(self.call_macro(connection_name, node)) - - -class MaterializationExtension(jinja2.ext.Extension): - tags = ["materialization"] - - def parse(self, parser): - node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno) - materialization_name = parser.parse_assign_target(name_only=True).name - - adapter_name = "default" - node.args = [] - node.defaults = [] - - while parser.stream.skip_if("comma"): - target = parser.parse_assign_target(name_only=True) - - if target.name == "default": - pass - - elif target.name == "adapter": - parser.stream.expect("assign") - value = parser.parse_expression() - adapter_name = value.value - - elif target.name == "supported_languages": - target.set_ctx("param") - node.args.append(target) 
- parser.stream.expect("assign") - languages = parser.parse_expression() - node.defaults.append(languages) - - else: - raise MaterializationArgError(materialization_name, target.name) - - if SUPPORTED_LANG_ARG not in node.args: - node.args.append(SUPPORTED_LANG_ARG) - node.defaults.append(jinja2.nodes.List([jinja2.nodes.Const("sql")])) - - node.name = get_materialization_macro_name(materialization_name, adapter_name) - - node.body = parser.parse_statements(("name:endmaterialization",), drop_needle=True) - - return node - - -class DocumentationExtension(jinja2.ext.Extension): - tags = ["docs"] - - def parse(self, parser): - node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno) - docs_name = parser.parse_assign_target(name_only=True).name - - node.args = [] - node.defaults = [] - node.name = get_docs_macro_name(docs_name) - node.body = parser.parse_statements(("name:enddocs",), drop_needle=True) - return node - - -class TestExtension(jinja2.ext.Extension): - tags = ["test"] - - def parse(self, parser): - node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno) - test_name = parser.parse_assign_target(name_only=True).name - - parser.parse_signature(node) - node.name = get_test_macro_name(test_name) - node.body = parser.parse_statements(("name:endtest",), drop_needle=True) - return node - - -def _is_dunder_name(name): - return name.startswith("__") and name.endswith("__") - - -def create_undefined(node=None): - class Undefined(jinja2.Undefined): - def __init__(self, hint=None, obj=None, name=None, exc=None): - super().__init__(hint=hint, name=name) - self.node = node - self.name = name - self.hint = hint - # jinja uses these for safety, so we have to override them. - # see https://github.com/pallets/jinja/blob/master/jinja2/sandbox.py#L332-L339 # noqa - self.unsafe_callable = False - self.alters_data = False - - def __getitem__(self, name): - # Propagate the undefined value if a caller accesses this as if it - # were a dictionary - return self - - def __getattr__(self, name): - if name == "name" or _is_dunder_name(name): - raise AttributeError( - "'{}' object has no attribute '{}'".format(type(self).__name__, name) - ) - - self.name = name - - return self.__class__(hint=self.hint, name=self.name) - - def __call__(self, *args, **kwargs): - return self - - def __reduce__(self): - raise UndefinedCompilationError(name=self.name, node=node) - - return Undefined - - -NATIVE_FILTERS: Dict[str, Callable[[Any], Any]] = { - "as_text": TextMarker, - "as_bool": BoolMarker, - "as_native": NativeMarker, - "as_number": NumberMarker, -} - - -TEXT_FILTERS: Dict[str, Callable[[Any], Any]] = { - "as_text": lambda x: x, - "as_bool": lambda x: x, - "as_native": lambda x: x, - "as_number": lambda x: x, -} - - -def get_environment( - node=None, - capture_macros: bool = False, - native: bool = False, -) -> jinja2.Environment: - args: Dict[str, List[Union[str, Type[jinja2.ext.Extension]]]] = { - "extensions": ["jinja2.ext.do", "jinja2.ext.loopcontrols"] - } - - if capture_macros: - args["undefined"] = create_undefined(node) - - args["extensions"].append(MaterializationExtension) - args["extensions"].append(DocumentationExtension) - args["extensions"].append(TestExtension) - - env_cls: Type[jinja2.Environment] - text_filter: Type - if native: - env_cls = NativeSandboxEnvironment - filters = NATIVE_FILTERS - else: - env_cls = MacroFuzzEnvironment - filters = TEXT_FILTERS - - env = env_cls(**args) - env.filters.update(filters) - - return env - - -@contextmanager -def catch_jinja(node=None) -> Iterator[None]: - 
try: - yield - except jinja2.exceptions.TemplateSyntaxError as e: - e.translated = False - raise CompilationError(str(e), node) from e - except jinja2.exceptions.UndefinedError as e: - raise UndefinedMacroError(str(e), node) from e - except CompilationError as exc: - exc.add_node(node) - raise - - -def parse(string): - with catch_jinja(): - return get_environment().parse(str(string)) - - -def get_template( - string: str, - ctx: Dict[str, Any], - node=None, - capture_macros: bool = False, - native: bool = False, -): - with catch_jinja(node): - env = get_environment(node, capture_macros, native=native) - - template_source = str(string) - return env.from_string(template_source, globals=ctx) - - -def render_template(template, ctx: Dict[str, Any], node=None) -> str: - with catch_jinja(node): - return template.render(ctx) - - -def _requote_result(raw_value: str, rendered: str) -> str: - double_quoted = raw_value.startswith('"') and raw_value.endswith('"') - single_quoted = raw_value.startswith("'") and raw_value.endswith("'") - if double_quoted: - quote_char = '"' - elif single_quoted: - quote_char = "'" - else: - quote_char = "" - return f"{quote_char}{rendered}{quote_char}" + def __call__(self, *args, **kwargs): + with self.track_call(): + return self.call_return_value # performance note: Local benmcharking (so take it with a big grain of salt!) @@ -609,30 +153,6 @@ def undefined_error(msg) -> NoReturn: raise jinja2.exceptions.UndefinedError(msg) -def extract_toplevel_blocks( - data: str, - allowed_blocks: Optional[Set[str]] = None, - collect_raw_data: bool = True, -) -> List[Union[BlockData, BlockTag]]: - """Extract the top-level blocks with matching block types from a jinja - file, with some special handling for block nesting. - - :param data: The data to extract blocks from. - :param allowed_blocks: The names of the blocks to extract from the file. - They may not be nested within if/for blocks. If None, use the default - values. - :param collect_raw_data: If set, raw data between matched blocks will also - be part of the results, as `BlockData` objects. They have a - `block_type_name` field of `'__dbt_data'` and will never have a - `block_name`. - :return: A list of `BlockTag`s matching the allowed block types and (if - `collect_raw_data` is `True`) `BlockData` objects. 
- """ - return BlockIterator(data).lex_for_blocks( - allowed_blocks=allowed_blocks, collect_raw_data=collect_raw_data - ) - - GENERIC_TEST_KWARGS_NAME = "_dbt_generic_test_kwargs" diff --git a/core/dbt/clients/jinja_static.py b/core/dbt/clients/jinja_static.py index 8184c43622e..d8746a7607d 100644 --- a/core/dbt/clients/jinja_static.py +++ b/core/dbt/clients/jinja_static.py @@ -1,35 +1,40 @@ +from typing import Any, Dict, List, Optional, Union + import jinja2 -from dbt.clients.jinja import get_environment -from dbt.exceptions import MacroNamespaceNotStringError, MacroNameNotStringError + +from dbt.artifacts.resources import RefArgs +from dbt.exceptions import MacroNamespaceNotStringError, ParsingError +from dbt_common.clients.jinja import get_environment +from dbt_common.exceptions.macros import MacroNameNotStringError +from dbt_common.tests import test_caching_enabled +from dbt_extractor import ExtractionError, py_extract_from_source # type: ignore + +_TESTING_MACRO_CACHE: Optional[Dict[str, Any]] = {} def statically_extract_macro_calls(string, ctx, db_wrapper=None): # set 'capture_macros' to capture undefined env = get_environment(None, capture_macros=True) - parsed = env.parse(string) + + global _TESTING_MACRO_CACHE + if test_caching_enabled() and string in _TESTING_MACRO_CACHE: + parsed = _TESTING_MACRO_CACHE.get(string, None) + func_calls = getattr(parsed, "_dbt_cached_calls") + else: + parsed = env.parse(string) + func_calls = tuple(parsed.find_all(jinja2.nodes.Call)) + + if test_caching_enabled(): + _TESTING_MACRO_CACHE[string] = parsed + setattr(parsed, "_dbt_cached_calls", func_calls) standard_calls = ["source", "ref", "config"] possible_macro_calls = [] - for func_call in parsed.find_all(jinja2.nodes.Call): + for func_call in func_calls: func_name = None if hasattr(func_call, "node") and hasattr(func_call.node, "name"): func_name = func_call.node.name else: - # func_call for dbt.current_timestamp macro - # Call( - # node=Getattr( - # node=Name( - # name='dbt_utils', - # ctx='load' - # ), - # attr='current_timestamp', - # ctx='load - # ), - # args=[], - # kwargs=[], - # dyn_args=None, - # dyn_kwargs=None - # ) if ( hasattr(func_call, "node") and hasattr(func_call.node, "node") @@ -150,3 +155,39 @@ def statically_parse_adapter_dispatch(func_call, ctx, db_wrapper): possible_macro_calls.append(f"{package_name}.{func_name}") return possible_macro_calls + + +def statically_parse_ref_or_source(expression: str) -> Union[RefArgs, List[str]]: + """ + Returns a RefArgs or List[str] object, corresponding to ref or source respectively, given an input jinja expression. + + input: str representing how input node is referenced in tested model sql + * examples: + - "ref('my_model_a')" + - "ref('my_model_a', version=3)" + - "ref('package', 'my_model_a', version=3)" + - "source('my_source_schema', 'my_source_name')" + + If input is not a well-formed jinja ref or source expression, a ParsingError is raised. 
+ """ + ref_or_source: Union[RefArgs, List[str]] + + try: + statically_parsed = py_extract_from_source(f"{{{{ {expression} }}}}") + except ExtractionError: + raise ParsingError(f"Invalid jinja expression: {expression}") + + if statically_parsed.get("refs"): + raw_ref = list(statically_parsed["refs"])[0] + ref_or_source = RefArgs( + package=raw_ref.get("package"), + name=raw_ref.get("name"), + version=raw_ref.get("version"), + ) + elif statically_parsed.get("sources"): + source_name, source_table_name = list(statically_parsed["sources"])[0] + ref_or_source = [source_name, source_table_name] + else: + raise ParsingError(f"Invalid ref or source expression: {expression}") + + return ref_or_source diff --git a/core/dbt/clients/registry.py b/core/dbt/clients/registry.py index 92b32a7257c..ed634e9f811 100644 --- a/core/dbt/clients/registry.py +++ b/core/dbt/clients/registry.py @@ -1,21 +1,24 @@ import functools +import os from typing import Any, Dict, List + import requests -from dbt.events.functions import fire_event + +from dbt import deprecations from dbt.events.types import ( - RegistryProgressGETRequest, - RegistryProgressGETResponse, RegistryIndexProgressGETRequest, RegistryIndexProgressGETResponse, - RegistryResponseUnexpectedType, - RegistryResponseMissingTopKeys, - RegistryResponseMissingNestedKeys, + RegistryProgressGETRequest, + RegistryProgressGETResponse, RegistryResponseExtraNestedKeys, + RegistryResponseMissingNestedKeys, + RegistryResponseMissingTopKeys, + RegistryResponseUnexpectedType, ) -from dbt.utils import memoized, _connection_exception_retry as connection_exception_retry -from dbt import deprecations -from dbt import semver -import os +from dbt.utils import memoized +from dbt_common import semver +from dbt_common.events.functions import fire_event +from dbt_common.utils.connection import connection_exception_retry if os.getenv("DBT_PACKAGE_HUB_URL"): DEFAULT_REGISTRY_BASE_URL = os.getenv("DBT_PACKAGE_HUB_URL") @@ -104,7 +107,6 @@ def package(package_name, registry_base_url=None) -> Dict[str, Any]: # redirectname redirects based on package name # Both can be present at the same time, or neither. 
Fails gracefully to old name if ("redirectnamespace" in response) or ("redirectname" in response): - if ("redirectnamespace" in response) and response["redirectnamespace"] is not None: use_namespace = response["redirectnamespace"] else: @@ -160,7 +162,6 @@ def get_compatible_versions(package_name, dbt_version, should_version_check) -> def _get_index(registry_base_url=None): - url = _get_url("index", registry_base_url) fire_event(RegistryIndexProgressGETRequest(url=url)) # all exceptions from requests get caught in the retry logic so no need to wrap this here diff --git a/core/dbt/clients/system.py b/core/dbt/clients/system.py deleted file mode 100644 index 66c59354b4f..00000000000 --- a/core/dbt/clients/system.py +++ /dev/null @@ -1,570 +0,0 @@ -import errno -import fnmatch -import functools -import json -import os -import os.path -import re -import shutil -import stat -import subprocess -import sys -import tarfile -from pathlib import Path -from typing import Any, Callable, Dict, List, NoReturn, Optional, Tuple, Type, Union - -import dbt.exceptions -import requests -from dbt.events.functions import fire_event -from dbt.events.types import ( - SystemCouldNotWrite, - SystemExecutingCmd, - SystemStdOut, - SystemStdErr, - SystemReportReturnCode, -) -from dbt.exceptions import DbtInternalError -from dbt.utils import _connection_exception_retry as connection_exception_retry -from pathspec import PathSpec # type: ignore - -if sys.platform == "win32": - from ctypes import WinDLL, c_bool -else: - WinDLL = None - c_bool = None - - -def find_matching( - root_path: str, - relative_paths_to_search: List[str], - file_pattern: str, - ignore_spec: Optional[PathSpec] = None, -) -> List[Dict[str, Any]]: - """ - Given an absolute `root_path`, a list of relative paths to that - absolute root path (`relative_paths_to_search`), and a `file_pattern` - like '*.sql', returns information about the files. 
For example: - - > find_matching('/root/path', ['models'], '*.sql') - - [ { 'absolute_path': '/root/path/models/model_one.sql', - 'relative_path': 'model_one.sql', - 'searched_path': 'models' }, - { 'absolute_path': '/root/path/models/subdirectory/model_two.sql', - 'relative_path': 'subdirectory/model_two.sql', - 'searched_path': 'models' } ] - """ - matching = [] - root_path = os.path.normpath(root_path) - regex = fnmatch.translate(file_pattern) - reobj = re.compile(regex, re.IGNORECASE) - - for relative_path_to_search in relative_paths_to_search: - # potential speedup for ignore_spec - # if ignore_spec.matches(relative_path_to_search): - # continue - absolute_path_to_search = os.path.join(root_path, relative_path_to_search) - walk_results = os.walk(absolute_path_to_search) - - for current_path, subdirectories, local_files in walk_results: - # potential speedup for ignore_spec - # relative_dir = os.path.relpath(current_path, root_path) + os.sep - # if ignore_spec.match(relative_dir): - # continue - for local_file in local_files: - absolute_path = os.path.join(current_path, local_file) - relative_path = os.path.relpath(absolute_path, absolute_path_to_search) - relative_path_to_root = os.path.join(relative_path_to_search, relative_path) - - modification_time = os.path.getmtime(absolute_path) - if reobj.match(local_file) and ( - not ignore_spec or not ignore_spec.match_file(relative_path_to_root) - ): - matching.append( - { - "searched_path": relative_path_to_search, - "absolute_path": absolute_path, - "relative_path": relative_path, - "modification_time": modification_time, - } - ) - - return matching - - -def load_file_contents(path: str, strip: bool = True) -> str: - path = convert_path(path) - with open(path, "rb") as handle: - to_return = handle.read().decode("utf-8") - - if strip: - to_return = to_return.strip() - - return to_return - - -@functools.singledispatch -def make_directory(path=None) -> None: - """ - Make a directory and any intermediate directories that don't already - exist. This function handles the case where two threads try to create - a directory at once. - """ - raise DbtInternalError(f"Can not create directory from {type(path)} ") - - -@make_directory.register -def _(path: str) -> None: - path = convert_path(path) - if not os.path.exists(path): - # concurrent writes that try to create the same dir can fail - try: - os.makedirs(path) - - except OSError as e: - if e.errno == errno.EEXIST: - pass - else: - raise e - - -@make_directory.register -def _(path: Path) -> None: - path.mkdir(parents=True, exist_ok=True) - - -def make_file(path: str, contents: str = "", overwrite: bool = False) -> bool: - """ - Make a file at `path` assuming that the directory it resides in already - exists. The file is saved with contents `contents` - """ - if overwrite or not os.path.exists(path): - path = convert_path(path) - with open(path, "w") as fh: - fh.write(contents) - return True - - return False - - -def make_symlink(source: str, link_path: str) -> None: - """ - Create a symlink at `link_path` referring to `source`. - """ - if not supports_symlinks(): - # TODO: why not import these at top? 
- raise dbt.exceptions.SymbolicLinkError() - - os.symlink(source, link_path) - - -def supports_symlinks() -> bool: - return getattr(os, "symlink", None) is not None - - -def write_file(path: str, contents: str = "") -> bool: - path = convert_path(path) - try: - make_directory(os.path.dirname(path)) - with open(path, "w", encoding="utf-8") as f: - f.write(str(contents)) - except Exception as exc: - # note that you can't just catch FileNotFound, because sometimes - # windows apparently raises something else. - # It's also not sufficient to look at the path length, because - # sometimes windows fails to write paths that are less than the length - # limit. So on windows, suppress all errors that happen from writing - # to disk. - if os.name == "nt": - # sometimes we get a winerror of 3 which means the path was - # definitely too long, but other times we don't and it means the - # path was just probably too long. This is probably based on the - # windows/python version. - if getattr(exc, "winerror", 0) == 3: - reason = "Path was too long" - else: - reason = "Path was possibly too long" - # all our hard work and the path was still too long. Log and - # continue. - fire_event(SystemCouldNotWrite(path=path, reason=reason, exc=str(exc))) - else: - raise - return True - - -def read_json(path: str) -> Dict[str, Any]: - return json.loads(load_file_contents(path)) - - -def write_json(path: str, data: Dict[str, Any]) -> bool: - return write_file(path, json.dumps(data, cls=dbt.utils.JSONEncoder)) - - -def _windows_rmdir_readonly(func: Callable[[str], Any], path: str, exc: Tuple[Any, OSError, Any]): - exception_val = exc[1] - if exception_val.errno == errno.EACCES: - os.chmod(path, stat.S_IWUSR) - func(path) - else: - raise - - -def resolve_path_from_base(path_to_resolve: str, base_path: str) -> str: - """ - If path_to_resolve is a relative path, create an absolute path - with base_path as the base. - - If path_to_resolve is an absolute path or a user path (~), just - resolve it to an absolute path and return. - """ - return os.path.abspath(os.path.join(base_path, os.path.expanduser(path_to_resolve))) - - -def rmdir(path: str) -> None: - """ - Recursively deletes a directory. Includes an error handler to retry with - different permissions on Windows. Otherwise, removing directories (eg. - cloned via git) can cause rmtree to throw a PermissionError exception - """ - path = convert_path(path) - if sys.platform == "win32": - onerror = _windows_rmdir_readonly - else: - onerror = None - - shutil.rmtree(path, onerror=onerror) - - -def _win_prepare_path(path: str) -> str: - """Given a windows path, prepare it for use by making sure it is absolute - and normalized. - """ - path = os.path.normpath(path) - - # if a path starts with '\', splitdrive() on it will return '' for the - # drive, but the prefix requires a drive letter. So let's add the drive - # letter back in. - # Unless it starts with '\\'. In that case, the path is a UNC mount point - # and splitdrive will be fine. - if not path.startswith("\\\\") and path.startswith("\\"): - curdrive = os.path.splitdrive(os.getcwd())[0] - path = curdrive + path - - # now our path is either an absolute UNC path or relative to the current - # directory. If it's relative, we need to make it absolute or the prefix - # won't work. `ntpath.abspath` allegedly doesn't always play nice with long - # paths, so do this instead. 
- if not os.path.splitdrive(path)[0]: - path = os.path.join(os.getcwd(), path) - - return path - - -def _supports_long_paths() -> bool: - if sys.platform != "win32": - return True - # Eryk Sun says to use `WinDLL('ntdll')` instead of `windll.ntdll` because - # of pointer caching in a comment here: - # https://stackoverflow.com/a/35097999/11262881 - # I don't know exaclty what he means, but I am inclined to believe him as - # he's pretty active on Python windows bugs! - else: - try: - dll = WinDLL("ntdll") - except OSError: # I don't think this happens? you need ntdll to run python - return False - # not all windows versions have it at all - if not hasattr(dll, "RtlAreLongPathsEnabled"): - return False - # tell windows we want to get back a single unsigned byte (a bool). - dll.RtlAreLongPathsEnabled.restype = c_bool - return dll.RtlAreLongPathsEnabled() - - -def convert_path(path: str) -> str: - """Convert a path that dbt has, which might be >260 characters long, to one - that will be writable/readable on Windows. - - On other platforms, this is a no-op. - """ - # some parts of python seem to append '\*.*' to strings, better safe than - # sorry. - if len(path) < 250: - return path - if _supports_long_paths(): - return path - - prefix = "\\\\?\\" - # Nothing to do - if path.startswith(prefix): - return path - - path = _win_prepare_path(path) - - # add the prefix. The check is just in case os.getcwd() does something - # unexpected - I believe this if-state should always be True though! - if not path.startswith(prefix): - path = prefix + path - return path - - -def remove_file(path: str) -> None: - path = convert_path(path) - os.remove(path) - - -def path_exists(path: str) -> bool: - path = convert_path(path) - return os.path.lexists(path) - - -def path_is_symlink(path: str) -> bool: - path = convert_path(path) - return os.path.islink(path) - - -def open_dir_cmd() -> str: - # https://docs.python.org/2/library/sys.html#sys.platform - if sys.platform == "win32": - return "start" - - elif sys.platform == "darwin": - return "open" - - else: - return "xdg-open" - - -def _handle_posix_cwd_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn: - if exc.errno == errno.ENOENT: - message = "Directory does not exist" - elif exc.errno == errno.EACCES: - message = "Current user cannot access directory, check permissions" - elif exc.errno == errno.ENOTDIR: - message = "Not a directory" - else: - message = "Unknown OSError: {} - cwd".format(str(exc)) - raise dbt.exceptions.WorkingDirectoryError(cwd, cmd, message) - - -def _handle_posix_cmd_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn: - if exc.errno == errno.ENOENT: - message = "Could not find command, ensure it is in the user's PATH" - elif exc.errno == errno.EACCES: - message = "User does not have permissions for this command" - else: - message = "Unknown OSError: {} - cmd".format(str(exc)) - raise dbt.exceptions.ExecutableError(cwd, cmd, message) - - -def _handle_posix_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn: - """OSError handling for POSIX systems. - - Some things that could happen to trigger an OSError: - - cwd could not exist - - exc.errno == ENOENT - - exc.filename == cwd - - cwd could have permissions that prevent the current user moving to it - - exc.errno == EACCES - - exc.filename == cwd - - cwd could exist but not be a directory - - exc.errno == ENOTDIR - - exc.filename == cwd - - cmd[0] could not exist - - exc.errno == ENOENT - - exc.filename == None(?) 
- - cmd[0] could exist but have permissions that prevents the current - user from executing it (executable bit not set for the user) - - exc.errno == EACCES - - exc.filename == None(?) - """ - if getattr(exc, "filename", None) == cwd: - _handle_posix_cwd_error(exc, cwd, cmd) - else: - _handle_posix_cmd_error(exc, cwd, cmd) - - -def _handle_windows_error(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn: - cls: Type[dbt.exceptions.Exception] = dbt.exceptions.CommandError - if exc.errno == errno.ENOENT: - message = ( - "Could not find command, ensure it is in the user's PATH " - "and that the user has permissions to run it" - ) - cls = dbt.exceptions.ExecutableError - elif exc.errno == errno.ENOEXEC: - message = "Command was not executable, ensure it is valid" - cls = dbt.exceptions.ExecutableError - elif exc.errno == errno.ENOTDIR: - message = ( - "Unable to cd: path does not exist, user does not have" - " permissions, or not a directory" - ) - cls = dbt.exceptions.WorkingDirectoryError - else: - message = 'Unknown error: {} (errno={}: "{}")'.format( - str(exc), exc.errno, errno.errorcode.get(exc.errno, "<Unknown!>") - ) - raise cls(cwd, cmd, message) - - -def _interpret_oserror(exc: OSError, cwd: str, cmd: List[str]) -> NoReturn: - """Interpret an OSError exception and raise the appropriate dbt exception.""" - if len(cmd) == 0: - raise dbt.exceptions.CommandError(cwd, cmd) - - # all of these functions raise unconditionally - if os.name == "nt": - _handle_windows_error(exc, cwd, cmd) - else: - _handle_posix_error(exc, cwd, cmd) - - # this should not be reachable, raise _something_ at least! - raise dbt.exceptions.DbtInternalError( - "Unhandled exception in _interpret_oserror: {}".format(exc) - ) - - -def run_cmd(cwd: str, cmd: List[str], env: Optional[Dict[str, Any]] = None) -> Tuple[bytes, bytes]: - fire_event(SystemExecutingCmd(cmd=cmd)) - if len(cmd) == 0: - raise dbt.exceptions.CommandError(cwd, cmd) - - # the env argument replaces the environment entirely, which has exciting - # consequences on Windows! Do an update instead. 
- full_env = env - if env is not None: - full_env = os.environ.copy() - full_env.update(env) - - try: - exe_pth = shutil.which(cmd[0]) - if exe_pth: - cmd = [os.path.abspath(exe_pth)] + list(cmd[1:]) - proc = subprocess.Popen( - cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=full_env - ) - - out, err = proc.communicate() - except OSError as exc: - _interpret_oserror(exc, cwd, cmd) - - fire_event(SystemStdOut(bmsg=str(out))) - fire_event(SystemStdErr(bmsg=str(err))) - - if proc.returncode != 0: - fire_event(SystemReportReturnCode(returncode=proc.returncode)) - raise dbt.exceptions.CommandResultError(cwd, cmd, proc.returncode, out, err) - - return out, err - - -def download_with_retries( - url: str, path: str, timeout: Optional[Union[float, tuple]] = None -) -> None: - download_fn = functools.partial(download, url, path, timeout) - connection_exception_retry(download_fn, 5) - - -def download( - url: str, - path: str, - timeout: Optional[Union[float, Tuple[float, float], Tuple[float, None]]] = None, -) -> None: - path = convert_path(path) - connection_timeout = timeout or float(os.getenv("DBT_HTTP_TIMEOUT", 10)) - response = requests.get(url, timeout=connection_timeout) - with open(path, "wb") as handle: - for block in response.iter_content(1024 * 64): - handle.write(block) - - -def rename(from_path: str, to_path: str, force: bool = False) -> None: - from_path = convert_path(from_path) - to_path = convert_path(to_path) - is_symlink = path_is_symlink(to_path) - - if os.path.exists(to_path) and force: - if is_symlink: - remove_file(to_path) - else: - rmdir(to_path) - - shutil.move(from_path, to_path) - - -def untar_package(tar_path: str, dest_dir: str, rename_to: Optional[str] = None) -> None: - tar_path = convert_path(tar_path) - tar_dir_name = None - with tarfile.open(tar_path, "r:gz") as tarball: - tarball.extractall(dest_dir) - tar_dir_name = os.path.commonprefix(tarball.getnames()) - if rename_to: - downloaded_path = os.path.join(dest_dir, tar_dir_name) - desired_path = os.path.join(dest_dir, rename_to) - dbt.clients.system.rename(downloaded_path, desired_path, force=True) - - -def chmod_and_retry(func, path, exc_info): - """Define an error handler to pass to shutil.rmtree. - On Windows, when a file is marked read-only as git likes to do, rmtree will - fail. To handle that, on errors try to make the file writable. - We want to retry most operations here, but listdir is one that we know will - be useless. - """ - if func is os.listdir or os.name != "nt": - raise - os.chmod(path, stat.S_IREAD | stat.S_IWRITE) - # on error,this will raise. - func(path) - - -def _absnorm(path): - return os.path.normcase(os.path.abspath(path)) - - -def move(src, dst): - """A re-implementation of shutil.move that properly removes the source - directory on windows when it has read-only files in it and the move is - between two drives. - - This is almost identical to the real shutil.move, except it, uses our rmtree - and skips handling non-windows OSes since the existing one works ok there. 
- """ - src = convert_path(src) - dst = convert_path(dst) - if os.name != "nt": - return shutil.move(src, dst) - - if os.path.isdir(dst): - if _absnorm(src) == _absnorm(dst): - os.rename(src, dst) - return - - dst = os.path.join(dst, os.path.basename(src.rstrip("/\\"))) - if os.path.exists(dst): - raise EnvironmentError("Path '{}' already exists".format(dst)) - - try: - os.rename(src, dst) - except OSError: - # probably different drives - if os.path.isdir(src): - if _absnorm(dst + "\\").startswith(_absnorm(src + "\\")): - # dst is inside src - raise EnvironmentError( - "Cannot move a directory '{}' into itself '{}'".format(src, dst) - ) - shutil.copytree(src, dst, symlinks=True) - rmtree(src) - else: - shutil.copy2(src, dst) - os.unlink(src) - - -def rmtree(path): - """Recursively remove the path. On permissions errors on windows, try to remove - the read-only flag and try again. - """ - path = convert_path(path) - return shutil.rmtree(path, onerror=chmod_and_retry) diff --git a/core/dbt/clients/yaml_helper.py b/core/dbt/clients/yaml_helper.py index d5a29b0309f..a0a51099331 100644 --- a/core/dbt/clients/yaml_helper.py +++ b/core/dbt/clients/yaml_helper.py @@ -1,12 +1,17 @@ -import dbt.exceptions from typing import Any, Dict, Optional + import yaml +import dbt_common.exceptions +import dbt_common.exceptions.base + # the C version is faster, but it doesn't always exist try: - from yaml import CLoader as Loader, CSafeLoader as SafeLoader, CDumper as Dumper + from yaml import CDumper as Dumper + from yaml import CLoader as Loader + from yaml import CSafeLoader as SafeLoader except ImportError: - from yaml import Loader, SafeLoader, Dumper # type: ignore # noqa: F401 + from yaml import Dumper, Loader, SafeLoader # type: ignore # noqa: F401 YAML_ERROR_MESSAGE = """ @@ -60,4 +65,4 @@ def load_yaml_text(contents, path=None): else: error = str(e) - raise dbt.exceptions.DbtValidationError(error) + raise dbt_common.exceptions.base.DbtValidationError(error) diff --git a/core/dbt/compilation.py b/core/dbt/compilation.py index 959ad2516d1..72a5b6f016b 100644 --- a/core/dbt/compilation.py +++ b/core/dbt/compilation.py @@ -1,72 +1,63 @@ -import argparse import json - -import networkx as nx # type: ignore import os import pickle - from collections import defaultdict -from typing import List, Dict, Any, Tuple, Optional +from typing import Any, Dict, List, Optional, Tuple -from dbt.flags import get_flags +import networkx as nx # type: ignore +import sqlparse + +import dbt.tracking from dbt.adapters.factory import get_adapter from dbt.clients import jinja -from dbt.clients.system import make_directory -from dbt.context.providers import generate_runtime_model_context +from dbt.context.providers import ( + generate_runtime_model_context, + generate_runtime_unit_test_context, +) from dbt.contracts.graph.manifest import Manifest, UniqueID from dbt.contracts.graph.nodes import ( - ManifestNode, - ManifestSQLNode, GenericTestNode, GraphMemberNode, InjectedCTE, + ManifestNode, + ManifestSQLNode, + ModelNode, SeedNode, + UnitTestDefinition, + UnitTestNode, ) +from dbt.events.types import FoundStats, WritingInjectedSQLForNode from dbt.exceptions import ( - GraphDependencyNotFoundError, DbtInternalError, DbtRuntimeError, + ForeignKeyConstraintToSyntaxError, + GraphDependencyNotFoundError, + ParsingError, ) +from dbt.flags import get_flags from dbt.graph import Graph -from dbt.events.functions import fire_event, get_invocation_id -from dbt.events.types import FoundStats, Note, WritingInjectedSQLForNode -from 
dbt.events.contextvars import get_node_info -from dbt.node_types import NodeType, ModelLanguage -from dbt.events.format import pluralize -import dbt.tracking -import dbt.task.list as list_task -import sqlparse +from dbt.node_types import ModelLanguage, NodeType +from dbt_common.clients.system import make_directory +from dbt_common.contracts.constraints import ConstraintType +from dbt_common.events.contextvars import get_node_info +from dbt_common.events.format import pluralize +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Note +from dbt_common.invocation import get_invocation_id graph_file_name = "graph.gpickle" -def print_compile_stats(stats): - names = { - NodeType.Model: "model", - NodeType.Test: "test", - NodeType.Snapshot: "snapshot", - NodeType.Analysis: "analysis", - NodeType.Macro: "macro", - NodeType.Operation: "operation", - NodeType.Seed: "seed", - NodeType.Source: "source", - NodeType.Exposure: "exposure", - NodeType.SemanticModel: "semantic model", - NodeType.Metric: "metric", - NodeType.Group: "group", - } - - results = {k: 0 for k in names.keys()} - results.update(stats) - +def print_compile_stats(stats: Dict[NodeType, int]): # create tracking event for resource_counts if dbt.tracking.active_user is not None: - resource_counts = {k.pluralize(): v for k, v in results.items()} + resource_counts = {k.pluralize(): v for k, v in stats.items()} dbt.tracking.track_resource_counts(resource_counts) # do not include resource types that are not actually defined in the project - stat_line = ", ".join([pluralize(ct, names.get(t)) for t, ct in stats.items() if t in names]) - + stat_line = ", ".join( + [pluralize(ct, t).replace("_", " ") for t, ct in stats.items() if ct != 0] + ) fire_event(FoundStats(stat_line=stat_line)) @@ -78,7 +69,7 @@ def _node_enabled(node: ManifestNode): return True -def _generate_stats(manifest: Manifest): +def _generate_stats(manifest: Manifest) -> Dict[NodeType, int]: stats: Dict[NodeType, int] = defaultdict(int) for node in manifest.nodes.values(): if _node_enabled(node): @@ -91,6 +82,8 @@ def _generate_stats(manifest: Manifest): stats[NodeType.Macro] += len(manifest.macros) stats[NodeType.Group] += len(manifest.groups) stats[NodeType.SemanticModel] += len(manifest.semantic_models) + stats[NodeType.SavedQuery] += len(manifest.saved_queries) + stats[NodeType.Unit] += len(manifest.unit_tests) # TODO: should we be counting dimensions + entities? 
@@ -125,10 +118,10 @@ def _get_tests_for_node(manifest: Manifest, unique_id: UniqueID) -> List[UniqueI class Linker: - def __init__(self, data=None): + def __init__(self, data=None) -> None: if data is None: data = {} - self.graph = nx.DiGraph(**data) + self.graph: nx.DiGraph = nx.DiGraph(**data) def edges(self): return self.graph.edges() @@ -183,14 +176,18 @@ def link_node(self, node: GraphMemberNode, manifest: Manifest): def link_graph(self, manifest: Manifest): for source in manifest.sources.values(): self.add_node(source.unique_id) - for semantic_model in manifest.semantic_models.values(): - self.add_node(semantic_model.unique_id) for node in manifest.nodes.values(): self.link_node(node, manifest) + for semantic_model in manifest.semantic_models.values(): + self.link_node(semantic_model, manifest) for exposure in manifest.exposures.values(): self.link_node(exposure, manifest) for metric in manifest.metrics.values(): self.link_node(metric, manifest) + for unit_test in manifest.unit_tests.values(): + self.link_node(unit_test, manifest) + for saved_query in manifest.saved_queries.values(): + self.link_node(saved_query, manifest) cycle = self.find_cycles() @@ -232,6 +229,7 @@ def add_test_edges(self, manifest: Manifest) -> None: # Get all tests that depend on any upstream nodes. upstream_tests = [] for upstream_node in upstream_nodes: + # This gets tests with unique_ids starting with "test." upstream_tests += _get_tests_for_node(manifest, upstream_node) for upstream_test in upstream_tests: @@ -274,12 +272,11 @@ def get_graph_summary(self, manifest: Manifest) -> Dict[int, Dict[str, Any]]: class Compiler: - def __init__(self, config): + def __init__(self, config) -> None: self.config = config def initialize(self): make_directory(self.config.project_target_path) - make_directory(self.config.packages_install_path) # creates a ModelContext which is converted to # a dict for jinja rendering of SQL @@ -289,8 +286,10 @@ def _create_node_context( manifest: Manifest, extra_context: Dict[str, Any], ) -> Dict[str, Any]: - - context = generate_runtime_model_context(node, self.config, manifest) + if isinstance(node, UnitTestNode): + context = generate_runtime_unit_test_context(node, self.config, manifest) + else: + context = generate_runtime_model_context(node, self.config, manifest) context.update(extra_context) if isinstance(node, GenericTestNode): @@ -320,6 +319,10 @@ def _recursively_prepend_ctes( if model.compiled_code is None: raise DbtRuntimeError("Cannot inject ctes into an uncompiled node", model) + # tech debt: safe flag/arg access (#6259) + if not getattr(self.config.args, "inject_ephemeral_ctes", True): + return (model, []) + # extra_ctes_injected flag says that we've already recursively injected the ctes if model.extra_ctes_injected: return (model, model.extra_ctes) @@ -372,7 +375,7 @@ def _recursively_prepend_ctes( _extend_prepended_ctes(prepended_ctes, new_prepended_ctes) - new_cte_name = self.add_ephemeral_prefix(cte_model.name) + new_cte_name = self.add_ephemeral_prefix(cte_model.identifier) rendered_sql = cte_model._pre_injected_sql or cte_model.compiled_code sql = f" {new_cte_name} as (\n{rendered_sql}\n)" @@ -438,8 +441,31 @@ def _compile_code( relation_name = str(relation_cls.create_from(self.config, node)) node.relation_name = relation_name + # Compile 'ref' and 'source' expressions in foreign key constraints + if isinstance(node, ModelNode): + for constraint in node.all_constraints: + if constraint.type == ConstraintType.foreign_key and constraint.to: + constraint.to = 
self._compile_relation_for_foreign_key_constraint_to( + manifest, node, constraint.to + ) + return node + def _compile_relation_for_foreign_key_constraint_to( + self, manifest: Manifest, node: ManifestSQLNode, to_expression: str + ) -> str: + try: + foreign_key_node = manifest.find_node_from_ref_or_source(to_expression) + except ParsingError: + raise ForeignKeyConstraintToSyntaxError(node, to_expression) + + if not foreign_key_node: + raise GraphDependencyNotFoundError(node, to_expression) + + adapter = get_adapter(self.config) + relation_name = str(adapter.Relation.create_from(self.config, foreign_key_node)) + return relation_name + # This method doesn't actually "compile" any of the nodes. That is done by the # "compile_node" method. This creates a Linker and builds the networkx graph, # writes out the graph.gpickle file, and prints the stats, returning a Graph object. @@ -454,6 +480,7 @@ def compile(self, manifest: Manifest, write=True, add_test_edges=False) -> Graph summaries["_invocation_id"] = get_invocation_id() summaries["linked"] = linker.get_graph_summary(manifest) + # This is only called for the "build" command if add_test_edges: manifest.build_parent_and_child_maps() linker.add_test_edges(manifest) @@ -479,11 +506,8 @@ def compile(self, manifest: Manifest, write=True, add_test_edges=False) -> Graph if write: self.write_graph_file(linker, manifest) - # Do not print these for ListTask's - if not ( - self.config.args.__class__ == argparse.Namespace - and self.config.args.cls == list_task.ListTask - ): + # Do not print these for list command + if self.config.args.which != "list": stats = _generate_stats(manifest) print_compile_stats(stats) @@ -523,6 +547,11 @@ def compile_node( the node's raw_code into compiled_code, and then calls the recursive method to "prepend" the ctes. """ + # REVIEW: UnitTestDefinition shouldn't be possible here because of the + # type of node, and it is likewise an invalid return type. + if isinstance(node, UnitTestDefinition): + return node + # Make sure Lexer for sqlparse 0.4.4 is initialized from sqlparse.lexer import Lexer # type: ignore diff --git a/core/dbt/config/__init__.py b/core/dbt/config/__init__.py index 1fa43bed3a5..1f3875422c9 100644 --- a/core/dbt/config/__init__.py +++ b/core/dbt/config/__init__.py @@ -1,4 +1,4 @@ # all these are just exports, they need "noqa" so flake8 will not complain. 
-from .profile import Profile, read_user_config # noqa -from .project import Project, IsFQNResource, PartialProject # noqa +from .profile import Profile # noqa +from .project import IsFQNResource, PartialProject, Project # noqa from .runtime import RuntimeConfig # noqa diff --git a/core/dbt/config/profile.py b/core/dbt/config/profile.py index d31342223c2..ada7f30711c 100644 --- a/core/dbt/config/profile.py +++ b/core/dbt/config/profile.py @@ -1,25 +1,23 @@ +import os from dataclasses import dataclass from typing import Any, Dict, Optional, Tuple -import os - -from dbt.dataclass_schema import ValidationError -from dbt.flags import get_flags -from dbt.clients.system import load_file_contents +from dbt.adapters.contracts.connection import Credentials, HasCredentials from dbt.clients.yaml_helper import load_yaml_text -from dbt.contracts.connection import Credentials, HasCredentials -from dbt.contracts.project import ProfileConfig, UserConfig +from dbt.contracts.project import ProfileConfig +from dbt.events.types import MissingProfileTarget from dbt.exceptions import ( CompilationError, DbtProfileError, DbtProjectError, - DbtValidationError, DbtRuntimeError, ProfileConfigError, ) -from dbt.events.types import MissingProfileTarget -from dbt.events.functions import fire_event -from dbt.utils import coerce_dict_str +from dbt.flags import get_flags +from dbt_common.clients.system import load_file_contents +from dbt_common.dataclass_schema import ValidationError +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtValidationError from .renderer import ProfileRenderer @@ -51,19 +49,6 @@ def read_profile(profiles_dir: str) -> Dict[str, Any]: return {} -def read_user_config(directory: str) -> UserConfig: - try: - profile = read_profile(directory) - if profile: - user_config = coerce_dict_str(profile.get("config", {})) - if user_config is not None: - UserConfig.validate(user_config) - return UserConfig.from_dict(user_config) - except (DbtRuntimeError, ValidationError): - pass - return UserConfig() - - # The Profile class is included in RuntimeConfig, so any attribute # additions must also be set where the RuntimeConfig class is created # `init=False` is a workaround for https://bugs.python.org/issue45081 @@ -71,28 +56,29 @@ def read_user_config(directory: str) -> UserConfig: class Profile(HasCredentials): profile_name: str target_name: str - user_config: UserConfig threads: int credentials: Credentials profile_env_vars: Dict[str, Any] + log_cache_events: bool def __init__( self, profile_name: str, target_name: str, - user_config: UserConfig, threads: int, credentials: Credentials, - ): + ) -> None: """Explicitly defining `__init__` to work around bug in Python 3.9.7 https://bugs.python.org/issue45081 """ self.profile_name = profile_name self.target_name = target_name - self.user_config = user_config self.threads = threads self.credentials = credentials self.profile_env_vars = {} # never available on init + self.log_cache_events = ( + get_flags().LOG_CACHE_EVENTS + ) # never available on init, set for adapter instantiation via AdapterRequiredConfig def to_profile_info(self, serialize_credentials: bool = False) -> Dict[str, Any]: """Unlike to_project_config, this dict is not a mirror of any existing @@ -106,12 +92,10 @@ def to_profile_info(self, serialize_credentials: bool = False) -> Dict[str, Any] result = { "profile_name": self.profile_name, "target_name": self.target_name, - "user_config": self.user_config, "threads": self.threads, "credentials": self.credentials, 
} if serialize_credentials: - result["user_config"] = self.user_config.to_dict(omit_none=True) result["credentials"] = self.credentials.to_dict(omit_none=True) return result @@ -124,7 +108,6 @@ def to_target_dict(self) -> Dict[str, Any]: "name": self.target_name, "target_name": self.target_name, "profile_name": self.profile_name, - "config": self.user_config.to_dict(omit_none=True), } ) return target @@ -246,7 +229,6 @@ def from_credentials( threads: int, profile_name: str, target_name: str, - user_config: Optional[Dict[str, Any]] = None, ) -> "Profile": """Create a profile from an existing set of Credentials and the remaining information. @@ -255,20 +237,13 @@ def from_credentials( :param threads: The number of threads to use for connections. :param profile_name: The profile name used for this profile. :param target_name: The target name used for this profile. - :param user_config: The user-level config block from the - raw profiles, if specified. :raises DbtProfileError: If the profile is invalid. :returns: The new Profile object. """ - if user_config is None: - user_config = {} - UserConfig.validate(user_config) - user_config_obj: UserConfig = UserConfig.from_dict(user_config) profile = cls( profile_name=profile_name, target_name=target_name, - user_config=user_config_obj, threads=threads, credentials=credentials, ) @@ -316,7 +291,6 @@ def from_raw_profile_info( raw_profile: Dict[str, Any], profile_name: str, renderer: ProfileRenderer, - user_config: Optional[Dict[str, Any]] = None, target_override: Optional[str] = None, threads_override: Optional[int] = None, ) -> "Profile": @@ -328,8 +302,6 @@ def from_raw_profile_info( disk as yaml and its values rendered with jinja. :param profile_name: The profile name used. :param renderer: The config renderer. - :param user_config: The global config for the user, if it - was present. :param target_override: The target to use, if provided on the command line. :param threads_override: The thread count to use, if @@ -338,9 +310,6 @@ def from_raw_profile_info( target could not be found :returns: The new Profile object. """ - # user_config is not rendered. - if user_config is None: - user_config = raw_profile.get("config") # TODO: should it be, and the values coerced to bool? 
target_name, profile_data = cls.render_profile( raw_profile, profile_name, target_override, renderer @@ -361,7 +330,6 @@ def from_raw_profile_info( profile_name=profile_name, target_name=target_name, threads=threads, - user_config=user_config, ) @classmethod @@ -396,13 +364,11 @@ def from_raw_profiles( if not raw_profile: msg = f"Profile {profile_name} in profiles.yml is empty" raise DbtProfileError(INVALID_PROFILE_MESSAGE.format(error_string=msg)) - user_config = raw_profiles.get("config") return cls.from_raw_profile_info( raw_profile=raw_profile, profile_name=profile_name, renderer=renderer, - user_config=user_config, target_override=target_override, threads_override=threads_override, ) diff --git a/core/dbt/config/project.py b/core/dbt/config/project.py index baa99239e99..25b0f343ef2 100644 --- a/core/dbt/config/project.py +++ b/core/dbt/config/project.py @@ -1,53 +1,50 @@ +import os from copy import deepcopy from dataclasses import dataclass, field from itertools import chain -from typing import ( - List, - Dict, - Any, - Optional, - TypeVar, - Union, - Mapping, -) -from typing_extensions import Protocol, runtime_checkable +from typing import Any, Dict, List, Mapping, Optional, TypeVar, Union -import os +from typing_extensions import Protocol, runtime_checkable -from dbt.flags import get_flags from dbt import deprecations -from dbt.constants import DEPENDENCIES_FILE_NAME, PACKAGES_FILE_NAME -from dbt.clients.system import path_exists, resolve_path_from_base, load_file_contents +from dbt.adapters.contracts.connection import QueryComment from dbt.clients.yaml_helper import load_yaml_text -from dbt.contracts.connection import QueryComment +from dbt.config.selectors import SelectorDict +from dbt.config.utils import normalize_warn_error_options +from dbt.constants import ( + DBT_PROJECT_FILE_NAME, + DEPENDENCIES_FILE_NAME, + PACKAGE_LOCK_HASH_KEY, + PACKAGES_FILE_NAME, +) +from dbt.contracts.project import PackageConfig +from dbt.contracts.project import Project as ProjectContract +from dbt.contracts.project import ProjectFlags, ProjectPackageMetadata, SemverString from dbt.exceptions import ( + DbtExclusivePropertyUseError, DbtProjectError, - SemverError, + DbtRuntimeError, ProjectContractBrokenError, ProjectContractError, - DbtRuntimeError, ) +from dbt.flags import get_flags from dbt.graph import SelectionSpec -from dbt.helper_types import NoValue -from dbt.semver import VersionSpecifier, versions_compatible -from dbt.version import get_installed_version -from dbt.utils import MultiDict, md5 from dbt.node_types import NodeType -from dbt.config.selectors import SelectorDict -from dbt.contracts.project import ( - Project as ProjectContract, - SemverString, -) -from dbt.contracts.project import PackageConfig, ProjectPackageMetadata -from dbt.dataclass_schema import ValidationError +from dbt.utils import MultiDict, coerce_dict_str, md5 +from dbt.version import get_installed_version +from dbt_common.clients.system import load_file_contents, path_exists +from dbt_common.dataclass_schema import ValidationError +from dbt_common.exceptions import SemverError +from dbt_common.helper_types import NoValue +from dbt_common.semver import VersionSpecifier, versions_compatible + from .renderer import DbtProjectYamlRenderer, PackageRenderer from .selectors import ( + SelectorConfig, selector_config_from_data, selector_data_from_root, - SelectorConfig, ) - INVALID_VERSION_ERROR = """\ This version of dbt is not supported with the '{package}' package. 
Installed version of dbt: {installed} @@ -77,8 +74,8 @@ """ MISSING_DBT_PROJECT_ERROR = """\ -No dbt_project.yml found at expected path {path} -Verify that each entry within packages.yml (and their transitive dependencies) contains a file named dbt_project.yml +No {DBT_PROJECT_FILE_NAME} found at expected path {path} +Verify that each entry within packages.yml (and their transitive dependencies) contains a file named {DBT_PROJECT_FILE_NAME} """ @@ -94,16 +91,16 @@ def _load_yaml(path): return load_yaml_text(contents) -def package_and_project_data_from_root(project_root): - package_filepath = resolve_path_from_base(PACKAGES_FILE_NAME, project_root) - dependencies_filepath = resolve_path_from_base(DEPENDENCIES_FILE_NAME, project_root) +def load_yml_dict(file_path): + ret = {} + if path_exists(file_path): + ret = _load_yaml(file_path) or {} + return ret - packages_yml_dict = {} - dependencies_yml_dict = {} - if path_exists(package_filepath): - packages_yml_dict = _load_yaml(package_filepath) or {} - if path_exists(dependencies_filepath): - dependencies_yml_dict = _load_yaml(dependencies_filepath) or {} + +def package_and_project_data_from_root(project_root): + packages_yml_dict = load_yml_dict(f"{project_root}/{PACKAGES_FILE_NAME}") + dependencies_yml_dict = load_yml_dict(f"{project_root}/{DEPENDENCIES_FILE_NAME}") if "packages" in packages_yml_dict and "packages" in dependencies_yml_dict: msg = "The 'packages' key cannot be specified in both packages.yml and dependencies.yml" @@ -123,10 +120,21 @@ def package_and_project_data_from_root(project_root): return packages_dict, packages_specified_path -def package_config_from_data(packages_data: Dict[str, Any]) -> PackageConfig: +def package_config_from_data( + packages_data: Dict[str, Any], + unrendered_packages_data: Optional[Dict[str, Any]] = None, +) -> PackageConfig: if not packages_data: packages_data = {"packages": []} + # this depends on the two lists being in the same order + if unrendered_packages_data: + unrendered_packages_data = deepcopy(unrendered_packages_data) + for i in range(0, len(packages_data.get("packages", []))): + packages_data["packages"][i]["unrendered"] = unrendered_packages_data["packages"][i] + + if PACKAGE_LOCK_HASH_KEY in packages_data: + packages_data.pop(PACKAGE_LOCK_HASH_KEY) try: PackageConfig.validate(packages_data) packages = PackageConfig.from_dict(packages_data) @@ -181,18 +189,21 @@ def value_or(value: Optional[T], default: T) -> T: def load_raw_project(project_root: str) -> Dict[str, Any]: - project_root = os.path.normpath(project_root) - project_yaml_filepath = os.path.join(project_root, "dbt_project.yml") + project_yaml_filepath = os.path.join(project_root, DBT_PROJECT_FILE_NAME) # get the project.yml contents if not path_exists(project_yaml_filepath): - raise DbtProjectError(MISSING_DBT_PROJECT_ERROR.format(path=project_yaml_filepath)) + raise DbtProjectError( + MISSING_DBT_PROJECT_ERROR.format( + path=project_yaml_filepath, DBT_PROJECT_FILE_NAME=DBT_PROJECT_FILE_NAME + ) + ) project_dict = _load_yaml(project_yaml_filepath) if not isinstance(project_dict, dict): - raise DbtProjectError("dbt_project.yml does not parse to a dictionary") + raise DbtProjectError(f"{DBT_PROJECT_FILE_NAME} does not parse to a dictionary") return project_dict @@ -294,7 +305,6 @@ def get_rendered( self, renderer: DbtProjectYamlRenderer, ) -> RenderComponents: - rendered_project = renderer.render_project(self.project_dict, self.project_root) rendered_packages = renderer.render_packages( self.packages_dict, 
self.packages_specified_path @@ -307,21 +317,21 @@ def get_rendered( selectors_dict=rendered_selectors, ) - # Called by Project.from_project_root (not PartialProject.from_project_root!) + # Called by Project.from_project_root which first calls PartialProject.from_project_root def render(self, renderer: DbtProjectYamlRenderer) -> "Project": try: rendered = self.get_rendered(renderer) return self.create_project(rendered) except DbtProjectError as exc: if exc.path is None: - exc.path = os.path.join(self.project_root, "dbt_project.yml") + exc.path = os.path.join(self.project_root, DBT_PROJECT_FILE_NAME) raise def render_package_metadata(self, renderer: PackageRenderer) -> ProjectPackageMetadata: packages_data = renderer.render_data(self.packages_dict) - packages_config = package_config_from_data(packages_data) + packages_config = package_config_from_data(packages_data, self.packages_dict) if not self.project_name: - raise DbtProjectError("Package dbt_project.yml must have a name!") + raise DbtProjectError(f"Package defined in {DBT_PROJECT_FILE_NAME} must have a name!") return ProjectPackageMetadata(self.project_name, packages_config.packages) def check_config_path( @@ -332,7 +342,7 @@ def check_config_path( msg = ( "{deprecated_path} and {expected_path} cannot both be defined. The " "`{deprecated_path}` config has been deprecated in favor of `{expected_path}`. " - "Please update your `dbt_project.yml` configuration to reflect this " + f"Please update your `{DBT_PROJECT_FILE_NAME}` configuration to reflect this " "change." ) raise DbtProjectError( @@ -404,11 +414,11 @@ def create_project(self, rendered: RenderComponents) -> "Project": docs_paths: List[str] = value_or(cfg.docs_paths, all_source_paths) asset_paths: List[str] = value_or(cfg.asset_paths, []) - flags = get_flags() + global_flags = get_flags() - flag_target_path = str(flags.TARGET_PATH) if flags.TARGET_PATH else None + flag_target_path = str(global_flags.TARGET_PATH) if global_flags.TARGET_PATH else None target_path: str = flag_or(flag_target_path, cfg.target_path, "target") - log_path: str = str(flags.LOG_PATH) + log_path: str = str(global_flags.LOG_PATH) clean_targets: List[str] = value_or(cfg.clean_targets, [target_path]) packages_install_path: str = value_or(cfg.packages_install_path, "dbt_packages") @@ -424,18 +434,27 @@ def create_project(self, rendered: RenderComponents) -> "Project": seeds: Dict[str, Any] snapshots: Dict[str, Any] sources: Dict[str, Any] - tests: Dict[str, Any] + data_tests: Dict[str, Any] + unit_tests: Dict[str, Any] metrics: Dict[str, Any] + semantic_models: Dict[str, Any] + saved_queries: Dict[str, Any] exposures: Dict[str, Any] vars_value: VarProvider + dbt_cloud: Dict[str, Any] dispatch = cfg.dispatch models = cfg.models seeds = cfg.seeds snapshots = cfg.snapshots sources = cfg.sources - tests = cfg.tests + # the `tests` config is deprecated but still allowed. Copy it into + # `data_tests` to simplify logic throughout the rest of the system. 
+ data_tests = cfg.data_tests if "data_tests" in rendered.project_dict else cfg.tests + unit_tests = cfg.unit_tests metrics = cfg.metrics + semantic_models = cfg.semantic_models + saved_queries = cfg.saved_queries exposures = cfg.exposures if cfg.vars is None: vars_dict: Dict[str, Any] = {} @@ -449,8 +468,9 @@ def create_project(self, rendered: RenderComponents) -> "Project": on_run_end: List[str] = value_or(cfg.on_run_end, []) query_comment = _query_comment_from_cfg(cfg.query_comment) - - packages: PackageConfig = package_config_from_data(rendered.packages_dict) + packages: PackageConfig = package_config_from_data( + rendered.packages_dict, unrendered.packages_dict + ) selectors = selector_config_from_data(rendered.selectors_dict) manifest_selectors: Dict[str, Any] = {} if rendered.selectors_dict and rendered.selectors_dict["selectors"]: @@ -459,6 +479,9 @@ def create_project(self, rendered: RenderComponents) -> "Project": manifest_selectors = SelectorDict.parse_from_selectors_list( rendered.selectors_dict["selectors"] ) + dbt_cloud = cfg.dbt_cloud + flags: Dict[str, Any] = cfg.flags + project = Project( project_name=name, version=version, @@ -490,14 +513,19 @@ def create_project(self, rendered: RenderComponents) -> "Project": selectors=selectors, query_comment=query_comment, sources=sources, - tests=tests, + data_tests=data_tests, + unit_tests=unit_tests, metrics=metrics, + semantic_models=semantic_models, + saved_queries=saved_queries, exposures=exposures, vars=vars_value, config_version=cfg.config_version, unrendered=unrendered, project_env_vars=project_env_vars, restrict_access=cfg.restrict_access, + dbt_cloud=dbt_cloud, + flags=flags, ) # sanity check - this means an internal issue project.validate() @@ -541,6 +569,7 @@ def from_project_root( packages_specified_path, ) = package_and_project_data_from_root(project_root) selectors_dict = selector_data_from_root(project_root) + return cls.from_dicts( project_root=project_root, project_dict=project_dict, @@ -596,8 +625,11 @@ class Project: seeds: Dict[str, Any] snapshots: Dict[str, Any] sources: Dict[str, Any] - tests: Dict[str, Any] + data_tests: Dict[str, Any] + unit_tests: Dict[str, Any] metrics: Dict[str, Any] + semantic_models: Dict[str, Any] + saved_queries: Dict[str, Any] exposures: Dict[str, Any] vars: VarProvider dbt_version: List[VersionSpecifier] @@ -609,6 +641,8 @@ class Project: unrendered: RenderComponents project_env_vars: Dict[str, Any] restrict_access: bool + dbt_cloud: Dict[str, Any] + flags: Dict[str, Any] @property def all_source_paths(self) -> List[str]: @@ -627,6 +661,13 @@ def generic_test_paths(self): generic_test_paths.append(os.path.join(test_path, "generic")) return generic_test_paths + @property + def fixture_paths(self): + fixture_paths = [] + for test_path in self.test_paths: + fixture_paths.append(os.path.join(test_path, "fixtures")) + return fixture_paths + def __str__(self): cfg = self.to_project_config(with_packages=True) return str(cfg) @@ -671,13 +712,17 @@ def to_project_config(self, with_packages=False): "seeds": self.seeds, "snapshots": self.snapshots, "sources": self.sources, - "tests": self.tests, + "data_tests": self.data_tests, + "unit_tests": self.unit_tests, "metrics": self.metrics, + "semantic-models": self.semantic_models, + "saved-queries": self.saved_queries, "exposures": self.exposures, "vars": self.vars.to_dict(), "require-dbt-version": [v.to_version_string() for v in self.dbt_version], - "config-version": self.config_version, "restrict-access": self.restrict_access, + "dbt-cloud": 
self.dbt_cloud, + "flags": self.flags, } ) if self.query_comment: @@ -739,3 +784,58 @@ def get_macro_search_order(self, macro_namespace: str): def project_target_path(self): # If target_path is absolute, project_root will not be included return os.path.join(self.project_root, self.target_path) + + +def read_project_flags(project_dir: str, profiles_dir: str) -> ProjectFlags: + try: + project_flags: Dict[str, Any] = {} + # Read project_flags from dbt_project.yml first + # Flags are instantiated before the project, so we don't + # want to throw an error for non-existence of dbt_project.yml here + # because it breaks things. + project_root = os.path.normpath(project_dir) + project_yaml_filepath = os.path.join(project_root, DBT_PROJECT_FILE_NAME) + if path_exists(project_yaml_filepath): + try: + project_dict = load_raw_project(project_root) + if "flags" in project_dict: + project_flags = project_dict.pop("flags") + except Exception: + # This is probably a yaml load error.The error will be reported + # later, when the project loads. + pass + + from dbt.config.profile import read_profile + + profile = read_profile(profiles_dir) + profile_project_flags: Optional[Dict[str, Any]] = {} + if profile: + profile_project_flags = coerce_dict_str(profile.get("config", {})) + + if project_flags and profile_project_flags: + raise DbtProjectError( + f"Do not specify both 'config' in profiles.yml and 'flags' in {DBT_PROJECT_FILE_NAME}. " + "Using 'config' in profiles.yml is deprecated." + ) + + if profile_project_flags: + # This can't use WARN_ERROR or WARN_ERROR_OPTIONS because they're in + # the config that we're loading. Uses special "buffer" method and fired after flags are initialized in preflight. + deprecations.buffer("project-flags-moved") + project_flags = profile_project_flags + + if project_flags is not None: + # handle collapsing `include` and `error` as well as collapsing `exclude` and `warn` + # for warn_error_options + warn_error_options = project_flags.get("warn_error_options", {}) + normalize_warn_error_options(warn_error_options) + + ProjectFlags.validate(project_flags) + return ProjectFlags.from_dict(project_flags) + except (DbtProjectError, DbtExclusivePropertyUseError) as exc: + # We don't want to eat the DbtProjectError for UserConfig to ProjectFlags or + # DbtConfigError for warn_error_options munging + raise exc + except (DbtRuntimeError, ValidationError): + pass + return ProjectFlags() diff --git a/core/dbt/config/renderer.py b/core/dbt/config/renderer.py index aba4791c86c..4f605979e62 100644 --- a/core/dbt/config/renderer.py +++ b/core/dbt/config/renderer.py @@ -1,17 +1,19 @@ -from typing import Dict, Any, Tuple, Optional, Union, Callable import re -import os from datetime import date +from typing import Any, Callable, Dict, Optional, Tuple, Union -from dbt.clients.jinja import get_rendered, catch_jinja -from dbt.constants import SECRET_ENV_PREFIX, DEPENDENCIES_FILE_NAME -from dbt.context.target import TargetContext -from dbt.context.secret import SecretContext, SECRET_PLACEHOLDER +from dbt.adapters.contracts.connection import HasCredentials +from dbt.clients.jinja import get_rendered +from dbt.constants import DEPENDENCIES_FILE_NAME, SECRET_PLACEHOLDER from dbt.context.base import BaseContext -from dbt.contracts.connection import HasCredentials -from dbt.exceptions import DbtProjectError, CompilationError, RecursionError -from dbt.utils import deep_map_render - +from dbt.context.secret import SecretContext +from dbt.context.target import TargetContext +from dbt.exceptions import 
DbtProjectError +from dbt_common.clients.jinja import catch_jinja +from dbt_common.constants import SECRET_ENV_PREFIX +from dbt_common.context import get_invocation_context +from dbt_common.exceptions import CompilationError, RecursionError +from dbt_common.utils import deep_map_render Keypath = Tuple[Union[str, int], ...] @@ -74,7 +76,7 @@ def _list_if_none_or_string(value): class ProjectPostprocessor(Dict[Keypath, Callable[[Any], Any]]): - def __init__(self): + def __init__(self) -> None: super().__init__() self[("on-run-start",)] = _list_if_none_or_string @@ -162,7 +164,7 @@ def should_render_keypath(self, keypath: Keypath) -> bool: if first == "vars": return False - if first in {"seeds", "models", "snapshots", "tests"}: + if first in {"seeds", "models", "snapshots", "tests", "data_tests"}: keypath_parts = {(k.lstrip("+ ") if isinstance(k, str) else k) for k in keypath} # model-level hooks late_rendered_hooks = {"pre-hook", "post-hook", "pre_hook", "post_hook"} @@ -210,7 +212,7 @@ def render_value(self, value: Any, keypath: Optional[Keypath] = None) -> Any: ) if m: found = m.group(1) - value = os.environ[found] + value = get_invocation_context().env[found] replace_this = SECRET_PLACEHOLDER.format(found) return rendered.replace(replace_this, value) else: diff --git a/core/dbt/config/runtime.py b/core/dbt/config/runtime.py index 8e11b2cd43a..e1c24cf5f0c 100644 --- a/core/dbt/config/runtime.py +++ b/core/dbt/config/runtime.py @@ -15,24 +15,30 @@ Type, ) -from dbt.flags import get_flags +from dbt import tracking +from dbt.adapters.contracts.connection import ( + AdapterRequiredConfig, + Credentials, + HasCredentials, +) +from dbt.adapters.contracts.relation import ComponentName from dbt.adapters.factory import get_include_paths, get_relation_class_by_name from dbt.config.project import load_raw_project -from dbt.contracts.connection import AdapterRequiredConfig, Credentials, HasCredentials from dbt.contracts.graph.manifest import ManifestMetadata -from dbt.contracts.project import Configuration, UserConfig -from dbt.contracts.relation import ComponentName -from dbt.dataclass_schema import ValidationError -from dbt.events.functions import warn_or_error +from dbt.contracts.project import Configuration from dbt.events.types import UnusedResourceConfigPath from dbt.exceptions import ( ConfigContractBrokenError, DbtProjectError, - NonUniquePackageNameError, DbtRuntimeError, + NonUniquePackageNameError, UninstalledPackagesFoundError, ) -from dbt.helper_types import DictDefaultEmptyStr, FQNPath, PathSet +from dbt.flags import get_flags +from dbt_common.dataclass_schema import ValidationError +from dbt_common.events.functions import warn_or_error +from dbt_common.helper_types import DictDefaultEmptyStr, FQNPath, PathSet + from .profile import Profile from .project import Project from .renderer import DbtProjectYamlRenderer, ProfileRenderer @@ -134,6 +140,7 @@ def from_parts( ).to_dict(omit_none=True) cli_vars: Dict[str, Any] = getattr(args, "vars", {}) + log_cache_events: bool = getattr(args, "log_cache_events", profile.log_cache_events) return cls( project_name=project.project_name, @@ -165,8 +172,11 @@ def from_parts( selectors=project.selectors, query_comment=project.query_comment, sources=project.sources, - tests=project.tests, + data_tests=project.data_tests, + unit_tests=project.unit_tests, metrics=project.metrics, + semantic_models=project.semantic_models, + saved_queries=project.saved_queries, exposures=project.exposures, vars=project.vars, config_version=project.config_version, @@ -176,12 
+186,14 @@ def from_parts( profile_env_vars=profile.profile_env_vars, profile_name=profile.profile_name, target_name=profile.target_name, - user_config=profile.user_config, threads=profile.threads, credentials=profile.credentials, args=args, cli_vars=cli_vars, + log_cache_events=log_cache_events, dependencies=dependencies, + dbt_cloud=project.dbt_cloud, + flags=project.flags, ) # Called by 'load_projects' in this class @@ -278,6 +290,10 @@ def get_metadata(self) -> ManifestMetadata: return ManifestMetadata( project_name=self.project_name, project_id=self.hashed_name(), + user_id=tracking.active_user.id if tracking.active_user else None, + send_anonymous_usage_stats=( + get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None + ), adapter_type=self.credentials.type, ) @@ -320,8 +336,11 @@ def get_resource_config_paths(self) -> Dict[str, PathSet]: "seeds": self._get_config_paths(self.seeds), "snapshots": self._get_config_paths(self.snapshots), "sources": self._get_config_paths(self.sources), - "tests": self._get_config_paths(self.tests), + "data_tests": self._get_config_paths(self.data_tests), + "unit_tests": self._get_config_paths(self.unit_tests), "metrics": self._get_config_paths(self.metrics), + "semantic_models": self._get_config_paths(self.semantic_models), + "saved_queries": self._get_config_paths(self.saved_queries), "exposures": self._get_config_paths(self.exposures), } @@ -404,7 +423,7 @@ def _get_project_directories(self) -> Iterator[Path]: class UnsetCredentials(Credentials): - def __init__(self): + def __init__(self) -> None: super().__init__("", "") @property @@ -427,7 +446,6 @@ def _connection_keys(self): class UnsetProfile(Profile): def __init__(self): self.credentials = UnsetCredentials() - self.user_config = UserConfig() # This will be read in _get_rendered_profile self.profile_name = "" self.target_name = "" self.threads = -1 diff --git a/core/dbt/config/selectors.py b/core/dbt/config/selectors.py index 63f67c14902..82ab388a456 100644 --- a/core/dbt/config/selectors.py +++ b/core/dbt/config/selectors.py @@ -1,20 +1,21 @@ -from pathlib import Path from copy import deepcopy -from typing import Dict, Any, Union -from dbt.clients.yaml_helper import yaml, Loader, Dumper, load_yaml_text # noqa: F401 -from dbt.dataclass_schema import ValidationError - -from .renderer import BaseRenderer +from pathlib import Path +from typing import Any, Dict, Union -from dbt.clients.system import ( +from dbt.clients.yaml_helper import Dumper, Loader, load_yaml_text, yaml # noqa: F401 +from dbt.contracts.selection import SelectorFile +from dbt.exceptions import DbtSelectorsError +from dbt.graph import SelectionSpec, parse_from_selectors_definition +from dbt.graph.selector_spec import SelectionCriteria +from dbt_common.clients.system import ( load_file_contents, path_exists, resolve_path_from_base, ) -from dbt.contracts.selection import SelectorFile -from dbt.exceptions import DbtSelectorsError, DbtRuntimeError -from dbt.graph import parse_from_selectors_definition, SelectionSpec -from dbt.graph.selector_spec import SelectionCriteria +from dbt_common.dataclass_schema import ValidationError +from dbt_common.exceptions import DbtRuntimeError + +from .renderer import BaseRenderer MALFORMED_SELECTOR_ERROR = """\ The selectors.yml file in this project is malformed. 
Please double check diff --git a/core/dbt/config/utils.py b/core/dbt/config/utils.py index 18951665c53..c8bfc930b9b 100644 --- a/core/dbt/config/utils.py +++ b/core/dbt/config/utils.py @@ -1,10 +1,10 @@ -from typing import Any, Dict - +from typing import Any, Dict, Optional from dbt.clients import yaml_helper -from dbt.events.functions import fire_event from dbt.events.types import InvalidOptionYAML -from dbt.exceptions import DbtValidationError, OptionNotYamlDictError +from dbt.exceptions import DbtExclusivePropertyUseError, OptionNotYamlDictError +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtValidationError def parse_cli_vars(var_string: str) -> Dict[str, Any]: @@ -22,3 +22,45 @@ def parse_cli_yaml_string(var_string: str, cli_option_name: str) -> Dict[str, An except (DbtValidationError, OptionNotYamlDictError): fire_event(InvalidOptionYAML(option_name=cli_option_name)) raise + + +def exclusive_primary_alt_value_setting( + dictionary: Optional[Dict[str, Any]], + primary: str, + alt: str, + parent_config: Optional[str] = None, +) -> None: + """Munges in place under the primary the options for the primary and alt values + + Sometimes we allow setting something via TWO keys, but not at the same time. If both the primary + key and alt key have values, an error gets raised. If the alt key has values, then we update + the dictionary to ensure the primary key contains the values. If neither are set, nothing happens. + """ + + if dictionary is None: + return + + primary_options = dictionary.get(primary) + alt_options = dictionary.get(alt) + + if primary_options and alt_options: + where = f" in `{parent_config}`" if parent_config is not None else "" + raise DbtExclusivePropertyUseError( + f"Only `{alt}` or `{primary}` can be specified{where}, not both" + ) + + if alt in dictionary: + alt_value = dictionary.pop(alt) + dictionary[primary] = alt_value + + +def normalize_warn_error_options(warn_error_options: Dict[str, Any]) -> None: + exclusive_primary_alt_value_setting( + warn_error_options, "include", "error", "warn_error_options" + ) + exclusive_primary_alt_value_setting( + warn_error_options, "exclude", "warn", "warn_error_options" + ) + for key in ("include", "exclude", "silence"): + if key in warn_error_options and warn_error_options[key] is None: + warn_error_options[key] = [] diff --git a/core/dbt/constants.py b/core/dbt/constants.py index f52ac23fefe..0ff538910d5 100644 --- a/core/dbt/constants.py +++ b/core/dbt/constants.py @@ -1,6 +1,8 @@ -SECRET_ENV_PREFIX = "DBT_ENV_SECRET_" +from dbt_semantic_interfaces.type_enums import TimeGranularity + DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER" -METADATA_ENV_PREFIX = "DBT_ENV_CUSTOM_ENV_" + +SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$" MAXIMUM_SEED_SIZE = 1 * 1024 * 1024 MAXIMUM_SEED_SIZE_NAME = "1MB" @@ -9,8 +11,14 @@ "https://docs.getdbt.com/docs/package-management#section-specifying-package-versions" ) +DBT_PROJECT_FILE_NAME = "dbt_project.yml" PACKAGES_FILE_NAME = "packages.yml" DEPENDENCIES_FILE_NAME = "dependencies.yml" +PACKAGE_LOCK_FILE_NAME = "package-lock.yml" MANIFEST_FILE_NAME = "manifest.json" SEMANTIC_MANIFEST_FILE_NAME = "semantic_manifest.json" +LEGACY_TIME_SPINE_MODEL_NAME = "metricflow_time_spine" +LEGACY_TIME_SPINE_GRANULARITY = TimeGranularity.DAY +MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY = TimeGranularity.DAY PARTIAL_PARSE_FILE_NAME = "partial_parse.msgpack" +PACKAGE_LOCK_HASH_KEY = "sha1_hash" diff --git a/core/dbt/context/base.py 
b/core/dbt/context/base.py index c9112037676..5b8fd45e350 100644 --- a/core/dbt/context/base.py +++ b/core/dbt/context/base.py @@ -1,35 +1,45 @@ +from __future__ import annotations + +import datetime +import itertools import json import os -from typing import Any, Dict, NoReturn, Optional, Mapping, Iterable, Set, List +import re import threading +from typing import Any, Callable, Dict, Iterable, List, Mapping, NoReturn, Optional, Set + +# These modules are added to the context. Consider alternative +# approaches which will extend well to potentially many modules +import pytz -from dbt.flags import get_flags import dbt.flags as flags_module -from dbt import tracking -from dbt import utils +from dbt import tracking, utils from dbt.clients.jinja import get_rendered -from dbt.clients.yaml_helper import yaml, safe_load, SafeLoader, Loader, Dumper # noqa: F401 -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER +from dbt.clients.yaml_helper import ( # noqa: F401 + Dumper, + Loader, + SafeLoader, + safe_load, + yaml, +) +from dbt.constants import DEFAULT_ENV_PLACEHOLDER, SECRET_PLACEHOLDER from dbt.contracts.graph.nodes import Resource +from dbt.events.types import JinjaLogDebug, JinjaLogInfo from dbt.exceptions import ( - SecretEnvVarLocationError, EnvVarMissingError, - MacroReturn, RequiredVarNotFoundError, + SecretEnvVarLocationError, SetStrictWrongTypeError, ZipStrictWrongTypeError, ) -from dbt.events.functions import fire_event, get_invocation_id -from dbt.events.types import JinjaLogInfo, JinjaLogDebug -from dbt.events.contextvars import get_node_info +from dbt.flags import get_flags from dbt.version import __version__ as dbt_version - -# These modules are added to the context. Consider alternative -# approaches which will extend well to potentially many modules -import pytz -import datetime -import re -import itertools +from dbt_common.constants import SECRET_ENV_PREFIX +from dbt_common.context import get_invocation_context +from dbt_common.events.contextvars import get_node_info +from dbt_common.events.functions import fire_event, get_invocation_id +from dbt_common.events.types import PrintEvent +from dbt_common.exceptions.macros import MacroReturn # See the `contexts` module README for more information on how contexts work @@ -86,33 +96,29 @@ def get_context_modules() -> Dict[str, Dict[str, Any]]: class ContextMember: - def __init__(self, value, name=None): + def __init__(self, value: Any, name: Optional[str] = None) -> None: self.name = name self.inner = value - def key(self, default): + def key(self, default: str) -> str: if self.name is None: return default return self.name -def contextmember(value): - if isinstance(value, str): - return lambda v: ContextMember(v, name=value) - return ContextMember(value) +def contextmember(value: Optional[str] = None) -> Callable: + return lambda v: ContextMember(v, name=value) -def contextproperty(value): - if isinstance(value, str): - return lambda v: ContextMember(property(v), name=value) - return ContextMember(property(value)) +def contextproperty(value: Optional[str] = None) -> Callable: + return lambda v: ContextMember(property(v), name=value) class ContextMeta(type): - def __new__(mcls, name, bases, dct): - context_members = {} - context_attrs = {} - new_dct = {} + def __new__(mcls, name, bases, dct: Dict[str, Any]) -> ContextMeta: + context_members: Dict[str, Any] = {} + context_attrs: Dict[str, Any] = {} + new_dct: Dict[str, Any] = {} for base in bases: context_members.update(getattr(base, "_context_members_", {})) @@ 
-148,27 +154,28 @@ def _generate_merged(self) -> Mapping[str, Any]: return self._cli_vars @property - def node_name(self): + def node_name(self) -> str: if self._node is not None: return self._node.name else: return "<Configuration>" - def get_missing_var(self, var_name): - raise RequiredVarNotFoundError(var_name, self._merged, self._node) + def get_missing_var(self, var_name: str) -> NoReturn: + # TODO function name implies a non exception resolution + raise RequiredVarNotFoundError(var_name, dict(self._merged), self._node) - def has_var(self, var_name: str): + def has_var(self, var_name: str) -> bool: return var_name in self._merged - def get_rendered_var(self, var_name): + def get_rendered_var(self, var_name: str) -> Any: raw = self._merged[var_name] # if bool/int/float/etc are passed in, don't compile anything if not isinstance(raw, str): return raw - return get_rendered(raw, self._context) + return get_rendered(raw, dict(self._context)) - def __call__(self, var_name, default=_VAR_NOTSET): + def __call__(self, var_name: str, default: Any = _VAR_NOTSET) -> Any: if self.has_var(var_name): return self.get_rendered_var(var_name) elif default is not self._VAR_NOTSET: @@ -178,13 +185,17 @@ def __call__(self, var_name, default=_VAR_NOTSET): class BaseContext(metaclass=ContextMeta): + # Set by ContextMeta + _context_members_: Dict[str, Any] + _context_attrs_: Dict[str, Any] + # subclass is TargetContext - def __init__(self, cli_vars): - self._ctx = {} - self.cli_vars = cli_vars - self.env_vars = {} + def __init__(self, cli_vars: Dict[str, Any]) -> None: + self._ctx: Dict[str, Any] = {} + self.cli_vars: Dict[str, Any] = cli_vars + self.env_vars: Dict[str, Any] = {} - def generate_builtins(self): + def generate_builtins(self) -> Dict[str, Any]: builtins: Dict[str, Any] = {} for key, value in self._context_members_.items(): if hasattr(value, "__get__"): @@ -194,14 +205,14 @@ def generate_builtins(self): return builtins # no dbtClassMixin so this is not an actual override - def to_dict(self): + def to_dict(self) -> Dict[str, Any]: self._ctx["context"] = self._ctx builtins = self.generate_builtins() self._ctx["builtins"] = builtins self._ctx.update(builtins) return self._ctx - @contextproperty + @contextproperty() def dbt_version(self) -> str: """The `dbt_version` variable returns the installed version of dbt that is currently running. It can be used for debugging or auditing @@ -221,7 +232,7 @@ def dbt_version(self) -> str: """ return dbt_version - @contextproperty + @contextproperty() def var(self) -> Var: """Variables can be passed from your `dbt_project.yml` file into models during compilation. These variables are useful for configuring packages @@ -290,7 +301,7 @@ def var(self) -> Var: """ return Var(self._ctx, self.cli_vars) - @contextmember + @contextmember() def env_var(self, var: str, default: Optional[str] = None) -> str: """The env_var() function. Return the environment variable named 'var'. If there is no such environment variable set, return the default. @@ -300,8 +311,9 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: return_value = None if var.startswith(SECRET_ENV_PREFIX): raise SecretEnvVarLocationError(var) - if var in os.environ: - return_value = os.environ[var] + env = get_invocation_context().env + if var in env: + return_value = env[var] elif default is not None: return_value = default @@ -310,7 +322,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: # that so we can skip partial parsing. 
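
For reference, a minimal standalone sketch of the `env_var` lookup order this hunk moves to: values come from the invocation context's `env` mapping rather than `os.environ` directly, secret-prefixed variables are rejected in this location, and the value tracked for partial parsing is the real value only when the variable was actually set (a placeholder is stored when only the default was used). The constants and exception classes below are simplified stand-ins, not dbt's own.

```python
# Illustrative sketch only; simplified stand-ins for dbt's constants and exceptions.
from typing import Dict, Optional

SECRET_ENV_PREFIX = "DBT_ENV_SECRET_"
DEFAULT_ENV_PLACEHOLDER = "DBT_DEFAULT_PLACEHOLDER"


class EnvVarMissingError(Exception):
    pass


class SecretEnvVarLocationError(Exception):
    pass


def resolve_env_var(
    env: Dict[str, str],      # invocation-context env, not os.environ
    tracked: Dict[str, str],  # env vars recorded for partial parsing
    var: str,
    default: Optional[str] = None,
) -> str:
    if var.startswith(SECRET_ENV_PREFIX):
        # secret env vars are only allowed in specific locations
        raise SecretEnvVarLocationError(var)

    if var in env:
        return_value = env[var]
    elif default is not None:
        return_value = default
    else:
        raise EnvVarMissingError(var)

    # Track the value so partial parsing can detect changes; when only the
    # default was used, store a placeholder instead of the default itself.
    tracked[var] = return_value if var in env else DEFAULT_ENV_PLACEHOLDER
    return return_value


tracked: Dict[str, str] = {}
print(resolve_env_var({"MY_VAR": "abc"}, tracked, "MY_VAR"))      # abc
print(resolve_env_var({}, tracked, "OTHER", default="fallback"))  # fallback
print(tracked)  # {'MY_VAR': 'abc', 'OTHER': 'DBT_DEFAULT_PLACEHOLDER'}
```
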
Otherwise the file will be scheduled for # reparsing. If the default changes, the file will have been updated and therefore # will be scheduled for reparsing anyways. - self.env_vars[var] = return_value if var in os.environ else DEFAULT_ENV_PLACEHOLDER + self.env_vars[var] = return_value if var in env else DEFAULT_ENV_PLACEHOLDER return return_value else: @@ -318,11 +330,12 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: if os.environ.get("DBT_MACRO_DEBUGGING"): - @contextmember + @contextmember() @staticmethod def debug(): """Enter a debugger at this line in the compiled jinja code.""" import sys + import ipdb # type: ignore frame = sys._getframe(3) @@ -357,7 +370,7 @@ def _return(data: Any) -> NoReturn: """ raise MacroReturn(data) - @contextmember + @contextmember() @staticmethod def fromjson(string: str, default: Any = None) -> Any: """The `fromjson` context method can be used to deserialize a json @@ -378,7 +391,7 @@ def fromjson(string: str, default: Any = None) -> Any: except ValueError: return default - @contextmember + @contextmember() @staticmethod def tojson(value: Any, default: Any = None, sort_keys: bool = False) -> Any: """The `tojson` context method can be used to serialize a Python @@ -401,7 +414,7 @@ def tojson(value: Any, default: Any = None, sort_keys: bool = False) -> Any: except ValueError: return default - @contextmember + @contextmember() @staticmethod def fromyaml(value: str, default: Any = None) -> Any: """The fromyaml context method can be used to deserialize a yaml string @@ -432,7 +445,7 @@ def fromyaml(value: str, default: Any = None) -> Any: # safe_dump defaults to sort_keys=True, but we act like json.dumps (the # opposite) - @contextmember + @contextmember() @staticmethod def toyaml( value: Any, default: Optional[str] = None, sort_keys: bool = False @@ -477,7 +490,7 @@ def _set(value: Iterable[Any], default: Any = None) -> Optional[Set[Any]]: except TypeError: return default - @contextmember + @contextmember() @staticmethod def set_strict(value: Iterable[Any]) -> Set[Any]: """The `set_strict` context method can be used to convert any iterable @@ -519,7 +532,7 @@ def _zip(*args: Iterable[Any], default: Any = None) -> Optional[Iterable[Any]]: except TypeError: return default - @contextmember + @contextmember() @staticmethod def zip_strict(*args: Iterable[Any]) -> Iterable[Any]: """The `zip_strict` context method can be used to used to return @@ -541,7 +554,7 @@ def zip_strict(*args: Iterable[Any]) -> Iterable[Any]: except TypeError as e: raise ZipStrictWrongTypeError(e) - @contextmember + @contextmember() @staticmethod def log(msg: str, info: bool = False) -> str: """Logs a line to either the log file or stdout. @@ -556,13 +569,25 @@ def log(msg: str, info: bool = False) -> str: {{ log("Running some_macro: " ~ arg1 ~ ", " ~ arg2) }} {% endmacro %}" """ + # Detect instances of the placeholder value ($$$DBT_SECRET_START...DBT_SECRET_END$$$) + # and replace it with the standard mask '*****' + if "DBT_SECRET_START" in str(msg): + search_group = f"({SECRET_ENV_PREFIX}(.*))" + pattern = SECRET_PLACEHOLDER.format(search_group).replace("$", r"\$") + m = re.search( + pattern, + msg, + ) + if m: + msg = re.sub(pattern, "*****", msg) + if info: fire_event(JinjaLogInfo(msg=msg, node_info=get_node_info())) else: fire_event(JinjaLogDebug(msg=msg, node_info=get_node_info())) return "" - @contextproperty + @contextproperty() def run_started_at(self) -> Optional[datetime.datetime]: """`run_started_at` outputs the timestamp that this run started, e.g. 
`2017-04-21 01:23:45.678`. The `run_started_at` variable is a Python @@ -590,19 +615,19 @@ def run_started_at(self) -> Optional[datetime.datetime]: else: return None - @contextproperty + @contextproperty() def invocation_id(self) -> Optional[str]: """invocation_id outputs a UUID generated for this dbt run (useful for auditing) """ return get_invocation_id() - @contextproperty + @contextproperty() def thread_id(self) -> str: """thread_id outputs an ID for the current thread (useful for auditing)""" return threading.current_thread().name - @contextproperty + @contextproperty() def modules(self) -> Dict[str, Any]: """The `modules` variable in the Jinja context contains useful Python modules for operating on data. @@ -627,7 +652,7 @@ def modules(self) -> Dict[str, Any]: """ # noqa return get_context_modules() - @contextproperty + @contextproperty() def flags(self) -> Any: """The `flags` variable contains true/false values for flags provided on the command line. @@ -644,7 +669,7 @@ def flags(self) -> Any: """ return flags_module.get_flag_obj() - @contextmember + @contextmember() @staticmethod def print(msg: str) -> str: """Prints a line to stdout. @@ -659,10 +684,11 @@ def print(msg: str) -> str: """ if get_flags().PRINT: - print(msg) + # No formatting, still get to stdout when --quiet is used + fire_event(PrintEvent(msg=msg)) return "" - @contextmember + @contextmember() @staticmethod def diff_of_two_dicts( dict_a: Dict[str, List[str]], dict_b: Dict[str, List[str]] @@ -691,7 +717,7 @@ def diff_of_two_dicts( dict_diff.update({k: dict_a[k]}) return dict_diff - @contextmember + @contextmember() @staticmethod def local_md5(value: str) -> str: """Calculates an MD5 hash of the given string. diff --git a/core/dbt/context/configured.py b/core/dbt/context/configured.py index bb292a19565..240d9afb843 100644 --- a/core/dbt/context/configured.py +++ b/core/dbt/context/configured.py @@ -1,14 +1,14 @@ -import os from typing import Any, Dict, Optional -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER -from dbt.contracts.connection import AdapterRequiredConfig -from dbt.node_types import NodeType -from dbt.utils import MultiDict - -from dbt.context.base import contextproperty, contextmember, Var +from dbt.adapters.contracts.connection import AdapterRequiredConfig +from dbt.constants import DEFAULT_ENV_PLACEHOLDER +from dbt.context.base import Var, contextmember, contextproperty from dbt.context.target import TargetContext from dbt.exceptions import EnvVarMissingError, SecretEnvVarLocationError +from dbt.node_types import NodeType +from dbt.utils import MultiDict +from dbt_common.constants import SECRET_ENV_PREFIX +from dbt_common.context import get_invocation_context class ConfiguredContext(TargetContext): @@ -19,7 +19,7 @@ def __init__(self, config: AdapterRequiredConfig) -> None: super().__init__(config.to_target_dict(), config.cli_vars) self.config = config - @contextproperty + @contextproperty() def project_name(self) -> str: return self.config.project_name @@ -80,17 +80,18 @@ def __init__(self, config, project_name: str, schema_yaml_vars: Optional[SchemaY self._project_name = project_name self.schema_yaml_vars = schema_yaml_vars - @contextproperty + @contextproperty() def var(self) -> ConfiguredVar: return ConfiguredVar(self._ctx, self.config, self._project_name) - @contextmember + @contextmember() def env_var(self, var: str, default: Optional[str] = None) -> str: return_value = None if var.startswith(SECRET_ENV_PREFIX): raise SecretEnvVarLocationError(var) - if var in os.environ: - 
return_value = os.environ[var] + env = get_invocation_context().env + if var in env: + return_value = env[var] elif default is not None: return_value = default @@ -101,7 +102,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: # reparsing. If the default changes, the file will have been updated and therefore # will be scheduled for reparsing anyways. self.schema_yaml_vars.env_vars[var] = ( - return_value if var in os.environ else DEFAULT_ENV_PLACEHOLDER + return_value if var in env else DEFAULT_ENV_PLACEHOLDER ) return return_value @@ -113,7 +114,7 @@ class MacroResolvingContext(ConfiguredContext): def __init__(self, config): super().__init__(config) - @contextproperty + @contextproperty() def var(self) -> ConfiguredVar: return ConfiguredVar(self._ctx, self.config, self.config.project_name) diff --git a/core/dbt/context/context_config.py b/core/dbt/context/context_config.py index 437b47eb4b3..caf83d425fe 100644 --- a/core/dbt/context/context_config.py +++ b/core/dbt/context/context_config.py @@ -1,13 +1,15 @@ from abc import abstractmethod from copy import deepcopy from dataclasses import dataclass -from typing import List, Iterator, Dict, Any, TypeVar, Generic, Optional +from typing import Any, Dict, Generic, Iterator, List, Optional, TypeVar -from dbt.config import RuntimeConfig, Project, IsFQNResource -from dbt.contracts.graph.model_config import BaseConfig, get_config_for, _listify -from dbt.exceptions import DbtInternalError +from dbt.adapters.factory import get_config_class_by_name +from dbt.config import IsFQNResource, Project, RuntimeConfig +from dbt.contracts.graph.model_config import get_config_for from dbt.node_types import NodeType from dbt.utils import fqn_search +from dbt_common.contracts.config.base import BaseConfig, merge_config_dicts +from dbt_common.exceptions import DbtInternalError @dataclass @@ -25,8 +27,7 @@ class ConfigSource: def __init__(self, project): self.project = project - def get_config_dict(self, resource_type: NodeType): - ... + def get_config_dict(self, resource_type: NodeType): ... 
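
Earlier in this hunk, `contextmember` and `contextproperty` become decorator factories that must be called (with or without an explicit name) before decorating. A minimal sketch of that pattern, with the metaclass collection step omitted and the example class and values invented for illustration:

```python
# Sketch of the decorator-factory pattern for context members. In dbt, a metaclass
# (ContextMeta) later collects the ContextMember objects into the Jinja context.
from typing import Any, Callable, Optional


class ContextMember:
    def __init__(self, value: Any, name: Optional[str] = None) -> None:
        self.name = name
        self.inner = value

    def key(self, default: str) -> str:
        return self.name if self.name is not None else default


def contextmember(value: Optional[str] = None) -> Callable:
    # Always called with parentheses now: @contextmember() or @contextmember("alias")
    return lambda v: ContextMember(v, name=value)


def contextproperty(value: Optional[str] = None) -> Callable:
    return lambda v: ContextMember(property(v), name=value)


class ExampleContext:
    @contextproperty()
    def dbt_version(self) -> str:
        return "0.0.0-example"  # placeholder value

    @contextmember("env_var")
    def _env_var(self, var: str, default: Optional[str] = None) -> str:
        return default or ""


member = ExampleContext.__dict__["_env_var"]
print(isinstance(member, ContextMember), member.key("_env_var"))  # True env_var
```
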
class UnrenderedConfig(ConfigSource): @@ -42,11 +43,17 @@ def get_config_dict(self, resource_type: NodeType) -> Dict[str, Any]: elif resource_type == NodeType.Source: model_configs = unrendered.get("sources") elif resource_type == NodeType.Test: - model_configs = unrendered.get("tests") + model_configs = unrendered.get("data_tests") elif resource_type == NodeType.Metric: model_configs = unrendered.get("metrics") + elif resource_type == NodeType.SemanticModel: + model_configs = unrendered.get("semantic_models") + elif resource_type == NodeType.SavedQuery: + model_configs = unrendered.get("saved_queries") elif resource_type == NodeType.Exposure: model_configs = unrendered.get("exposures") + elif resource_type == NodeType.Unit: + model_configs = unrendered.get("unit_tests") else: model_configs = unrendered.get("models") if model_configs is None: @@ -67,11 +74,17 @@ def get_config_dict(self, resource_type: NodeType) -> Dict[str, Any]: elif resource_type == NodeType.Source: model_configs = self.project.sources elif resource_type == NodeType.Test: - model_configs = self.project.tests + model_configs = self.project.data_tests elif resource_type == NodeType.Metric: model_configs = self.project.metrics + elif resource_type == NodeType.SemanticModel: + model_configs = self.project.semantic_models + elif resource_type == NodeType.SavedQuery: + model_configs = self.project.saved_queries elif resource_type == NodeType.Exposure: model_configs = self.project.exposures + elif resource_type == NodeType.Unit: + model_configs = self.project.unit_tests else: model_configs = self.project.models return model_configs @@ -116,12 +129,12 @@ def _active_project_configs( return self._project_configs(self._active_project, fqn, resource_type) @abstractmethod - def _update_from_config(self, result: T, partial: Dict[str, Any], validate: bool = False) -> T: - ... + def _update_from_config( + self, result: T, partial: Dict[str, Any], validate: bool = False + ) -> T: ... @abstractmethod - def initial_result(self, resource_type: NodeType, base: bool) -> T: - ... + def initial_result(self, resource_type: NodeType, base: bool) -> T: ... def calculate_node_config( self, @@ -167,8 +180,7 @@ def calculate_node_config_dict( project_name: str, base: bool, patch_config_dict: Optional[Dict[str, Any]] = None, - ) -> Dict[str, Any]: - ... + ) -> Dict[str, Any]: ... class ContextConfigGenerator(BaseContextConfigGenerator[C]): @@ -189,9 +201,23 @@ def initial_result(self, resource_type: NodeType, base: bool) -> C: def _update_from_config(self, result: C, partial: Dict[str, Any], validate: bool = False) -> C: translated = self._active_project.credentials.translate_aliases(partial) - return result.update_from( - translated, self._active_project.credentials.type, validate=validate - ) + translated = self.translate_hook_names(translated) + + adapter_type = self._active_project.credentials.type + adapter_config_cls = get_config_class_by_name(adapter_type) + + updated = result.update_from(translated, adapter_config_cls, validate=validate) + return updated + + def translate_hook_names(self, project_dict): + # This is a kind of kludge because the fix for #6411 specifically allowed misspelling + # the hook field names in dbt_project.yml, which only ever worked because we didn't + # run validate on the dbt_project configs. 
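
The if/elif routing above in `UnrenderedConfig`/`RenderedConfig` amounts to a lookup from resource type to the `dbt_project.yml` section that configures it. A condensed sketch of that mapping, with the `NodeType` values written as plain strings and the sample project dict invented for illustration:

```python
# Sketch: resource type -> dbt_project.yml config section, after "tests" becomes
# "data_tests" and unit tests / semantic-layer resources get their own sections.
from typing import Any, Dict

PROJECT_CONFIG_SECTIONS = {
    "model": "models",
    "seed": "seeds",
    "snapshot": "snapshots",
    "source": "sources",
    "test": "data_tests",  # renamed from "tests"
    "unit_test": "unit_tests",
    "metric": "metrics",
    "semantic_model": "semantic_models",
    "saved_query": "saved_queries",
    "exposure": "exposures",
}


def get_config_section(project: Dict[str, Any], resource_type: str) -> Dict[str, Any]:
    # anything unrecognized falls back to the "models" section, as in the diff
    section = PROJECT_CONFIG_SECTIONS.get(resource_type, "models")
    return project.get(section) or {}


project = {"data_tests": {"+severity": "warn"}, "models": {"+materialized": "view"}}
print(get_config_section(project, "test"))    # {'+severity': 'warn'}
print(get_config_section(project, "metric"))  # {}
```
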
+ if "pre_hook" in project_dict: + project_dict["pre-hook"] = project_dict.pop("pre_hook") + if "post_hook" in project_dict: + project_dict["post-hook"] = project_dict.pop("post_hook") + return project_dict def calculate_node_config_dict( self, @@ -267,55 +293,7 @@ def __init__( def add_config_call(self, opts: Dict[str, Any]) -> None: dct = self._config_call_dict - self._add_config_call(dct, opts) - - @classmethod - def _add_config_call(cls, config_call_dict, opts: Dict[str, Any]) -> None: - # config_call_dict is already encountered configs, opts is new - # This mirrors code in _merge_field_value in model_config.py which is similar but - # operates on config objects. - for k, v in opts.items(): - # MergeBehavior for post-hook and pre-hook is to collect all - # values, instead of overwriting - if k in BaseConfig.mergebehavior["append"]: - if not isinstance(v, list): - v = [v] - if k in config_call_dict: # should always be a list here - config_call_dict[k].extend(v) - else: - config_call_dict[k] = v - - elif k in BaseConfig.mergebehavior["update"]: - if not isinstance(v, dict): - raise DbtInternalError(f"expected dict, got {v}") - if k in config_call_dict and isinstance(config_call_dict[k], dict): - config_call_dict[k].update(v) - else: - config_call_dict[k] = v - elif k in BaseConfig.mergebehavior["dict_key_append"]: - if not isinstance(v, dict): - raise DbtInternalError(f"expected dict, got {v}") - if k in config_call_dict: # should always be a dict - for key, value in v.items(): - extend = False - # This might start with a +, to indicate we should extend the list - # instead of just clobbering it - if key.startswith("+"): - extend = True - if key in config_call_dict[k] and extend: - # extend the list - config_call_dict[k][key].extend(_listify(value)) - else: - # clobber the list - config_call_dict[k][key] = _listify(value) - else: - # This is always a dictionary - config_call_dict[k] = v - # listify everything - for key, value in config_call_dict[k].items(): - config_call_dict[k][key] = _listify(value) - else: - config_call_dict[k] = v + merge_config_dicts(dct, opts) def build_config_dict( self, diff --git a/core/dbt/context/docs.py b/core/dbt/context/docs.py index 3d5abf42e11..923e8d402b9 100644 --- a/core/dbt/context/docs.py +++ b/core/dbt/context/docs.py @@ -1,15 +1,11 @@ from typing import Any, Dict, Union -from dbt.exceptions import ( - DocTargetNotFoundError, - DocArgsError, -) from dbt.config.runtime import RuntimeConfig -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import Macro, ResultNode - from dbt.context.base import contextmember from dbt.context.configured import SchemaYamlContext +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import Macro, ResultNode +from dbt.exceptions import DocArgsError, DocTargetNotFoundError class DocsRuntimeContext(SchemaYamlContext): @@ -24,7 +20,7 @@ def __init__( self.node = node self.manifest = manifest - @contextmember + @contextmember() def doc(self, *args: str) -> str: """The `doc` function is used to reference docs blocks in schema.yml files. It is analogous to the `ref` function. 
For more information, diff --git a/core/dbt/context/exceptions_jinja.py b/core/dbt/context/exceptions_jinja.py index cffc0e1e483..791a8745859 100644 --- a/core/dbt/context/exceptions_jinja.py +++ b/core/dbt/context/exceptions_jinja.py @@ -1,31 +1,35 @@ import functools from typing import NoReturn -from dbt.events.functions import warn_or_error -from dbt.events.helpers import env_secrets, scrub_secrets -from dbt.events.types import JinjaLogWarning - -from dbt.exceptions import ( - DbtRuntimeError, +from dbt.adapters.exceptions import ( + ColumnTypeMissingError, MissingConfigError, MissingMaterializationError, - MissingRelationError, + RelationWrongTypeError, +) +from dbt.adapters.exceptions.cache import CacheInconsistencyError +from dbt.events.types import JinjaLogWarning, SnapshotTimestampWarning +from dbt.exceptions import ( AmbiguousAliasError, AmbiguousCatalogMatchError, - CacheInconsistencyError, - DataclassNotDictError, CompilationError, - DbtDatabaseError, - DependencyNotFoundError, + ContractError, DependencyError, + DependencyNotFoundError, DuplicatePatchPathError, DuplicateResourceNameError, + FailFastError, + MissingRelationError, PropertyYMLError, + env_secrets, + scrub_secrets, +) +from dbt_common.events.functions import warn_or_error +from dbt_common.exceptions import ( + DataclassNotDictError, + DbtDatabaseError, + DbtRuntimeError, NotImplementedError, - RelationWrongTypeError, - ContractError, - ColumnTypeMissingError, - FailFastError, ) @@ -112,6 +116,17 @@ def raise_fail_fast_error(msg, node=None) -> NoReturn: raise FailFastError(msg, node=node) +def warn_snapshot_timestamp_data_types( + snapshot_time_data_type: str, updated_at_data_type: str +) -> None: + warn_or_error( + SnapshotTimestampWarning( + snapshot_time_data_type=snapshot_time_data_type, + updated_at_data_type=updated_at_data_type, + ) + ) + + # Update this when a new function should be added to the # dbt context's `exceptions` key! CONTEXT_EXPORTS = { @@ -137,6 +152,7 @@ def raise_fail_fast_error(msg, node=None) -> NoReturn: raise_contract_error, column_type_missing, raise_fail_fast_error, + warn_snapshot_timestamp_data_types, ] } diff --git a/core/dbt/context/macro_resolver.py b/core/dbt/context/macro_resolver.py index 20f97febcb0..ad497b3e885 100644 --- a/core/dbt/context/macro_resolver.py +++ b/core/dbt/context/macro_resolver.py @@ -1,8 +1,9 @@ from typing import Dict, MutableMapping, Optional + +from dbt.clients.jinja import MacroGenerator from dbt.contracts.graph.nodes import Macro from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME -from dbt.clients.jinja import MacroGenerator MacroNamespace = Dict[str, Macro] @@ -40,7 +41,7 @@ def __init__( self._build_internal_packages_namespace() self._build_macros_by_name() - def _build_internal_packages_namespace(self): + def _build_internal_packages_namespace(self) -> None: # Iterate in reverse-order and overwrite: the packages that are first # in the list are the ones we want to "win". 
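
The comment above describes the precedence trick used when flattening package namespaces: iterate the ordered list of packages in reverse and let later `update` calls (the higher-priority packages) overwrite earlier ones. A small self-contained sketch, with package and macro names invented for illustration:

```python
# Sketch: packages earlier in the search order win because they are applied last.
from typing import Dict

packages_in_priority_order = [
    {"my_macro": "root_project.my_macro", "helper": "root_project.helper"},
    {"my_macro": "some_package.my_macro"},
    {"helper": "dbt.helper", "run_hooks": "dbt.run_hooks"},
]

namespace: Dict[str, str] = {}
for package_namespace in reversed(packages_in_priority_order):
    namespace.update(package_namespace)

print(namespace)
# {'helper': 'root_project.helper', 'run_hooks': 'dbt.run_hooks',
#  'my_macro': 'root_project.my_macro'}
```
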
self.internal_packages_namespace: MacroNamespace = {} @@ -56,7 +57,7 @@ def _build_internal_packages_namespace(self): # root package namespace # non-internal packages (that aren't local or root) # dbt internal packages - def _build_macros_by_name(self): + def _build_macros_by_name(self) -> None: macros_by_name = {} # all internal packages (already in the right order) @@ -78,7 +79,7 @@ def _add_macro_to( self, package_namespaces: Dict[str, MacroNamespace], macro: Macro, - ): + ) -> None: if macro.package_name in package_namespaces: namespace = package_namespaces[macro.package_name] else: @@ -89,7 +90,7 @@ def _add_macro_to( raise DuplicateMacroNameError(macro, macro, macro.package_name) package_namespaces[macro.package_name][macro.name] = macro - def add_macro(self, macro: Macro): + def add_macro(self, macro: Macro) -> None: macro_name: str = macro.name # internal macros (from plugins) will be processed separately from @@ -103,11 +104,11 @@ def add_macro(self, macro: Macro): if macro.package_name == self.root_project_name: self.root_package_macros[macro_name] = macro - def add_macros(self): + def add_macros(self) -> None: for macro in self.macros.values(): self.add_macro(macro) - def get_macro(self, local_package, macro_name): + def get_macro(self, local_package, macro_name) -> Optional[Macro]: local_package_macros = {} # If the macro is explicitly prefixed with an internal namespace # (e.g. 'dbt.some_macro'), look there first @@ -125,7 +126,7 @@ def get_macro(self, local_package, macro_name): return self.macros_by_name[macro_name] return None - def get_macro_id(self, local_package, macro_name): + def get_macro_id(self, local_package, macro_name) -> Optional[str]: macro = self.get_macro(local_package, macro_name) if macro is None: return None diff --git a/core/dbt/context/macros.py b/core/dbt/context/macros.py index 1c61e564e06..954cc72c9a4 100644 --- a/core/dbt/context/macros.py +++ b/core/dbt/context/macros.py @@ -1,10 +1,9 @@ -from typing import Any, Dict, Iterable, Union, Optional, List, Iterator, Mapping, Set +from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, Set, Union from dbt.clients.jinja import MacroGenerator, MacroStack from dbt.contracts.graph.nodes import Macro -from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME from dbt.exceptions import DuplicateMacroNameError, PackageNotFoundForMacroError - +from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME FlatNamespace = Dict[str, MacroGenerator] NamespaceMember = Union[FlatNamespace, MacroGenerator] @@ -67,7 +66,6 @@ def __getitem__(self, key: str) -> NamespaceMember: raise KeyError(key) def get_from_package(self, package_name: Optional[str], name: str) -> Optional[MacroGenerator]: - pkg: FlatNamespace if package_name is None: return self.get(name) elif package_name == GLOBAL_PROJECT_NAME: @@ -125,7 +123,7 @@ def _add_macro_to( raise DuplicateMacroNameError(macro_func.macro, macro, macro.package_name) hierarchy[macro.package_name][macro.name] = macro_func - def add_macro(self, macro: Macro, ctx: Dict[str, Any]): + def add_macro(self, macro: Macro, ctx: Dict[str, Any]) -> None: macro_name: str = macro.name # MacroGenerator is in clients/jinja.py @@ -147,12 +145,15 @@ def add_macro(self, macro: Macro, ctx: Dict[str, Any]): elif macro.package_name == self.root_package: self.globals[macro_name] = macro_func - def add_macros(self, macros: Iterable[Macro], ctx: Dict[str, Any]): + def add_macros(self, macros: Iterable[Macro], ctx: Dict[str, Any]) -> None: for macro in 
macros: self.add_macro(macro, ctx) - def build_namespace(self, macros: Iterable[Macro], ctx: Dict[str, Any]) -> MacroNamespace: - self.add_macros(macros, ctx) + def build_namespace( + self, macros_by_package: Dict[str, Dict[str, Macro]], ctx: Dict[str, Any] + ) -> MacroNamespace: + for package in macros_by_package.values(): + self.add_macros(package.values(), ctx) # Iterate in reverse-order and overwrite: the packages that are first # in the list are the ones we want to "win". diff --git a/core/dbt/context/manifest.py b/core/dbt/context/manifest.py index c6a39993d92..264f59a61e2 100644 --- a/core/dbt/context/manifest.py +++ b/core/dbt/context/manifest.py @@ -1,14 +1,13 @@ from typing import List +from dbt.adapters.contracts.connection import AdapterRequiredConfig from dbt.clients.jinja import MacroStack -from dbt.contracts.connection import AdapterRequiredConfig -from dbt.contracts.graph.manifest import Manifest from dbt.context.macro_resolver import TestMacroNamespace -from .base import contextproperty - +from dbt.contracts.graph.manifest import Manifest +from .base import contextproperty from .configured import ConfiguredContext -from .macros import MacroNamespaceBuilder +from .macros import MacroNamespace, MacroNamespaceBuilder class ManifestContext(ConfiguredContext): @@ -36,11 +35,11 @@ def __init__( # to be able to do: namespace.get_from_package(..) self.namespace = self._build_namespace() - def _build_namespace(self): + def _build_namespace(self) -> MacroNamespace: # this takes all the macros in the manifest and adds them # to the MacroNamespaceBuilder stored in self.namespace builder = self._get_namespace_builder() - return builder.build_namespace(self.manifest.macros.values(), self._ctx) + return builder.build_namespace(self.manifest.get_macros_by_package(), self._ctx) def _get_namespace_builder(self) -> MacroNamespaceBuilder: # avoid an import loop @@ -65,18 +64,9 @@ def to_dict(self): dct.update(self.namespace.project_namespace) else: dct.update(self.namespace) + return dct - @contextproperty + @contextproperty() def context_macro_stack(self): return self.macro_stack - - -class QueryHeaderContext(ManifestContext): - def __init__(self, config: AdapterRequiredConfig, manifest: Manifest) -> None: - super().__init__(config, manifest, config.project_name) - - -def generate_query_header_context(config: AdapterRequiredConfig, manifest: Manifest): - ctx = QueryHeaderContext(config, manifest) - return ctx.to_dict() diff --git a/core/dbt/context/providers.py b/core/dbt/context/providers.py index 4cfaa142e25..898437bf4da 100644 --- a/core/dbt/context/providers.py +++ b/core/dbt/context/providers.py @@ -1,82 +1,98 @@ import abc import os +from copy import deepcopy from typing import ( - Callable, + TYPE_CHECKING, Any, + Callable, Dict, - Optional, - Union, - List, - TypeVar, - Type, Iterable, + List, Mapping, + Optional, + Tuple, + Type, + TypeVar, + Union, ) + from typing_extensions import Protocol +from dbt import selected_resources from dbt.adapters.base.column import Column -from dbt.adapters.factory import get_adapter, get_adapter_package_names, get_adapter_type_names -from dbt.clients import agate_helper -from dbt.clients.jinja import get_rendered, MacroGenerator, MacroStack -from dbt.config import RuntimeConfig, Project -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER -from dbt.context.base import contextmember, contextproperty, Var +from dbt.adapters.base.relation import EventTimeFilter +from dbt.adapters.contracts.connection import AdapterResponse +from 
dbt.adapters.exceptions import MissingConfigError +from dbt.adapters.factory import ( + get_adapter, + get_adapter_package_names, + get_adapter_type_names, +) +from dbt.artifacts.resources import NodeConfig, NodeVersion, RefArgs, SourceConfig +from dbt.clients.jinja import ( + MacroGenerator, + MacroStack, + UnitTestMacroGenerator, + get_rendered, +) +from dbt.config import IsFQNResource, Project, RuntimeConfig +from dbt.constants import DEFAULT_ENV_PLACEHOLDER +from dbt.context.base import Var, contextmember, contextproperty from dbt.context.configured import FQNLookup from dbt.context.context_config import ContextConfig from dbt.context.exceptions_jinja import wrapped_exports from dbt.context.macro_resolver import MacroResolver, TestMacroNamespace -from dbt.context.macros import MacroNamespaceBuilder, MacroNamespace +from dbt.context.macros import MacroNamespace, MacroNamespaceBuilder from dbt.context.manifest import ManifestContext -from dbt.contracts.connection import AdapterResponse -from dbt.contracts.graph.manifest import Manifest, Disabled +from dbt.contracts.graph.manifest import Disabled, Manifest +from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference from dbt.contracts.graph.nodes import ( - Macro, + AccessType, Exposure, - SeedNode, - SourceDefinition, - Resource, + Macro, ManifestNode, - RefArgs, - AccessType, + Resource, + SeedNode, SemanticModel, + SourceDefinition, + UnitTestNode, ) -from dbt.contracts.graph.metrics import MetricReference, ResolvedMetricReference -from dbt.contracts.graph.unparsed import NodeVersion -from dbt.events.functions import get_metadata_vars from dbt.exceptions import ( CompilationError, ConflictingConfigKeysError, - SecretEnvVarLocationError, + DbtReferenceError, EnvVarMissingError, - DbtInternalError, InlineModelConfigError, - NumberSourceArgsError, - PersistDocsValueTypeError, LoadAgateTableNotSeedError, LoadAgateTableValueError, MacroDispatchArgError, MacroResultAlreadyLoadedError, - MacrosSourcesUnWriteableError, MetricArgsError, - MissingConfigError, + NumberSourceArgsError, OperationsCannotRefEphemeralNodesError, - PackageNotInDepsError, ParsingError, - RefBadContextError, + PersistDocsValueTypeError, RefArgsError, - DbtRuntimeError, + RefBadContextError, + SecretEnvVarLocationError, TargetNotFoundError, +) +from dbt.node_types import ModelLanguage, NodeType +from dbt.utils import MultiDict, args_to_dict +from dbt_common.clients.jinja import MacroProtocol +from dbt_common.constants import SECRET_ENV_PREFIX +from dbt_common.context import get_invocation_context +from dbt_common.events.functions import get_metadata_vars +from dbt_common.exceptions import ( + DbtInternalError, + DbtRuntimeError, DbtValidationError, - DbtReferenceError, + MacrosSourcesUnWriteableError, ) -from dbt.config import IsFQNResource -from dbt.node_types import NodeType, ModelLanguage - -from dbt.utils import merge, AttrDict, MultiDict, args_to_dict, cast_to_str +from dbt_common.utils import AttrDict, cast_to_str, merge -from dbt import selected_resources - -import agate +if TYPE_CHECKING: + import agate _MISSING = object() @@ -91,11 +107,6 @@ def __init__(self, adapter): def __getattr__(self, key): return getattr(self._relation_type, key) - def create_from_source(self, *args, **kwargs): - # bypass our create when creating from source so as not to mess up - # the source quoting - return self._relation_type.create_from_source(*args, **kwargs) - def create(self, *args, **kwargs): kwargs["quote_policy"] = merge(self._quoting_config, 
kwargs.pop("quote_policy", {})) return self._relation_type.create(*args, **kwargs) @@ -216,6 +227,31 @@ def current_project(self): def Relation(self): return self.db_wrapper.Relation + @property + def resolve_limit(self) -> Optional[int]: + return 0 if getattr(self.config.args, "EMPTY", False) else None + + def resolve_event_time_filter(self, target: ManifestNode) -> Optional[EventTimeFilter]: + event_time_filter = None + if ( + os.environ.get("DBT_EXPERIMENTAL_MICROBATCH") + and (isinstance(target.config, NodeConfig) or isinstance(target.config, SourceConfig)) + and target.config.event_time + and self.model.config.materialized == "incremental" + and self.model.config.incremental_strategy == "microbatch" + ): + start = self.model.config.get("__dbt_internal_microbatch_event_time_start") + end = self.model.config.get("__dbt_internal_microbatch_event_time_end") + + if start is not None or end is not None: + event_time_filter = EventTimeFilter( + field_name=target.config.event_time, + start=start, + end=end, + ) + + return event_time_filter + @abc.abstractmethod def __call__(self, *args: str) -> Union[str, RelationProxy, MetricReference]: pass @@ -225,8 +261,7 @@ class BaseRefResolver(BaseResolver): @abc.abstractmethod def resolve( self, name: str, package: Optional[str] = None, version: Optional[NodeVersion] = None - ) -> RelationProxy: - ... + ) -> RelationProxy: ... def _repack_args( self, name: str, package: Optional[str], version: Optional[NodeVersion] @@ -292,8 +327,7 @@ def __call__(self, *args: str) -> RelationProxy: class BaseMetricResolver(BaseResolver): @abc.abstractmethod - def resolve(self, name: str, package: Optional[str] = None) -> MetricReference: - ... + def resolve(self, name: str, package: Optional[str] = None) -> MetricReference: ... def _repack_args(self, name: str, package: Optional[str]) -> List[str]: if package is None: @@ -327,8 +361,7 @@ def __call__(self, *args: str) -> MetricReference: class Config(Protocol): - def __init__(self, model, context_config: Optional[ContextConfig]): - ... + def __init__(self, model, context_config: Optional[ContextConfig]): ... 
# Implementation of "config(..)" calls in models @@ -497,6 +530,7 @@ def resolve( self.model.package_name, ) + # Raise an error if the reference target is missing if target_model is None or isinstance(target_model, Disabled): raise TargetNotFoundError( node=self.model, @@ -506,6 +540,8 @@ def resolve( target_version=target_version, disabled=isinstance(target_model, Disabled), ) + + # Raise error if trying to reference a 'private' resource outside its 'group' elif self.manifest.is_invalid_private_ref( self.model, target_model, self.config.dependencies ): @@ -515,6 +551,7 @@ def resolve( access=AccessType.Private, scope=cast_to_str(target_model.group), ) + # Or a 'protected' resource outside its project/package namespace elif self.manifest.is_invalid_protected_ref( self.model, target_model, self.config.dependencies ): @@ -524,16 +561,46 @@ def resolve( access=AccessType.Protected, scope=target_model.package_name, ) - self.validate(target_model, target_name, target_package, target_version) return self.create_relation(target_model) def create_relation(self, target_model: ManifestNode) -> RelationProxy: if target_model.is_ephemeral_model: self.model.set_cte(target_model.unique_id, None) - return self.Relation.create_ephemeral_from_node(self.config, target_model) + return self.Relation.create_ephemeral_from( + target_model, + limit=self.resolve_limit, + event_time_filter=self.resolve_event_time_filter(target_model), + ) + elif ( + hasattr(target_model, "defer_relation") + and target_model.defer_relation + and self.config.args.defer + and ( + # User has explicitly opted to prefer defer_relation for unselected resources + ( + self.config.args.favor_state + and target_model.unique_id not in selected_resources.SELECTED_RESOURCES + ) + # Or, this node's relation does not exist in the expected target location (cache lookup) + or not get_adapter(self.config).get_relation( + target_model.database, target_model.schema, target_model.identifier + ) + ) + ): + return self.Relation.create_from( + self.config, + target_model.defer_relation, + limit=self.resolve_limit, + event_time_filter=self.resolve_event_time_filter(target_model), + ) else: - return self.Relation.create_from(self.config, target_model) + return self.Relation.create_from( + self.config, + target_model, + limit=self.resolve_limit, + event_time_filter=self.resolve_event_time_filter(target_model), + ) def validate( self, @@ -566,6 +633,16 @@ def create_relation(self, target_model: ManifestNode) -> RelationProxy: return super().create_relation(target_model) +class RuntimeUnitTestRefResolver(RuntimeRefResolver): + def resolve( + self, + target_name: str, + target_package: Optional[str] = None, + target_version: Optional[NodeVersion] = None, + ) -> RelationProxy: + return super().resolve(target_name, target_package, target_version) + + # `source` implementations class ParseSourceResolver(BaseSourceResolver): def resolve(self, source_name: str, table_name: str): @@ -590,7 +667,35 @@ def resolve(self, source_name: str, table_name: str): target_kind="source", disabled=(isinstance(target_source, Disabled)), ) - return self.Relation.create_from_source(target_source) + return self.Relation.create_from( + self.config, + target_source, + limit=self.resolve_limit, + event_time_filter=self.resolve_event_time_filter(target_source), + ) + + +class RuntimeUnitTestSourceResolver(BaseSourceResolver): + def resolve(self, source_name: str, table_name: str): + target_source = self.manifest.resolve_source( + source_name, + table_name, + self.current_project, + 
self.model.package_name, + ) + if target_source is None or isinstance(target_source, Disabled): + raise TargetNotFoundError( + node=self.model, + target_name=f"{source_name}.{table_name}", + target_kind="source", + disabled=(isinstance(target_source, Disabled)), + ) + # For unit tests, this isn't a "real" source, it's a ModelNode taking + # the place of a source. We don't really need to return the relation here, + # we just need to set_cte, but skipping it confuses typing. We *do* need + # the relation in the "this" property. + self.model.set_cte(target_source.unique_id, None) + return self.Relation.create_ephemeral_from(target_source) # metric` implementations @@ -618,7 +723,7 @@ def resolve(self, target_name: str, target_package: Optional[str] = None) -> Met target_package=target_package, ) - return ResolvedMetricReference(target_metric, self.manifest, self.Relation) + return ResolvedMetricReference(target_metric, self.manifest) # `var` implementations. @@ -638,10 +743,8 @@ def packages_for_node(self) -> Iterable[Project]: package_name = self._node.package_name if package_name != self._config.project_name: - if package_name not in dependencies: - # I don't think this is actually reachable - raise PackageNotInDepsError(package_name, node=self._node) - yield dependencies[package_name] + if package_name in dependencies: + yield dependencies[package_name] yield self._config def _generate_merged(self) -> Mapping[str, Any]: @@ -670,6 +773,22 @@ class RuntimeVar(ModelConfiguredVar): pass +class UnitTestVar(RuntimeVar): + def __init__( + self, + context: Dict[str, Any], + config: RuntimeConfig, + node: Resource, + ) -> None: + config_copy = None + assert isinstance(node, UnitTestNode) + if node.overrides and node.overrides.vars: + config_copy = deepcopy(config) + config_copy.cli_vars.update(node.overrides.vars) + + super().__init__(context, config_copy or config, node=node) + + # Providers class Provider(Protocol): execute: bool @@ -711,6 +830,16 @@ class RuntimeProvider(Provider): metric = RuntimeMetricResolver +class RuntimeUnitTestProvider(Provider): + execute = True + Config = RuntimeConfigObject + DatabaseWrapper = RuntimeDatabaseWrapper + Var = UnitTestVar + ref = RuntimeUnitTestRefResolver + source = RuntimeUnitTestSourceResolver + metric = RuntimeMetricResolver + + class OperationProvider(RuntimeProvider): ref = OperationRefResolver @@ -754,19 +883,19 @@ def _get_namespace_builder(self): self.model, ) - @contextproperty + @contextproperty() def dbt_metadata_envs(self) -> Dict[str, str]: return get_metadata_vars() - @contextproperty + @contextproperty() def invocation_args_dict(self): return args_to_dict(self.config.args) - @contextproperty + @contextproperty() def _sql_results(self) -> Dict[str, Optional[AttrDict]]: return self.sql_results - @contextmember + @contextmember() def load_result(self, name: str) -> Optional[AttrDict]: if name in self.sql_results: # handle the special case of "main" macro @@ -787,10 +916,12 @@ def load_result(self, name: str) -> Optional[AttrDict]: # Handle trying to load a result that was never stored return None - @contextmember + @contextmember() def store_result( - self, name: str, response: Any, agate_table: Optional[agate.Table] = None + self, name: str, response: Any, agate_table: Optional["agate.Table"] = None ) -> str: + from dbt_common.clients import agate_helper + if agate_table is None: agate_table = agate_helper.empty_table() @@ -803,19 +934,19 @@ def store_result( ) return "" - @contextmember + @contextmember() def store_raw_result( self, name: 
str, message=Optional[str], code=Optional[str], rows_affected=Optional[str], - agate_table: Optional[agate.Table] = None, + agate_table: Optional["agate.Table"] = None, ) -> str: response = AdapterResponse(_message=message, code=code, rows_affected=rows_affected) return self.store_result(name, response, agate_table) - @contextproperty + @contextproperty() def validation(self): def validate_any(*args) -> Callable[[T], None]: def inner(value: T) -> None: @@ -836,7 +967,7 @@ def inner(value: T) -> None: } ) - @contextmember + @contextmember() def write(self, payload: str) -> str: # macros/source defs aren't 'writeable'. if isinstance(self.model, (Macro, SourceDefinition)): @@ -845,11 +976,11 @@ def write(self, payload: str) -> str: self.model.write_node(self.config.project_root, self.model.build_path, payload) return "" - @contextmember + @contextmember() def render(self, string: str) -> str: return get_rendered(string, self._ctx, self.model) - @contextmember + @contextmember() def try_or_compiler_error( self, message_if_exception: str, func: Callable, *args, **kwargs ) -> Any: @@ -858,21 +989,35 @@ def try_or_compiler_error( except Exception: raise CompilationError(message_if_exception, self.model) - @contextmember - def load_agate_table(self) -> agate.Table: + @contextmember() + def load_agate_table(self) -> "agate.Table": + from dbt_common.clients import agate_helper + if not isinstance(self.model, SeedNode): raise LoadAgateTableNotSeedError(self.model.resource_type, node=self.model) - assert self.model.root_path - path = os.path.join(self.model.root_path, self.model.original_file_path) + + # include package_path for seeds defined in packages + package_path = ( + os.path.join(self.config.packages_install_path, self.model.package_name) + if self.model.package_name != self.config.project_name + else "." + ) + path = os.path.join(self.config.project_root, package_path, self.model.original_file_path) + if not os.path.exists(path): + assert self.model.root_path + path = os.path.join(self.model.root_path, self.model.original_file_path) + column_types = self.model.config.column_types + delimiter = self.model.config.delimiter try: - table = agate_helper.from_csv(path, text_columns=column_types) + table = agate_helper.from_csv(path, text_columns=column_types, delimiter=delimiter) except ValueError as e: raise LoadAgateTableValueError(e, node=self.model) - table.original_abspath = os.path.abspath(path) + # this is used by some adapters + table.original_abspath = os.path.abspath(path) # type: ignore return table - @contextproperty + @contextproperty() def ref(self) -> Callable: """The most important function in dbt is `ref()`; it's impossible to build even moderately complex models without it. `ref()` is how you @@ -913,11 +1058,11 @@ def ref(self) -> Callable: """ return self.provider.ref(self.db_wrapper, self.model, self.config, self.manifest) - @contextproperty + @contextproperty() def source(self) -> Callable: return self.provider.source(self.db_wrapper, self.model, self.config, self.manifest) - @contextproperty + @contextproperty() def metric(self) -> Callable: return self.provider.metric(self.db_wrapper, self.model, self.config, self.manifest) @@ -978,7 +1123,7 @@ def ctx_config(self) -> Config: """ # noqa return self.provider.Config(self.model, self.context_config) - @contextproperty + @contextproperty() def execute(self) -> bool: """`execute` is a Jinja variable that returns True when dbt is in "execute" mode. 
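
The `load_agate_table` change above resolves seed paths differently for packages: seeds defined in an installed package are read from the packages install path under the project root, while root-project seeds resolve relative to the project root (with a fallback to `root_path` kept for compatibility). A minimal sketch of just the path construction, with the directory names invented for illustration:

```python
import os


def resolve_seed_path(
    project_root: str,
    packages_install_path: str,
    project_name: str,
    seed_package_name: str,
    original_file_path: str,
) -> str:
    # include the package path only for seeds defined in installed packages
    package_path = (
        os.path.join(packages_install_path, seed_package_name)
        if seed_package_name != project_name
        else "."
    )
    return os.path.join(project_root, package_path, original_file_path)


print(resolve_seed_path("/repo", "dbt_packages", "my_project", "my_project", "seeds/users.csv"))
# /repo/./seeds/users.csv
print(resolve_seed_path("/repo", "dbt_packages", "my_project", "some_pkg", "seeds/countries.csv"))
# /repo/dbt_packages/some_pkg/seeds/countries.csv
```
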
@@ -1039,7 +1184,7 @@ def execute(self) -> bool: """ # noqa return self.provider.execute - @contextproperty + @contextproperty() def exceptions(self) -> Dict[str, Any]: """The exceptions namespace can be used to raise warnings and errors in dbt userspace. @@ -1077,15 +1222,15 @@ def exceptions(self) -> Dict[str, Any]: """ # noqa return wrapped_exports(self.model) - @contextproperty + @contextproperty() def database(self) -> str: return self.config.credentials.database - @contextproperty + @contextproperty() def schema(self) -> str: return self.config.credentials.schema - @contextproperty + @contextproperty() def var(self) -> ModelConfiguredVar: return self.provider.Var( context=self._ctx, @@ -1102,22 +1247,22 @@ def ctx_adapter(self) -> BaseDatabaseWrapper: """ return self.db_wrapper - @contextproperty + @contextproperty() def api(self) -> Dict[str, Any]: return { "Relation": self.db_wrapper.Relation, "Column": self.adapter.Column, } - @contextproperty + @contextproperty() def column(self) -> Type[Column]: return self.adapter.Column - @contextproperty + @contextproperty() def env(self) -> Dict[str, Any]: return self.target - @contextproperty + @contextproperty() def graph(self) -> Dict[str, Any]: """The `graph` context variable contains information about the nodes in your dbt project. Models, sources, tests, and snapshots are all @@ -1226,30 +1371,42 @@ def graph(self) -> Dict[str, Any]: @contextproperty("model") def ctx_model(self) -> Dict[str, Any]: - ret = self.model.to_dict(omit_none=True) + model_dct = self.model.to_dict(omit_none=True) # Maintain direct use of compiled_sql # TODO add depreciation logic[CT-934] - if "compiled_code" in ret: - ret["compiled_sql"] = ret["compiled_code"] - return ret + if "compiled_code" in model_dct: + model_dct["compiled_sql"] = model_dct["compiled_code"] - @contextproperty + if ( + hasattr(self.model, "contract") + and self.model.contract.alias_types is True + and "columns" in model_dct + ): + for column in model_dct["columns"].values(): + if "data_type" in column: + orig_data_type = column["data_type"] + # translate data_type to value in Column.TYPE_LABELS + new_data_type = self.adapter.Column.translate_type(orig_data_type) + column["data_type"] = new_data_type + return model_dct + + @contextproperty() def pre_hooks(self) -> Optional[List[Dict[str, Any]]]: return None - @contextproperty + @contextproperty() def post_hooks(self) -> Optional[List[Dict[str, Any]]]: return None - @contextproperty + @contextproperty() def sql(self) -> Optional[str]: return None - @contextproperty + @contextproperty() def sql_now(self) -> str: return self.adapter.date_function() - @contextmember + @contextmember() def adapter_macro(self, name: str, *args, **kwargs): """This was deprecated in v0.18 in favor of adapter.dispatch""" msg = ( @@ -1261,7 +1418,7 @@ def adapter_macro(self, name: str, *args, **kwargs): ) raise CompilationError(msg) - @contextmember + @contextmember() def env_var(self, var: str, default: Optional[str] = None) -> str: """The env_var() function. Return the environment variable named 'var'. If there is no such environment variable set, return the default. 
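
A sketch of the column `data_type` aliasing applied to the `model` context dict above when a contract with `alias_types` is enforced: declared types are translated to the adapter's canonical labels before being exposed to Jinja. The `TYPE_LABELS` mapping and the sample columns below are illustrative.

```python
from typing import Dict

TYPE_LABELS: Dict[str, str] = {
    "STRING": "TEXT",
    "TIMESTAMP": "TIMESTAMP",
    "FLOAT": "FLOAT",
    "INTEGER": "INT",
}


def translate_type(dtype: str) -> str:
    # unknown types pass through unchanged
    return TYPE_LABELS.get(dtype.upper(), dtype)


model_dct = {
    "columns": {
        "id": {"name": "id", "data_type": "integer"},
        "email": {"name": "email", "data_type": "string"},
    }
}

for column in model_dct["columns"].values():
    if "data_type" in column:
        column["data_type"] = translate_type(column["data_type"])

print(model_dct["columns"]["email"]["data_type"])  # TEXT
```
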
@@ -1271,8 +1428,11 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: return_value = None if var.startswith(SECRET_ENV_PREFIX): raise SecretEnvVarLocationError(var) - if var in os.environ: - return_value = os.environ[var] + + env = get_invocation_context().env + + if var in env: + return_value = env[var] elif default is not None: return_value = default @@ -1291,7 +1451,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: # reparsing. If the default changes, the file will have been updated and therefore # will be scheduled for reparsing anyways. self.manifest.env_vars[var] = ( - return_value if var in os.environ else DEFAULT_ENV_PLACEHOLDER + return_value if var in env else DEFAULT_ENV_PLACEHOLDER ) # hooks come from dbt_project.yml which doesn't have a real file_id @@ -1305,7 +1465,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: else: raise EnvVarMissingError(var) - @contextproperty + @contextproperty() def selected_resources(self) -> List[str]: """The `selected_resources` variable contains a list of the resources selected based on the parameters provided to the dbt command. @@ -1314,7 +1474,7 @@ def selected_resources(self) -> List[str]: """ return selected_resources.SELECTED_RESOURCES - @contextmember + @contextmember() def submit_python_job(self, parsed_model: Dict, compiled_code: str) -> AdapterResponse: # Check macro_stack and that the unique id is for a materialization macro if not ( @@ -1339,7 +1499,7 @@ class MacroContext(ProviderContext): def __init__( self, - model: Macro, + model: MacroProtocol, config: RuntimeConfig, manifest: Manifest, provider: Provider, @@ -1357,44 +1517,28 @@ def __init__( class ModelContext(ProviderContext): model: ManifestNode - @contextproperty + @contextproperty() def pre_hooks(self) -> List[Dict[str, Any]]: - if self.model.resource_type in [NodeType.Source, NodeType.Test]: + if self.model.resource_type in [NodeType.Source, NodeType.Test, NodeType.Unit]: return [] # TODO CT-211 return [ h.to_dict(omit_none=True) for h in self.model.config.pre_hook # type: ignore[union-attr] # noqa ] - @contextproperty + @contextproperty() def post_hooks(self) -> List[Dict[str, Any]]: - if self.model.resource_type in [NodeType.Source, NodeType.Test]: + if self.model.resource_type in [NodeType.Source, NodeType.Test, NodeType.Unit]: return [] # TODO CT-211 return [ h.to_dict(omit_none=True) for h in self.model.config.post_hook # type: ignore[union-attr] # noqa ] - @contextproperty - def sql(self) -> Optional[str]: - # only doing this in sql model for backward compatible - if self.model.language == ModelLanguage.sql: # type: ignore[union-attr] - # If the model is deferred and the adapter doesn't support zero-copy cloning, then select * from the prod - # relation - if getattr(self.model, "defer_relation", None): - # TODO https://github.com/dbt-labs/dbt-core/issues/7976 - return f"select * from {self.model.defer_relation.relation_name or str(self.defer_relation)}" # type: ignore[union-attr] - elif getattr(self.model, "extra_ctes_injected", None): - # TODO CT-211 - return self.model.compiled_code # type: ignore[union-attr] - else: - return None - else: - return None - - @contextproperty + @contextproperty() def compiled_code(self) -> Optional[str]: - if getattr(self.model, "defer_relation", None): + # TODO: avoid routing on args.which if possible + if getattr(self.model, "defer_relation", None) and self.config.args.which == "clone": # TODO https://github.com/dbt-labs/dbt-core/issues/7976 return f"select * from 
{self.model.defer_relation.relation_name or str(self.defer_relation)}" # type: ignore[union-attr] elif getattr(self.model, "extra_ctes_injected", None): @@ -1403,15 +1547,23 @@ def compiled_code(self) -> Optional[str]: else: return None - @contextproperty + @contextproperty() + def sql(self) -> Optional[str]: + # only set this for sql models, for backward compatibility + if self.model.language == ModelLanguage.sql: # type: ignore[union-attr] + return self.compiled_code + else: + return None + + @contextproperty() def database(self) -> str: return getattr(self.model, "database", self.config.credentials.database) - @contextproperty + @contextproperty() def schema(self) -> str: return getattr(self.model, "schema", self.config.credentials.schema) - @contextproperty + @contextproperty() def this(self) -> Optional[RelationProxy]: """`this` makes available schema information about the currently executing model. It's is useful in any context in which you need to @@ -1446,7 +1598,7 @@ def this(self) -> Optional[RelationProxy]: return None return self.db_wrapper.Relation.create_from(self.config, self.model) - @contextproperty + @contextproperty() def defer_relation(self) -> Optional[RelationProxy]: """ For commands which add information about this node's corresponding @@ -1454,13 +1606,40 @@ def defer_relation(self) -> Optional[RelationProxy]: object for that stateful other """ if getattr(self.model, "defer_relation", None): - return self.db_wrapper.Relation.create_from_node( + return self.db_wrapper.Relation.create_from( self.config, self.model.defer_relation # type: ignore ) else: return None +class UnitTestContext(ModelContext): + model: UnitTestNode + + @contextmember() + def env_var(self, var: str, default: Optional[str] = None) -> str: + """The env_var() function. Return the overriden unit test environment variable named 'var'. + + If there is no unit test override, return the environment variable named 'var'. + + If there is no such environment variable set, return the default. + + If the default is None, raise an exception for an undefined variable. 
+ """ + if self.model.overrides and var in self.model.overrides.env_vars: + return self.model.overrides.env_vars[var] + else: + return super().env_var(var, default) + + @contextproperty() + def this(self) -> Optional[str]: + if self.model.this_input_node_unique_id: + this_node = self.manifest.expect(self.model.this_input_node_unique_id) + self.model.set_cte(this_node.unique_id, None) # type: ignore + return self.adapter.Relation.add_ephemeral_prefix(this_node.name) + return None + + # This is called by '_context_for', used in 'render_with_context' def generate_parser_model_context( model: ManifestNode, @@ -1496,7 +1675,7 @@ def generate_runtime_model_context( def generate_runtime_macro_context( - macro: Macro, + macro: MacroProtocol, config: RuntimeConfig, manifest: Manifest, package_name: Optional[str], @@ -1505,6 +1684,59 @@ def generate_runtime_macro_context( return ctx.to_dict() +def generate_runtime_unit_test_context( + unit_test: UnitTestNode, + config: RuntimeConfig, + manifest: Manifest, +) -> Dict[str, Any]: + ctx = UnitTestContext(unit_test, config, manifest, RuntimeUnitTestProvider(), None) + ctx_dict = ctx.to_dict() + + if unit_test.overrides and unit_test.overrides.macros: + global_macro_overrides: Dict[str, Any] = {} + package_macro_overrides: Dict[Tuple[str, str], Any] = {} + + # split macro overrides into global and package-namespaced collections + for macro_name, macro_value in unit_test.overrides.macros.items(): + macro_name_split = macro_name.split(".") + macro_package = macro_name_split[0] if len(macro_name_split) == 2 else None + macro_name = macro_name_split[-1] + + # macro overrides of global macros + if macro_package is None and macro_name in ctx_dict: + original_context_value = ctx_dict[macro_name] + if isinstance(original_context_value, MacroGenerator): + macro_value = UnitTestMacroGenerator(original_context_value, macro_value) + global_macro_overrides[macro_name] = macro_value + + # macro overrides of package-namespaced macros + elif ( + macro_package + and macro_package in ctx_dict + and macro_name in ctx_dict[macro_package] + ): + original_context_value = ctx_dict[macro_package][macro_name] + if isinstance(original_context_value, MacroGenerator): + macro_value = UnitTestMacroGenerator(original_context_value, macro_value) + package_macro_overrides[(macro_package, macro_name)] = macro_value + + # macro overrides of package-namespaced macros + for (macro_package, macro_name), macro_override_value in package_macro_overrides.items(): + ctx_dict[macro_package][macro_name] = macro_override_value + # propgate override of namespaced dbt macro to global namespace + if macro_package == "dbt": + ctx_dict[macro_name] = macro_value + + # macro overrides of global macros, which should take precedence over equivalent package-namespaced overrides + for macro_name, macro_override_value in global_macro_overrides.items(): + ctx_dict[macro_name] = macro_override_value + # propgate override of global dbt macro to dbt namespace + if ctx_dict["dbt"].get(macro_name): + ctx_dict["dbt"][macro_name] = macro_override_value + + return ctx_dict + + class ExposureRefResolver(BaseResolver): def __call__(self, *args, **kwargs) -> str: package = None @@ -1660,13 +1892,15 @@ def _build_test_namespace(self): ) self.namespace = macro_namespace - @contextmember + @contextmember() def env_var(self, var: str, default: Optional[str] = None) -> str: return_value = None if var.startswith(SECRET_ENV_PREFIX): raise SecretEnvVarLocationError(var) - if var in os.environ: - return_value = os.environ[var] + + 
env = get_invocation_context().env + if var in env: + return_value = env[var] elif default is not None: return_value = default @@ -1678,7 +1912,7 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: # reparsing. If the default changes, the file will have been updated and therefore # will be scheduled for reparsing anyways. self.manifest.env_vars[var] = ( - return_value if var in os.environ else DEFAULT_ENV_PLACEHOLDER + return_value if var in env else DEFAULT_ENV_PLACEHOLDER ) # the "model" should only be test nodes, but just in case, check # TODO CT-211 diff --git a/core/dbt/context/query_header.py b/core/dbt/context/query_header.py new file mode 100644 index 00000000000..95c5a0b7a8f --- /dev/null +++ b/core/dbt/context/query_header.py @@ -0,0 +1,13 @@ +from dbt.adapters.contracts.connection import AdapterRequiredConfig +from dbt.context.manifest import ManifestContext +from dbt.contracts.graph.manifest import Manifest + + +class QueryHeaderContext(ManifestContext): + def __init__(self, config: AdapterRequiredConfig, manifest: Manifest) -> None: + super().__init__(config, manifest, config.project_name) + + +def generate_query_header_context(config: AdapterRequiredConfig, manifest: Manifest): + ctx = QueryHeaderContext(config, manifest) + return ctx.to_dict() diff --git a/core/dbt/context/secret.py b/core/dbt/context/secret.py index 4d8ff342aff..3f2641323fe 100644 --- a/core/dbt/context/secret.py +++ b/core/dbt/context/secret.py @@ -1,20 +1,18 @@ -import os from typing import Any, Dict, Optional -from .base import BaseContext, contextmember - -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER +from dbt.constants import DEFAULT_ENV_PLACEHOLDER, SECRET_PLACEHOLDER from dbt.exceptions import EnvVarMissingError +from dbt_common.constants import SECRET_ENV_PREFIX +from dbt_common.context import get_invocation_context - -SECRET_PLACEHOLDER = "$$$DBT_SECRET_START$$${}$$$DBT_SECRET_END$$$" +from .base import BaseContext, contextmember class SecretContext(BaseContext): """This context is used in profiles.yml + packages.yml. It can render secret env vars that aren't usable elsewhere""" - @contextmember + @contextmember() def env_var(self, var: str, default: Optional[str] = None) -> str: """The env_var() function. Return the environment variable named 'var'. If there is no such environment variable set, return the default. @@ -30,24 +28,25 @@ def env_var(self, var: str, default: Optional[str] = None) -> str: # if this is a 'secret' env var, just return the name of the env var # instead of rendering the actual value here, to avoid any risk of # Jinja manipulation. it will be subbed out later, in SecretRenderer.render_value - if var in os.environ and var.startswith(SECRET_ENV_PREFIX): + env = get_invocation_context().env + if var in env and var.startswith(SECRET_ENV_PREFIX): return SECRET_PLACEHOLDER.format(var) - elif var in os.environ: - return_value = os.environ[var] + if var in env: + return_value = env[var] elif default is not None: return_value = default if return_value is not None: # store env vars in the internal manifest to power partial parsing # if it's a 'secret' env var, we shouldn't even get here - # but just to be safe — don't save secrets + # but just to be safe, don't save secrets if not var.startswith(SECRET_ENV_PREFIX): # If the environment variable is set from a default, store a string indicating # that so we can skip partial parsing. Otherwise the file will be scheduled for # reparsing. 
If the default changes, the file will have been updated and therefore # will be scheduled for reparsing anyways. - self.env_vars[var] = return_value if var in os.environ else DEFAULT_ENV_PLACEHOLDER + self.env_vars[var] = return_value if var in env else DEFAULT_ENV_PLACEHOLDER return return_value else: raise EnvVarMissingError(var) diff --git a/core/dbt/context/target.py b/core/dbt/context/target.py index a6d587269d5..39c5a30ee4e 100644 --- a/core/dbt/context/target.py +++ b/core/dbt/context/target.py @@ -9,7 +9,7 @@ def __init__(self, target_dict: Dict[str, Any], cli_vars: Dict[str, Any]): super().__init__(cli_vars=cli_vars) self.target_dict = target_dict - @contextproperty + @contextproperty() def target(self) -> Dict[str, Any]: """`target` contains information about your connection to the warehouse (specified in profiles.yml). Some configs are shared between all diff --git a/core/dbt/contracts/README.md b/core/dbt/contracts/README.md index 9422ca56442..40a7bfe8a02 100644 --- a/core/dbt/contracts/README.md +++ b/core/dbt/contracts/README.md @@ -1 +1,35 @@ # Contracts README + + +## Artifacts + +### Generating JSON schemas +A helper script, `scripts/collect-artifact-schema.py` is available to generate json schemas corresponding to versioned artifacts (`ArtifactMixin`s). + +This script is necessary to run when a new artifact schema version is created, or when changes are made to existing artifact versions, and writes json schema to `schema/dbt/<artifact>/v<version>.json`. + +Schemas in `schema/dbt` power the rendering in https://schemas.getdbt.com/ via https://github.com/dbt-labs/schemas.getdbt.com/ + +#### Example Usage + +Available arguments: +```sh +❯ scripts/collect-artifact-schema.py --help +usage: Collect and write dbt arfifact schema [-h] [--path PATH] [--artifact {manifest,sources,run-results,catalog}] + +options: + -h, --help show this help message and exit + --path PATH The dir to write artifact schema + --artifact {manifest,sources,run-results,catalog} + The name of the artifact to update +``` + +Generate latest version of schemas of all artifacts to `schema/dbt/<artifact>/v<version>.json` +```sh +> sripts/collect-artifact-schema.py --path schemas +``` + +Generate latest version of schemas of manifest to `schema/dbt/manifest/v<version>.json` +```sh +> sripts/collect-artifact-schema.py --path schemas --artifact manifest +``` diff --git a/core/dbt/contracts/connection.py b/core/dbt/contracts/connection.py deleted file mode 100644 index 41eb0bccb8d..00000000000 --- a/core/dbt/contracts/connection.py +++ /dev/null @@ -1,233 +0,0 @@ -import abc -import itertools -from dataclasses import dataclass, field -from typing import ( - Any, - ClassVar, - Dict, - Tuple, - Iterable, - Optional, - List, - Callable, -) -from dbt.exceptions import DbtInternalError -from dbt.utils import translate_aliases, md5 -from dbt.events.functions import fire_event -from dbt.events.types import NewConnectionOpening -from dbt.events.contextvars import get_node_info -from typing_extensions import Protocol -from dbt.dataclass_schema import ( - dbtClassMixin, - StrEnum, - ExtensibleDbtClassMixin, - HyphenatedDbtClassMixin, - ValidatedStringMixin, - register_pattern, -) -from dbt.contracts.util import Replaceable - - -class Identifier(ValidatedStringMixin): - ValidationRegex = r"^[A-Za-z_][A-Za-z0-9_]+$" - - -# we need register_pattern for jsonschema validation -register_pattern(Identifier, r"^[A-Za-z_][A-Za-z0-9_]+$") - - -@dataclass -class AdapterResponse(dbtClassMixin): - _message: str - code: Optional[str] = 
None - rows_affected: Optional[int] = None - - def __str__(self): - return self._message - - -class ConnectionState(StrEnum): - INIT = "init" - OPEN = "open" - CLOSED = "closed" - FAIL = "fail" - - -@dataclass(init=False) -class Connection(ExtensibleDbtClassMixin, Replaceable): - type: Identifier - name: Optional[str] = None - state: ConnectionState = ConnectionState.INIT - transaction_open: bool = False - _handle: Optional[Any] = None - _credentials: Optional[Any] = None - - def __init__( - self, - type: Identifier, - name: Optional[str], - credentials: dbtClassMixin, - state: ConnectionState = ConnectionState.INIT, - transaction_open: bool = False, - handle: Optional[Any] = None, - ) -> None: - self.type = type - self.name = name - self.state = state - self.credentials = credentials - self.transaction_open = transaction_open - self.handle = handle - - @property - def credentials(self): - return self._credentials - - @credentials.setter - def credentials(self, value): - self._credentials = value - - @property - def handle(self): - if isinstance(self._handle, LazyHandle): - try: - # this will actually change 'self._handle'. - self._handle.resolve(self) - except RecursionError as exc: - raise DbtInternalError( - "A connection's open() method attempted to read the handle value" - ) from exc - return self._handle - - @handle.setter - def handle(self, value): - self._handle = value - - -class LazyHandle: - """The opener must be a callable that takes a Connection object and opens the - connection, updating the handle on the Connection. - """ - - def __init__(self, opener: Callable[[Connection], Connection]): - self.opener = opener - - def resolve(self, connection: Connection) -> Connection: - fire_event( - NewConnectionOpening(connection_state=connection.state, node_info=get_node_info()) - ) - return self.opener(connection) - - -# see https://github.com/python/mypy/issues/4717#issuecomment-373932080 -# and https://github.com/python/mypy/issues/5374 -# for why we have type: ignore. Maybe someday dataclasses + abstract classes -# will work. -@dataclass # type: ignore -class Credentials(ExtensibleDbtClassMixin, Replaceable, metaclass=abc.ABCMeta): - database: str - schema: str - _ALIASES: ClassVar[Dict[str, str]] = field(default={}, init=False) - - @abc.abstractproperty - def type(self) -> str: - raise NotImplementedError("type not implemented for base credentials class") - - @property - def unique_field(self) -> str: - """Hashed and included in anonymous telemetry to track adapter adoption. 
- Return the field from Credentials that can uniquely identify - one team/organization using this adapter - """ - raise NotImplementedError("unique_field not implemented for base credentials class") - - def hashed_unique_field(self) -> str: - return md5(self.unique_field) - - def connection_info(self, *, with_aliases: bool = False) -> Iterable[Tuple[str, Any]]: - """Return an ordered iterator of key/value pairs for pretty-printing.""" - as_dict = self.to_dict(omit_none=False) - connection_keys = set(self._connection_keys()) - aliases: List[str] = [] - if with_aliases: - aliases = [k for k, v in self._ALIASES.items() if v in connection_keys] - for key in itertools.chain(self._connection_keys(), aliases): - if key in as_dict: - yield key, as_dict[key] - - @abc.abstractmethod - def _connection_keys(self) -> Tuple[str, ...]: - raise NotImplementedError - - @classmethod - def __pre_deserialize__(cls, data): - data = super().__pre_deserialize__(data) - data = cls.translate_aliases(data) - return data - - @classmethod - def translate_aliases(cls, kwargs: Dict[str, Any], recurse: bool = False) -> Dict[str, Any]: - return translate_aliases(kwargs, cls._ALIASES, recurse) - - def __post_serialize__(self, dct): - # no super() -- do we need it? - if self._ALIASES: - dct.update( - { - new_name: dct[canonical_name] - for new_name, canonical_name in self._ALIASES.items() - if canonical_name in dct - } - ) - return dct - - -class UserConfigContract(Protocol): - send_anonymous_usage_stats: bool - use_colors: Optional[bool] = None - partial_parse: Optional[bool] = None - printer_width: Optional[int] = None - - -class HasCredentials(Protocol): - credentials: Credentials - profile_name: str - user_config: UserConfigContract - target_name: str - threads: int - - def to_target_dict(self): - raise NotImplementedError("to_target_dict not implemented") - - -DEFAULT_QUERY_COMMENT = """ -{%- set comment_dict = {} -%} -{%- do comment_dict.update( - app='dbt', - dbt_version=dbt_version, - profile_name=target.get('profile_name'), - target_name=target.get('target_name'), -) -%} -{%- if node is not none -%} - {%- do comment_dict.update( - node_id=node.unique_id, - ) -%} -{% else %} - {# in the node context, the connection name is the node_id #} - {%- do comment_dict.update(connection_name=connection_name) -%} -{%- endif -%} -{{ return(tojson(comment_dict)) }} -""" - - -@dataclass -class QueryComment(HyphenatedDbtClassMixin): - comment: str = DEFAULT_QUERY_COMMENT - append: bool = False - job_label: bool = False - - -class AdapterRequiredConfig(HasCredentials, Protocol): - project_name: str - query_comment: QueryComment - cli_vars: Dict[str, Any] - target_path: str diff --git a/core/dbt/contracts/files.py b/core/dbt/contracts/files.py index f54533c38c1..793451c0b00 100644 --- a/core/dbt/contracts/files.py +++ b/core/dbt/contracts/files.py @@ -1,12 +1,12 @@ -import hashlib import os from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Union from mashumaro.types import SerializableType -from typing import List, Optional, Union, Dict, Any +from dbt.artifacts.resources.base import FileHash from dbt.constants import MAXIMUM_SEED_SIZE -from dbt.dataclass_schema import dbtClassMixin, StrEnum +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin from .util import SourceKey @@ -22,6 +22,7 @@ class ParseFileType(StrEnum): Documentation = "docs" Schema = "schema" Hook = "hook" # not a real filetype, from dbt_project.yml + Fixture = "fixture" parse_file_type_to_parser = { @@ -35,6 +36,7 @@ 
class ParseFileType(StrEnum): ParseFileType.Documentation: "DocumentationParser", ParseFileType.Schema: "SchemaParser", ParseFileType.Hook: "HookParser", + ParseFileType.Fixture: "FixtureParser", } @@ -68,46 +70,6 @@ def seed_too_large(self) -> bool: return os.stat(self.full_path).st_size > MAXIMUM_SEED_SIZE -@dataclass -class FileHash(dbtClassMixin): - name: str # the hash type name - checksum: str # the hashlib.hash_type().hexdigest() of the file contents - - @classmethod - def empty(cls): - return FileHash(name="none", checksum="") - - @classmethod - def path(cls, path: str): - return FileHash(name="path", checksum=path) - - def __eq__(self, other): - if not isinstance(other, FileHash): - return NotImplemented - - if self.name == "none" or self.name != other.name: - return False - - return self.checksum == other.checksum - - def compare(self, contents: str) -> bool: - """Compare the file contents with the given hash""" - if self.name == "none": - return False - - return self.from_contents(contents, name=self.name) == self.checksum - - @classmethod - def from_contents(cls, contents: str, name="sha256") -> "FileHash": - """Create a file hash from the given file contents. The hash is always - the utf-8 encoding of the contents given, because dbt only reads files - as utf-8. - """ - data = contents.encode("utf-8") - checksum = hashlib.new(name, data).hexdigest() - return cls(name=name, checksum=checksum) - - @dataclass class RemoteFile(dbtClassMixin): def __init__(self, language) -> None: @@ -152,7 +114,6 @@ class BaseSourceFile(dbtClassMixin, SerializableType): parse_file_type: Optional[ParseFileType] = None # we don't want to serialize this contents: Optional[str] = None - # the unique IDs contained in this file @property def file_id(self): @@ -160,6 +121,10 @@ def file_id(self): return None return f"{self.project_name}://{self.path.original_file_path}" + @property + def original_file_path(self): + return self.path.original_file_path + def _serialize(self): dct = self.to_dict() return dct @@ -168,12 +133,14 @@ def _serialize(self): def _deserialize(cls, dct: Dict[str, int]): if dct["parse_file_type"] == "schema": sf = SchemaSourceFile.from_dict(dct) + elif dct["parse_file_type"] == "fixture": + sf = FixtureSourceFile.from_dict(dct) else: sf = SourceFile.from_dict(dct) return sf - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) # remove empty lists to save space dct_keys = list(dct.keys()) for key in dct_keys: @@ -221,14 +188,23 @@ def remote(cls, contents: str, project_name: str, language: str) -> "SourceFile" class SchemaSourceFile(BaseSourceFile): dfy: Dict[str, Any] = field(default_factory=dict) # these are in the manifest.nodes dictionary - tests: Dict[str, Any] = field(default_factory=dict) + data_tests: Dict[str, Any] = field(default_factory=dict) sources: List[str] = field(default_factory=list) exposures: List[str] = field(default_factory=list) metrics: List[str] = field(default_factory=list) + # The following field will no longer be used. Leaving + # here to avoid breaking existing projects. To be removed + # later if possible. + generated_metrics: List[str] = field(default_factory=list) + # metrics generated from semantic_model measures. The key is + # the name of the semantic_model, so that we can find it later. 
+ metrics_from_measures: Dict[str, Any] = field(default_factory=dict) groups: List[str] = field(default_factory=list) # node patches contain models, seeds, snapshots, analyses ndp: List[str] = field(default_factory=list) semantic_models: List[str] = field(default_factory=list) + unit_tests: List[str] = field(default_factory=list) + saved_queries: List[str] = field(default_factory=list) # any macro patches in this file by macro unique_id. mcp: Dict[str, str] = field(default_factory=dict) # any source patches in this file. The entries are package, name pairs @@ -255,8 +231,8 @@ def macro_patches(self): def source_patches(self): return self.sop - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) # Remove partial parsing specific data for key in ("pp_test_index", "pp_dict"): if key in dct: @@ -269,31 +245,65 @@ def append_patch(self, yaml_key, unique_id): def add_test(self, node_unique_id, test_from): name = test_from["name"] key = test_from["key"] - if key not in self.tests: - self.tests[key] = {} - if name not in self.tests[key]: - self.tests[key][name] = [] - self.tests[key][name].append(node_unique_id) + if key not in self.data_tests: + self.data_tests[key] = {} + if name not in self.data_tests[key]: + self.data_tests[key][name] = [] + self.data_tests[key][name].append(node_unique_id) - # this is only used in unit tests + # this is only used in tests/unit def remove_tests(self, yaml_key, name): - if yaml_key in self.tests: - if name in self.tests[yaml_key]: - del self.tests[yaml_key][name] + if yaml_key in self.data_tests: + if name in self.data_tests[yaml_key]: + del self.data_tests[yaml_key][name] - # this is only used in tests (unit + functional) + # this is only used in the tests directory (unit + functional) def get_tests(self, yaml_key, name): - if yaml_key in self.tests: - if name in self.tests[yaml_key]: - return self.tests[yaml_key][name] + if yaml_key in self.data_tests: + if name in self.data_tests[yaml_key]: + return self.data_tests[yaml_key][name] return [] + def add_metrics_from_measures(self, semantic_model_name: str, metric_unique_id: str): + if self.generated_metrics: + # Probably not needed, but for safety sake, convert the + # old generated_metrics to metrics_from_measures. + self.fix_metrics_from_measures() + if semantic_model_name not in self.metrics_from_measures: + self.metrics_from_measures[semantic_model_name] = [] + self.metrics_from_measures[semantic_model_name].append(metric_unique_id) + + def fix_metrics_from_measures(self): + # Temporary method to fix up existing projects with a partial parse file. + # This should only be called if SchemaSourceFile in a msgpack + # pack manifest has an existing "generated_metrics" list, to turn it + # it into a "metrics_from_measures" dictionary, so that we can + # correctly partially parse. + # This code can be removed when "generated_metrics" is removed. + generated_metrics = self.generated_metrics + self.generated_metrics = [] # Should never be needed again + # For each metric_unique_id we loop through the semantic models + # looking for the name of the "measure" which generated the metric. + # When it's found, add it to "metrics_from_measures", with a key + # of the semantic_model name, and a list of metrics. 
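+        # Worked example (illustrative): generated_metrics = ["metric.my_project.revenue"]
+        # combined with a semantic model named "orders" that has a measure named "revenue"
+        # yields metrics_from_measures == {"orders": ["metric.my_project.revenue"]}
+        # after the loop below runs.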
+ for metric_unique_id in generated_metrics: + parts = metric_unique_id.split(".") + # get the metric_name + metric_name = parts[-1] + if "semantic_models" in self.dict_from_yaml: + for sem_model in self.dict_from_yaml["semantic_models"]: + if "measures" in sem_model: + for measure in sem_model["measures"]: + if measure["name"] == metric_name: + self.add_metrics_from_measures(sem_model["name"], metric_unique_id) + break + def get_key_and_name_for_test(self, test_unique_id): yaml_key = None block_name = None - for key in self.tests.keys(): - for name in self.tests[key]: - for unique_id in self.tests[key][name]: + for key in self.data_tests.keys(): + for name in self.data_tests[key]: + for unique_id in self.data_tests[key][name]: if unique_id == test_unique_id: yaml_key = key block_name = name @@ -302,9 +312,9 @@ def get_key_and_name_for_test(self, test_unique_id): def get_all_test_ids(self): test_ids = [] - for key in self.tests.keys(): - for name in self.tests[key]: - test_ids.extend(self.tests[key][name]) + for key in self.data_tests.keys(): + for name in self.data_tests[key]: + test_ids.extend(self.data_tests[key][name]) return test_ids def add_env_var(self, var, yaml_key, name): @@ -324,4 +334,14 @@ def delete_from_env_vars(self, yaml_key, name): del self.env_vars[yaml_key] -AnySourceFile = Union[SchemaSourceFile, SourceFile] +@dataclass +class FixtureSourceFile(BaseSourceFile): + fixture: Optional[str] = None + unit_tests: List[str] = field(default_factory=list) + + def add_unit_test(self, value): + if value not in self.unit_tests: + self.unit_tests.append(value) + + +AnySourceFile = Union[SchemaSourceFile, SourceFile, FixtureSourceFile] diff --git a/core/dbt/contracts/graph/manifest.py b/core/dbt/contracts/graph/manifest.py index 20d2dc5f394..f4cdafea737 100644 --- a/core/dbt/contracts/graph/manifest.py +++ b/core/dbt/contracts/graph/manifest.py @@ -1,30 +1,50 @@ import enum from collections import defaultdict -from dataclasses import dataclass, field -from itertools import chain, islice -from mashumaro.mixins.msgpack import DataClassMessagePackMixin +from dataclasses import dataclass, field, replace +from itertools import chain from multiprocessing.synchronize import Lock from typing import ( + Any, + Callable, + ClassVar, DefaultDict, Dict, + Generic, List, - Optional, - Union, Mapping, MutableMapping, - Any, + Optional, Set, Tuple, TypeVar, - Callable, - Generic, - AbstractSet, - ClassVar, + Union, ) + from typing_extensions import Protocol -from uuid import UUID +import dbt_common.exceptions +import dbt_common.utils +from dbt import deprecations, tracking +from dbt.adapters.exceptions import ( + DuplicateMacroInPackageError, + DuplicateMaterializationNameError, +) +from dbt.adapters.factory import get_adapter_package_names + +# to preserve import paths +from dbt.artifacts.resources import BaseResource, DeferRelation, NodeVersion, RefArgs +from dbt.artifacts.resources.v1.config import NodeConfig +from dbt.artifacts.schemas.manifest import ManifestMetadata, UniqueID, WritableManifest +from dbt.clients.jinja_static import statically_parse_ref_or_source +from dbt.contracts.files import ( + AnySourceFile, + FileHash, + FixtureSourceFile, + SchemaSourceFile, + SourceFile, +) from dbt.contracts.graph.nodes import ( + RESOURCE_CLASS_TO_NODE_CLASS, BaseNode, Documentation, Exposure, @@ -35,39 +55,38 @@ ManifestNode, Metric, ModelNode, - DeferRelation, - ResultNode, + SavedQuery, + SeedNode, SemanticModel, SourceDefinition, + UnitTestDefinition, + UnitTestFileFixture, 
UnpatchedSourceDefinition, ) -from dbt.contracts.graph.unparsed import SourcePatch, NodeVersion, UnparsedVersion -from dbt.contracts.graph.manifest_upgrade import upgrade_manifest_json -from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile -from dbt.contracts.util import BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version -from dbt.dataclass_schema import dbtClassMixin +from dbt.contracts.graph.unparsed import SourcePatch, UnparsedVersion +from dbt.contracts.util import SourceKey +from dbt.events.types import UnpinnedRefNewVersionAvailable from dbt.exceptions import ( + AmbiguousResourceNameRefError, CompilationError, DuplicateResourceNameError, - DuplicateMacroInPackageError, - DuplicateMaterializationNameError, - AmbiguousResourceNameRefError, ) -from dbt.helper_types import PathSet -from dbt.events.functions import fire_event -from dbt.events.types import MergedFromState, UnpinnedRefNewVersionAvailable -from dbt.events.contextvars import get_node_info -from dbt.node_types import NodeType, AccessType -from dbt.flags import get_flags, MP_CONTEXT -from dbt import tracking -import dbt.utils - +from dbt.flags import get_flags +from dbt.mp_context import get_mp_context +from dbt.node_types import ( + REFABLE_NODE_TYPES, + VERSIONED_NODE_TYPES, + AccessType, + NodeType, +) +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.events.contextvars import get_node_info +from dbt_common.events.functions import fire_event +from dbt_common.helper_types import PathSet -NodeEdgeMap = Dict[str, List[str]] PackageName = str DocName = str RefName = str -UniqueID = str def find_unique_id_for_package(storage, key, package: Optional[PackageName]): @@ -88,7 +107,7 @@ def find_unique_id_for_package(storage, key, package: Optional[PackageName]): class DocLookup(dbtClassMixin): - def __init__(self, manifest: "Manifest"): + def __init__(self, manifest: "Manifest") -> None: self.storage: Dict[str, Dict[PackageName, UniqueID]] = {} self.populate(manifest) @@ -112,14 +131,14 @@ def populate(self, manifest): def perform_lookup(self, unique_id: UniqueID, manifest) -> Documentation: if unique_id not in manifest.docs: - raise dbt.exceptions.DbtInternalError( + raise dbt_common.exceptions.DbtInternalError( f"Doc {unique_id} found in cache but not found in manifest" ) return manifest.docs[unique_id] class SourceLookup(dbtClassMixin): - def __init__(self, manifest: "Manifest"): + def __init__(self, manifest: "Manifest") -> None: self.storage: Dict[str, Dict[PackageName, UniqueID]] = {} self.populate(manifest) @@ -145,7 +164,7 @@ def populate(self, manifest): def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> SourceDefinition: if unique_id not in manifest.sources: - raise dbt.exceptions.DbtInternalError( + raise dbt_common.exceptions.DbtInternalError( f"Source {unique_id} found in cache but not found in manifest" ) return manifest.sources[unique_id] @@ -153,10 +172,10 @@ def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> SourceDef class RefableLookup(dbtClassMixin): # model, seed, snapshot - _lookup_types: ClassVar[set] = set(NodeType.refable()) - _versioned_types: ClassVar[set] = set(NodeType.versioned()) + _lookup_types: ClassVar[set] = set(REFABLE_NODE_TYPES) + _versioned_types: ClassVar[set] = set(VERSIONED_NODE_TYPES) - def __init__(self, manifest: "Manifest"): + def __init__(self, manifest: "Manifest") -> None: self.storage: Dict[str, Dict[PackageName, UniqueID]] = {} self.populate(manifest) @@ -244,7 +263,7 @@ def 
perform_lookup(self, unique_id: UniqueID, manifest) -> ManifestNode: if unique_id in manifest.nodes: node = manifest.nodes[unique_id] else: - raise dbt.exceptions.DbtInternalError( + raise dbt_common.exceptions.DbtInternalError( f"Node {unique_id} found in cache but not found in manifest" ) return node @@ -267,7 +286,7 @@ def _find_unique_ids_for_package(self, key, package: Optional[PackageName]) -> L class MetricLookup(dbtClassMixin): - def __init__(self, manifest: "Manifest"): + def __init__(self, manifest: "Manifest") -> None: self.storage: Dict[str, Dict[PackageName, UniqueID]] = {} self.populate(manifest) @@ -293,12 +312,47 @@ def populate(self, manifest): def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> Metric: if unique_id not in manifest.metrics: - raise dbt.exceptions.DbtInternalError( + raise dbt_common.exceptions.DbtInternalError( f"Metric {unique_id} found in cache but not found in manifest" ) return manifest.metrics[unique_id] +class SavedQueryLookup(dbtClassMixin): + """Lookup utility for finding SavedQuery nodes""" + + def __init__(self, manifest: "Manifest") -> None: + self.storage: Dict[str, Dict[PackageName, UniqueID]] = {} + self.populate(manifest) + + def get_unique_id(self, search_name, package: Optional[PackageName]): + return find_unique_id_for_package(self.storage, search_name, package) + + def find(self, search_name, package: Optional[PackageName], manifest: "Manifest"): + unique_id = self.get_unique_id(search_name, package) + if unique_id is not None: + return self.perform_lookup(unique_id, manifest) + return None + + def add_saved_query(self, saved_query: SavedQuery): + if saved_query.search_name not in self.storage: + self.storage[saved_query.search_name] = {} + + self.storage[saved_query.search_name][saved_query.package_name] = saved_query.unique_id + + def populate(self, manifest): + for saved_query in manifest.saved_queries.values(): + if hasattr(saved_query, "name"): + self.add_saved_query(saved_query) + + def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> SavedQuery: + if unique_id not in manifest.saved_queries: + raise dbt_common.exceptions.DbtInternalError( + f"SavedQUery {unique_id} found in cache but not found in manifest" + ) + return manifest.saved_queries[unique_id] + + class SemanticModelByMeasureLookup(dbtClassMixin): """Lookup utility for finding SemanticModel by measure @@ -306,7 +360,7 @@ class SemanticModelByMeasureLookup(dbtClassMixin): the semantic models in a manifest. 
""" - def __init__(self, manifest: "Manifest"): + def __init__(self, manifest: "Manifest") -> None: self.storage: DefaultDict[str, Dict[PackageName, UniqueID]] = defaultdict(dict) self.populate(manifest) @@ -331,28 +385,39 @@ def populate(self, manifest: "Manifest"): """Populate storage with all the measure + package paths to the Manifest's SemanticModels""" for semantic_model in manifest.semantic_models.values(): self.add(semantic_model=semantic_model) + for disabled in manifest.disabled.values(): + for node in disabled: + if isinstance(node, SemanticModel): + self.add(semantic_model=node) def perform_lookup(self, unique_id: UniqueID, manifest: "Manifest") -> SemanticModel: """Tries to get a SemanticModel from the Manifest""" - semantic_model = manifest.semantic_models.get(unique_id) - if semantic_model is None: - raise dbt.exceptions.DbtInternalError( + enabled_semantic_model: Optional[SemanticModel] = manifest.semantic_models.get(unique_id) + disabled_semantic_model: Optional[List] = manifest.disabled.get(unique_id) + + if isinstance(enabled_semantic_model, SemanticModel): + return enabled_semantic_model + elif disabled_semantic_model is not None and isinstance( + disabled_semantic_model[0], SemanticModel + ): + return disabled_semantic_model[0] + else: + raise dbt_common.exceptions.DbtInternalError( f"Semantic model `{unique_id}` found in cache but not found in manifest" ) - return semantic_model -# This handles both models/seeds/snapshots and sources/metrics/exposures +# This handles both models/seeds/snapshots and sources/metrics/exposures/semantic_models class DisabledLookup(dbtClassMixin): - def __init__(self, manifest: "Manifest"): + def __init__(self, manifest: "Manifest") -> None: self.storage: Dict[str, Dict[PackageName, List[Any]]] = {} self.populate(manifest) - def populate(self, manifest): + def populate(self, manifest: "Manifest"): for node in list(chain.from_iterable(manifest.disabled.values())): self.add_node(node) - def add_node(self, node): + def add_node(self, node: GraphMemberNode) -> None: if node.search_name not in self.storage: self.storage[node.search_name] = {} if node.package_name not in self.storage[node.search_name]: @@ -362,8 +427,12 @@ def add_node(self, node): # This should return a list of disabled nodes. 
It's different from # the other Lookup functions in that it returns full nodes, not just unique_ids def find( - self, search_name, package: Optional[PackageName], version: Optional[NodeVersion] = None - ): + self, + search_name, + package: Optional[PackageName], + version: Optional[NodeVersion] = None, + resource_types: Optional[List[NodeType]] = None, + ) -> Optional[List[Any]]: if version: search_name = f"{search_name}.v{version}" @@ -372,16 +441,29 @@ def find( pkg_dct: Mapping[PackageName, List[Any]] = self.storage[search_name] + nodes = [] if package is None: if not pkg_dct: return None else: - return next(iter(pkg_dct.values())) + nodes = next(iter(pkg_dct.values())) elif package in pkg_dct: - return pkg_dct[package] + nodes = pkg_dct[package] else: return None + if resource_types is None: + return nodes + else: + new_nodes = [] + for node in nodes: + if node.resource_type in resource_types: + new_nodes.append(node) + if not new_nodes: + return None + else: + return new_nodes + class AnalysisLookup(RefableLookup): _lookup_types: ClassVar[set] = set([NodeType.Analysis]) @@ -401,59 +483,6 @@ def _packages_to_search( return [current_project, node_package, None] -@dataclass -class ManifestMetadata(BaseArtifactMetadata): - """Metadata for the manifest.""" - - dbt_schema_version: str = field( - default_factory=lambda: str(WritableManifest.dbt_schema_version) - ) - project_name: Optional[str] = field( - default=None, - metadata={ - "description": "Name of the root project", - }, - ) - project_id: Optional[str] = field( - default=None, - metadata={ - "description": "A unique identifier for the project, hashed from the project name", - }, - ) - user_id: Optional[UUID] = field( - default=None, - metadata={ - "description": "A unique identifier for the user", - }, - ) - send_anonymous_usage_stats: Optional[bool] = field( - default=None, - metadata=dict( - description=("Whether dbt is configured to send anonymous usage statistics") - ), - ) - adapter_type: Optional[str] = field( - default=None, - metadata=dict(description="The type name of the adapter"), - ) - - def __post_init__(self): - if tracking.active_user is None: - return - - if self.user_id is None: - self.user_id = tracking.active_user.id - - if self.send_anonymous_usage_stats is None: - self.send_anonymous_usage_stats = get_flags().SEND_ANONYMOUS_USAGE_STATS - - @classmethod - def default(cls): - return cls( - dbt_schema_version=str(WritableManifest.dbt_schema_version), - ) - - def _sort_values(dct): """Given a dictionary, sort each value. This makes output deterministic, which helps for tests. @@ -562,11 +591,29 @@ def __lt__(self, other: object) -> bool: class CandidateList(List[M]): - def last(self) -> Optional[Macro]: + def last_candidate( + self, valid_localities: Optional[List[Locality]] = None + ) -> Optional[MacroCandidate]: + """ + Obtain the last (highest precedence) MacroCandidate from the CandidateList of any locality in valid_localities. + If valid_localities is not specified, return the last MacroCandidate of any locality. 
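+        For example, if the candidates are a core macro and an imported (package)
+        macro, last_candidate() returns the imported one, since it has higher
+        precedence, while last_candidate(valid_localities=[Locality.Core, Locality.Root])
+        skips the imported candidate and returns the core macro instead.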
+ """ if not self: return None self.sort() - return self[-1].macro + + if valid_localities is None: + return self[-1] + + for candidate in reversed(self): + if candidate.locality in valid_localities: + return candidate + + return None + + def last(self) -> Optional[Macro]: + last_candidate = self.last_candidate() + return last_candidate.macro if last_candidate is not None else None def _get_locality(macro: Macro, root_project_name: str, internal_packages: Set[str]) -> Locality: @@ -598,6 +645,9 @@ class Disabled(Generic[D]): MaybeMetricNode = Optional[Union[Metric, Disabled[Metric]]] +MaybeSavedQueryNode = Optional[Union[SavedQuery, Disabled[SavedQuery]]] + + MaybeDocumentation = Optional[Documentation] @@ -622,6 +672,8 @@ class MacroMethods: def __init__(self): self.macros = [] self.metadata = {} + self._macros_by_name = {} + self._macros_by_package = {} def find_macro_by_name( self, name: str, root_project_name: str, package: Optional[str] @@ -686,14 +738,14 @@ def _find_macros_by_name( filter: Optional[Callable[[MacroCandidate], bool]] = None, ) -> CandidateList: """Find macros by their name.""" - # avoid an import cycle - from dbt.adapters.factory import get_adapter_package_names - candidates: CandidateList = CandidateList() + + macros_by_name = self.get_macros_by_name() + if name not in macros_by_name: + return candidates + packages = set(get_adapter_package_names(self.metadata.adapter_type)) - for unique_id, macro in self.macros.items(): - if macro.name != name: - continue + for macro in macros_by_name[name]: candidate = MacroCandidate( locality=_get_locality(macro, root_project_name, packages), macro=macro, @@ -703,6 +755,49 @@ def _find_macros_by_name( return candidates + def get_macros_by_name(self) -> Dict[str, List[Macro]]: + if self._macros_by_name is None: + # The by-name mapping doesn't exist yet (perhaps because the manifest + # was deserialized), so we build it. + self._macros_by_name = self._build_macros_by_name(self.macros) + + return self._macros_by_name + + @staticmethod + def _build_macros_by_name(macros: Mapping[str, Macro]) -> Dict[str, List[Macro]]: + # Convert a macro dictionary keyed on unique id to a flattened version + # keyed on macro name for faster lookup by name. Since macro names are + # not necessarily unique, the dict value is a list. + macros_by_name: Dict[str, List[Macro]] = {} + for macro in macros.values(): + if macro.name not in macros_by_name: + macros_by_name[macro.name] = [] + + macros_by_name[macro.name].append(macro) + + return macros_by_name + + def get_macros_by_package(self) -> Dict[str, Dict[str, Macro]]: + if self._macros_by_package is None: + # The by-package mapping doesn't exist yet (perhaps because the manifest + # was deserialized), so we build it. + self._macros_by_package = self._build_macros_by_package(self.macros) + + return self._macros_by_package + + @staticmethod + def _build_macros_by_package(macros: Mapping[str, Macro]) -> Dict[str, Dict[str, Macro]]: + # Convert a macro dictionary keyed on unique id to a flattened version + # keyed on package name for faster lookup by name. 
+ macros_by_package: Dict[str, Dict[str, Macro]] = {} + for macro in macros.values(): + if macro.package_name not in macros_by_package: + macros_by_package[macro.package_name] = {} + macros_by_name = macros_by_package[macro.package_name] + macros_by_name[macro.name] = macro + + return macros_by_package + @dataclass class ParsingInfo: @@ -719,8 +814,12 @@ class ManifestStateCheck(dbtClassMixin): project_hashes: MutableMapping[str, FileHash] = field(default_factory=dict) +NodeClassT = TypeVar("NodeClassT", bound="BaseNode") +ResourceClassT = TypeVar("ResourceClassT", bound="BaseResource") + + @dataclass -class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): +class Manifest(MacroMethods, dbtClassMixin): """The manifest for the full graph, after parsing and during compilation.""" # These attributes are both positional and by keyword. If an attribute @@ -742,6 +841,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): disabled: MutableMapping[str, List[GraphMemberNode]] = field(default_factory=dict) env_vars: MutableMapping[str, str] = field(default_factory=dict) semantic_models: MutableMapping[str, SemanticModel] = field(default_factory=dict) + unit_tests: MutableMapping[str, UnitTestDefinition] = field(default_factory=dict) + saved_queries: MutableMapping[str, SavedQuery] = field(default_factory=dict) + fixtures: MutableMapping[str, UnitTestFileFixture] = field(default_factory=dict) _doc_lookup: Optional[DocLookup] = field( default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None} @@ -755,6 +857,9 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): _metric_lookup: Optional[MetricLookup] = field( default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None} ) + _saved_query_lookup: Optional[SavedQueryLookup] = field( + default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None} + ) _semantic_model_by_measure_lookup: Optional[SemanticModelByMeasureLookup] = field( default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None} ) @@ -769,11 +874,19 @@ class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin): metadata={"serialize": lambda x: None, "deserialize": lambda x: None}, ) _lock: Lock = field( - default_factory=MP_CONTEXT.Lock, + default_factory=get_mp_context().Lock, + metadata={"serialize": lambda x: None, "deserialize": lambda x: None}, + ) + _macros_by_name: Optional[Dict[str, List[Macro]]] = field( + default=None, + metadata={"serialize": lambda x: None, "deserialize": lambda x: None}, + ) + _macros_by_package: Optional[Dict[str, Dict[str, Macro]]] = field( + default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None}, ) - def __pre_serialize__(self): + def __pre_serialize__(self, context: Optional[Dict] = None): # serialization won't work with anything except an empty source_patches because # tuple keys are not supported, so ensure it's empty self.source_patches = {} @@ -781,7 +894,7 @@ def __pre_serialize__(self): @classmethod def __post_deserialize__(cls, obj): - obj._lock = MP_CONTEXT.Lock() + obj._lock = get_mp_context().Lock() return obj def build_flat_graph(self): @@ -799,6 +912,9 @@ def build_flat_graph(self): "semantic_models": { k: v.to_dict(omit_none=False) for k, v in self.semantic_models.items() }, + "saved_queries": { + k: v.to_dict(omit_none=False) for k, v in self.saved_queries.items() + }, } def build_disabled_by_file_id(self): @@ -826,7 +942,7 @@ def 
_materialization_candidates_for( adapter_type: str, specificity: int, ) -> CandidateList: - full_name = dbt.utils.get_materialization_macro_name( + full_name = dbt_common.utils.get_materialization_macro_name( materialization_name=materialization_name, adapter_type=adapter_type, with_prefix=False, @@ -850,7 +966,33 @@ def find_materialization_macro_by_name( for specificity, atype in enumerate(self._get_parent_adapter_types(adapter_type)) ) ) - return candidates.last() + core_candidates = [ + candidate for candidate in candidates if candidate.locality == Locality.Core + ] + + materialization_candidate = candidates.last_candidate() + # If an imported materialization macro was found that also had a core candidate, fire a deprecation + if ( + materialization_candidate is not None + and materialization_candidate.locality == Locality.Imported + and core_candidates + ): + # preserve legacy behaviour - allow materialization override + if ( + get_flags().require_explicit_package_overrides_for_builtin_materializations + is False + ): + deprecations.warn( + "package-materialization-override", + package_name=materialization_candidate.macro.package_name, + materialization_name=materialization_name, + ) + else: + materialization_candidate = candidates.last_candidate( + valid_localities=[Locality.Core, Locality.Root] + ) + + return materialization_candidate.macro if materialization_candidate else None def get_resource_fqns(self) -> Mapping[str, PathSet]: resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {} @@ -860,6 +1002,8 @@ def get_resource_fqns(self) -> Mapping[str, PathSet]: self.sources.values(), self.metrics.values(), self.semantic_models.values(), + self.saved_queries.values(), + self.unit_tests.values(), ) for resource in all_resources: resource_type_plural = resource.resource_type.pluralize() @@ -895,6 +1039,8 @@ def deepcopy(self): files={k: _deepcopy(v) for k, v in self.files.items()}, state_check=_deepcopy(self.state_check), semantic_models={k: _deepcopy(v) for k, v in self.semantic_models.items()}, + unit_tests={k: _deepcopy(v) for k, v in self.unit_tests.items()}, + saved_queries={k: _deepcopy(v) for k, v in self.saved_queries.items()}, ) copy.build_flat_graph() return copy @@ -907,6 +1053,8 @@ def build_parent_and_child_maps(self): self.exposures.values(), self.metrics.values(), self.semantic_models.values(), + self.saved_queries.values(), + self.unit_tests.values(), ) ) forward_edges, backward_edges = build_node_edges(edge_members) @@ -927,33 +1075,105 @@ def build_group_map(self): groupable_nodes = list( chain( self.nodes.values(), + self.saved_queries.values(), + self.semantic_models.values(), self.metrics.values(), ) ) group_map = {group.name: [] for group in self.groups.values()} for node in groupable_nodes: if node.group is not None: - group_map[node.group].append(node.unique_id) + # group updates are not included with state:modified and + # by ignoring the groups that aren't in the group map we + # can avoid hitting errors for groups that are not getting + # updated. 
This is a hack but any groups that are not + # valid will be caught in + # parser.manifest.ManifestLoader.check_valid_group_config_node + if node.group in group_map: + group_map[node.group].append(node.unique_id) self.group_map = group_map + def fill_tracking_metadata(self): + self.metadata.user_id = tracking.active_user.id if tracking.active_user else None + self.metadata.send_anonymous_usage_stats = get_flags().SEND_ANONYMOUS_USAGE_STATS + + @classmethod + def from_writable_manifest(cls, writable_manifest: WritableManifest) -> "Manifest": + manifest = Manifest( + nodes=cls._map_resources_to_map_nodes(writable_manifest.nodes), + disabled=cls._map_list_resources_to_map_list_nodes(writable_manifest.disabled), + unit_tests=cls._map_resources_to_map_nodes(writable_manifest.unit_tests), + sources=cls._map_resources_to_map_nodes(writable_manifest.sources), + macros=cls._map_resources_to_map_nodes(writable_manifest.macros), + docs=cls._map_resources_to_map_nodes(writable_manifest.docs), + exposures=cls._map_resources_to_map_nodes(writable_manifest.exposures), + metrics=cls._map_resources_to_map_nodes(writable_manifest.metrics), + groups=cls._map_resources_to_map_nodes(writable_manifest.groups), + semantic_models=cls._map_resources_to_map_nodes(writable_manifest.semantic_models), + saved_queries=cls._map_resources_to_map_nodes(writable_manifest.saved_queries), + selectors={ + selector_id: selector + for selector_id, selector in writable_manifest.selectors.items() + }, + ) + + return manifest + + def _map_nodes_to_map_resources(cls, nodes_map: MutableMapping[str, NodeClassT]): + return {node_id: node.to_resource() for node_id, node in nodes_map.items()} + + def _map_list_nodes_to_map_list_resources( + cls, nodes_map: MutableMapping[str, List[NodeClassT]] + ): + return { + node_id: [node.to_resource() for node in node_list] + for node_id, node_list in nodes_map.items() + } + + @classmethod + def _map_resources_to_map_nodes(cls, resources_map: Mapping[str, ResourceClassT]): + return { + node_id: RESOURCE_CLASS_TO_NODE_CLASS[type(resource)].from_resource(resource) + for node_id, resource in resources_map.items() + } + + @classmethod + def _map_list_resources_to_map_list_nodes( + cls, resources_map: Optional[Mapping[str, List[ResourceClassT]]] + ): + if resources_map is None: + return {} + + return { + node_id: [ + RESOURCE_CLASS_TO_NODE_CLASS[type(resource)].from_resource(resource) + for resource in resource_list + ] + for node_id, resource_list in resources_map.items() + } + def writable_manifest(self) -> "WritableManifest": self.build_parent_and_child_maps() self.build_group_map() + self.fill_tracking_metadata() + return WritableManifest( - nodes=self.nodes, - sources=self.sources, - macros=self.macros, - docs=self.docs, - exposures=self.exposures, - metrics=self.metrics, - groups=self.groups, + nodes=self._map_nodes_to_map_resources(self.nodes), + sources=self._map_nodes_to_map_resources(self.sources), + macros=self._map_nodes_to_map_resources(self.macros), + docs=self._map_nodes_to_map_resources(self.docs), + exposures=self._map_nodes_to_map_resources(self.exposures), + metrics=self._map_nodes_to_map_resources(self.metrics), + groups=self._map_nodes_to_map_resources(self.groups), selectors=self.selectors, metadata=self.metadata, - disabled=self.disabled, + disabled=self._map_list_nodes_to_map_list_resources(self.disabled), child_map=self.child_map, parent_map=self.parent_map, group_map=self.group_map, - semantic_models=self.semantic_models, + 
semantic_models=self._map_nodes_to_map_resources(self.semantic_models), + unit_tests=self._map_nodes_to_map_resources(self.unit_tests), + saved_queries=self._map_nodes_to_map_resources(self.saved_queries), ) def write(self, path): @@ -972,9 +1192,13 @@ def expect(self, unique_id: str) -> GraphMemberNode: return self.metrics[unique_id] elif unique_id in self.semantic_models: return self.semantic_models[unique_id] + elif unique_id in self.unit_tests: + return self.unit_tests[unique_id] + elif unique_id in self.saved_queries: + return self.saved_queries[unique_id] else: # something terrible has happened - raise dbt.exceptions.DbtInternalError( + raise dbt_common.exceptions.DbtInternalError( "Expected node {} not found in manifest".format(unique_id) ) @@ -1008,6 +1232,13 @@ def metric_lookup(self) -> MetricLookup: self._metric_lookup = MetricLookup(self) return self._metric_lookup + @property + def saved_query_lookup(self) -> SavedQueryLookup: + """Retuns a SavedQueryLookup, instantiating it first if necessary.""" + if self._saved_query_lookup is None: + self._saved_query_lookup = SavedQueryLookup(self) + return self._saved_query_lookup + @property def semantic_model_by_measure_lookup(self) -> SemanticModelByMeasureLookup: """Gets (and creates if necessary) the lookup utility for getting SemanticModels by measures""" @@ -1056,8 +1287,7 @@ def resolve_refs( return resolved_refs - # Called by dbt.parser.manifest._process_refs_for_exposure, _process_refs_for_metric, - # and dbt.parser.manifest._process_refs_for_node + # Called by dbt.parser.manifest._process_refs & ManifestLoader.check_for_model_deprecations def resolve_ref( self, source_node: GraphMemberNode, @@ -1082,7 +1312,12 @@ def resolve_ref( # it's possible that the node is disabled if disabled is None: - disabled = self.disabled_lookup.find(target_model_name, pkg, target_model_version) + disabled = self.disabled_lookup.find( + target_model_name, + pkg, + version=target_model_version, + resource_types=REFABLE_NODE_TYPES, + ) if disabled: return Disabled(disabled[0]) @@ -1142,6 +1377,35 @@ def resolve_metric( return Disabled(disabled[0]) return None + def resolve_saved_query( + self, + target_saved_query_name: str, + target_saved_query_package: Optional[str], + current_project: str, + node_package: str, + ) -> MaybeSavedQueryNode: + """Tries to find the SavedQuery by name within the available project and packages. + + Will return the first enabled SavedQuery matching the name found while iterating over + the scoped packages. If no enabled SavedQuery node match is found, returns the last + disabled SavedQuery node. Otherwise it returns None. 
+ """ + disabled: Optional[List[SavedQuery]] = None + candidates = _packages_to_search(current_project, node_package, target_saved_query_package) + for pkg in candidates: + saved_query = self.saved_query_lookup.find(target_saved_query_name, pkg, self) + + if saved_query is not None and saved_query.config.enabled: + return saved_query + + # it's possible that the node is disabled + if disabled is None: + disabled = self.disabled_lookup.find(f"{target_saved_query_name}", pkg) + if disabled: + return Disabled(disabled[0]) + + return None + def resolve_semantic_model_for_measure( self, target_measure_name: str, @@ -1156,6 +1420,7 @@ def resolve_semantic_model_for_measure( semantic_model = self.semantic_model_by_measure_lookup.find( target_measure_name, pkg, self ) + # need to return it even if it's disabled so know it's not fully missing if semantic_model is not None: return semantic_model @@ -1225,70 +1490,60 @@ def is_invalid_protected_ref( node.package_name != target_model.package_name and restrict_package_access ) - # Called by RunTask.defer_to_manifest - def merge_from_artifact( - self, - adapter, - other: "WritableManifest", - selected: AbstractSet[UniqueID], - favor_state: bool = False, - ) -> None: - """Given the selected unique IDs and a writable manifest, update this - manifest by replacing any unselected nodes with their counterpart. + # Called in GraphRunnableTask.before_run, RunTask.before_run, CloneTask.before_run + def merge_from_artifact(self, other: "Manifest") -> None: + """Update this manifest by adding the 'defer_relation' attribute to all nodes + with a counterpart in the stateful manifest used for deferral. Only non-ephemeral refable nodes are examined. """ - refables = set(NodeType.refable()) - merged = set() + refables = set(REFABLE_NODE_TYPES) for unique_id, node in other.nodes.items(): current = self.nodes.get(unique_id) - if current and ( - node.resource_type in refables - and not node.is_ephemeral - and unique_id not in selected - and ( - not adapter.get_relation(current.database, current.schema, current.identifier) - or favor_state + if current and node.resource_type in refables and not node.is_ephemeral: + assert isinstance(node.config, NodeConfig) # this makes mypy happy + defer_relation = DeferRelation( + database=node.database, + schema=node.schema, + alias=node.alias, + relation_name=node.relation_name, + resource_type=node.resource_type, + name=node.name, + description=node.description, + compiled_code=(node.compiled_code if not isinstance(node, SeedNode) else None), + meta=node.meta, + tags=node.tags, + config=node.config, ) - ): - merged.add(unique_id) - self.nodes[unique_id] = node.replace(deferred=True) + self.nodes[unique_id] = replace(current, defer_relation=defer_relation) - # Rebuild the flat_graph, which powers the 'graph' context variable, - # now that we've deferred some nodes + # Rebuild the flat_graph, which powers the 'graph' context variable self.build_flat_graph() - # log up to 5 items - sample = list(islice(merged, 5)) - fire_event(MergedFromState(num_merged=len(merged), sample=sample)) - - # Called by CloneTask.defer_to_manifest - def add_from_artifact( - self, - other: "WritableManifest", - ) -> None: - """Update this manifest by *adding* information about each node's location - in the other manifest. - - Only non-ephemeral refable nodes are examined. 
- """ - refables = set(NodeType.refable()) - for unique_id, node in other.nodes.items(): - current = self.nodes.get(unique_id) - if current and (node.resource_type in refables and not node.is_ephemeral): - defer_relation = DeferRelation( - node.database, node.schema, node.alias, node.relation_name - ) - self.nodes[unique_id] = current.replace(defer_relation=defer_relation) - # Methods that were formerly in ParseResult - def add_macro(self, source_file: SourceFile, macro: Macro): if macro.unique_id in self.macros: # detect that the macro exists and emit an error raise DuplicateMacroInPackageError(macro=macro, macro_mapping=self.macros) self.macros[macro.unique_id] = macro + + if self._macros_by_name is None: + self._macros_by_name = self._build_macros_by_name(self.macros) + + if macro.name not in self._macros_by_name: + self._macros_by_name[macro.name] = [] + + self._macros_by_name[macro.name].append(macro) + + if self._macros_by_package is None: + self._macros_by_package = self._build_macros_by_package(self.macros) + + if macro.package_name not in self._macros_by_package: + self._macros_by_package[macro.package_name] = {} + + self._macros_by_package[macro.package_name][macro.name] = macro + source_file.macros.append(macro.unique_id) def has_file(self, source_file: SourceFile) -> bool: @@ -1323,6 +1578,8 @@ def add_node(self, source_file: AnySourceFile, node: ManifestNode, test_from=Non source_file.exposures.append(node.unique_id) if isinstance(node, Group): source_file.groups.append(node.unique_id) + elif isinstance(source_file, FixtureSourceFile): + pass else: source_file.nodes.append(node.unique_id) @@ -1331,10 +1588,15 @@ def add_exposure(self, source_file: SchemaSourceFile, exposure: Exposure): self.exposures[exposure.unique_id] = exposure source_file.exposures.append(exposure.unique_id) - def add_metric(self, source_file: SchemaSourceFile, metric: Metric): + def add_metric( + self, source_file: SchemaSourceFile, metric: Metric, generated_from: Optional[str] = None + ): _check_duplicates(metric, self.metrics) self.metrics[metric.unique_id] = metric - source_file.metrics.append(metric.unique_id) + if not generated_from: + source_file.metrics.append(metric.unique_id) + else: + source_file.add_metrics_from_measures(generated_from, metric.unique_id) def add_group(self, source_file: SchemaSourceFile, group: Group): _check_duplicates(group, self.groups) @@ -1348,7 +1610,7 @@ def add_disabled_nofile(self, node: GraphMemberNode): else: self.disabled[node.unique_id] = [node] - def add_disabled(self, source_file: AnySourceFile, node: ResultNode, test_from=None): + def add_disabled(self, source_file: AnySourceFile, node: GraphMemberNode, test_from=None): self.add_disabled_nofile(node) if isinstance(source_file, SchemaSourceFile): if isinstance(node, GenericTestNode): @@ -1356,8 +1618,14 @@ def add_disabled(self, source_file: AnySourceFile, node: ResultNode, test_from=N source_file.add_test(node.unique_id, test_from) if isinstance(node, Metric): source_file.metrics.append(node.unique_id) + if isinstance(node, SavedQuery): + source_file.saved_queries.append(node.unique_id) + if isinstance(node, SemanticModel): + source_file.semantic_models.append(node.unique_id) if isinstance(node, Exposure): source_file.exposures.append(node.unique_id) + elif isinstance(source_file, FixtureSourceFile): + pass else: source_file.nodes.append(node.unique_id) @@ -1371,8 +1639,41 @@ def add_semantic_model(self, source_file: SchemaSourceFile, semantic_model: Sema self.semantic_models[semantic_model.unique_id] = 
semantic_model source_file.semantic_models.append(semantic_model.unique_id) + def add_unit_test(self, source_file: SchemaSourceFile, unit_test: UnitTestDefinition): + if unit_test.unique_id in self.unit_tests: + raise DuplicateResourceNameError(unit_test, self.unit_tests[unit_test.unique_id]) + self.unit_tests[unit_test.unique_id] = unit_test + source_file.unit_tests.append(unit_test.unique_id) + + def add_fixture(self, source_file: FixtureSourceFile, fixture: UnitTestFileFixture): + if fixture.unique_id in self.fixtures: + raise DuplicateResourceNameError(fixture, self.fixtures[fixture.unique_id]) + self.fixtures[fixture.unique_id] = fixture + source_file.fixture = fixture.unique_id + + def add_saved_query(self, source_file: SchemaSourceFile, saved_query: SavedQuery) -> None: + _check_duplicates(saved_query, self.saved_queries) + self.saved_queries[saved_query.unique_id] = saved_query + source_file.saved_queries.append(saved_query.unique_id) + # end of methods formerly in ParseResult + def find_node_from_ref_or_source( + self, expression: str + ) -> Optional[Union[ModelNode, SourceDefinition]]: + ref_or_source = statically_parse_ref_or_source(expression) + + node = None + if isinstance(ref_or_source, RefArgs): + node = self.ref_lookup.find( + ref_or_source.name, ref_or_source.package, ref_or_source.version, self + ) + else: + source_name, source_table_name = ref_or_source[0], ref_or_source[1] + node = self.source_lookup.find(f"{source_name}.{source_table_name}", None, self) + + return node + # Provide support for copy.deepcopy() - we just need to avoid the lock! # pickle and deepcopy use this. It returns a callable object used to # create the initial version of the object and a tuple of arguments @@ -1398,6 +1699,8 @@ def __reduce_ex__(self, protocol): self.disabled, self.env_vars, self.semantic_models, + self.unit_tests, + self.saved_queries, self._doc_lookup, self._source_lookup, self._ref_lookup, @@ -1410,109 +1713,24 @@ def __reduce_ex__(self, protocol): class MacroManifest(MacroMethods): - def __init__(self, macros): + def __init__(self, macros) -> None: self.macros = macros - self.metadata = ManifestMetadata() + self.metadata = ManifestMetadata( + user_id=tracking.active_user.id if tracking.active_user else None, + send_anonymous_usage_stats=( + get_flags().SEND_ANONYMOUS_USAGE_STATS if tracking.active_user else None + ), + ) # This is returned by the 'graph' context property # in the ProviderContext class. 
- self.flat_graph = {} + self.flat_graph: Dict[str, Any] = {} + self._macros_by_name: Optional[Dict[str, List[Macro]]] = None + self._macros_by_package: Optional[Dict[str, Dict[str, Macro]]] = None AnyManifest = Union[Manifest, MacroManifest] -@dataclass -@schema_version("manifest", 10) -class WritableManifest(ArtifactMixin): - nodes: Mapping[UniqueID, ManifestNode] = field( - metadata=dict(description=("The nodes defined in the dbt project and its dependencies")) - ) - sources: Mapping[UniqueID, SourceDefinition] = field( - metadata=dict(description=("The sources defined in the dbt project and its dependencies")) - ) - macros: Mapping[UniqueID, Macro] = field( - metadata=dict(description=("The macros defined in the dbt project and its dependencies")) - ) - docs: Mapping[UniqueID, Documentation] = field( - metadata=dict(description=("The docs defined in the dbt project and its dependencies")) - ) - exposures: Mapping[UniqueID, Exposure] = field( - metadata=dict( - description=("The exposures defined in the dbt project and its dependencies") - ) - ) - metrics: Mapping[UniqueID, Metric] = field( - metadata=dict(description=("The metrics defined in the dbt project and its dependencies")) - ) - groups: Mapping[UniqueID, Group] = field( - metadata=dict(description=("The groups defined in the dbt project")) - ) - selectors: Mapping[UniqueID, Any] = field( - metadata=dict(description=("The selectors defined in selectors.yml")) - ) - disabled: Optional[Mapping[UniqueID, List[GraphMemberNode]]] = field( - metadata=dict(description="A mapping of the disabled nodes in the target") - ) - parent_map: Optional[NodeEdgeMap] = field( - metadata=dict( - description="A mapping from child nodes to their dependencies", - ) - ) - child_map: Optional[NodeEdgeMap] = field( - metadata=dict( - description="A mapping from parent nodes to their dependents", - ) - ) - group_map: Optional[NodeEdgeMap] = field( - metadata=dict( - description="A mapping from group names to their nodes", - ) - ) - semantic_models: Mapping[UniqueID, SemanticModel] = field( - metadata=dict(description=("The semantic models defined in the dbt project")) - ) - metadata: ManifestMetadata = field( - metadata=dict( - description="Metadata about the manifest", - ) - ) - - @classmethod - def compatible_previous_versions(self): - return [ - ("manifest", 4), - ("manifest", 5), - ("manifest", 6), - ("manifest", 7), - ("manifest", 8), - ("manifest", 9), - ] - - @classmethod - def upgrade_schema_version(cls, data): - """This overrides the "upgrade_schema_version" call in VersionedSchema (via - ArtifactMixin) to modify the dictionary passed in from earlier versions of the manifest.""" - manifest_schema_version = get_manifest_schema_version(data) - if manifest_schema_version <= 9: - data = upgrade_manifest_json(data, manifest_schema_version) - return cls.from_dict(data) - - def __post_serialize__(self, dct): - for unique_id, node in dct["nodes"].items(): - if "config_call_dict" in node: - del node["config_call_dict"] - if "defer_relation" in node: - del node["defer_relation"] - return dct - - -def get_manifest_schema_version(dct: dict) -> int: - schema_version = dct.get("metadata", {}).get("dbt_schema_version", None) - if not schema_version: - raise ValueError("Manifest doesn't have schema version") - return int(schema_version.split(".")[-2][-1]) - - def _check_duplicates(value: BaseNode, src: Mapping[str, BaseNode]): if value.unique_id in src: raise DuplicateResourceNameError(value, src[value.unique_id]) diff --git a/core/dbt/contracts/graph/metrics.py 
b/core/dbt/contracts/graph/metrics.py index b895aa5e2f5..49934eb7cf6 100644 --- a/core/dbt/contracts/graph/metrics.py +++ b/core/dbt/contracts/graph/metrics.py @@ -1,8 +1,14 @@ -from dbt.node_types import NodeType +from typing import Any, Dict, Iterator, List + +from dbt.contracts.graph.manifest import Manifest, Metric +from dbt_semantic_interfaces.type_enums import MetricType + +DERIVED_METRICS = [MetricType.DERIVED, MetricType.RATIO] +BASE_METRICS = [MetricType.SIMPLE, MetricType.CUMULATIVE, MetricType.CONVERSION] class MetricReference(object): - def __init__(self, metric_name, package_name=None): + def __init__(self, metric_name, package_name=None) -> None: self.metric_name = metric_name self.package_name = package_name @@ -17,76 +23,74 @@ class ResolvedMetricReference(MetricReference): for working with metrics (ie. __str__ and templating functions) """ - def __init__(self, node, manifest, Relation): + def __init__(self, node: Metric, manifest: Manifest) -> None: super().__init__(node.name, node.package_name) self.node = node self.manifest = manifest - self.Relation = Relation - def __getattr__(self, key): + def __getattr__(self, key) -> Any: return getattr(self.node, key) - def __str__(self): + def __str__(self) -> str: return f"{self.node.name}" @classmethod - def parent_metrics(cls, metric_node, manifest): + def parent_metrics(cls, metric_node: Metric, manifest: Manifest) -> Iterator[Metric]: + """For a given metric, yields all upstream metrics.""" yield metric_node for parent_unique_id in metric_node.depends_on.nodes: - node = manifest.metrics.get(parent_unique_id) - if node and node.resource_type == NodeType.Metric: + node = manifest.expect(parent_unique_id) + if isinstance(node, Metric): yield from cls.parent_metrics(node, manifest) @classmethod - def parent_metrics_names(cls, metric_node, manifest): - yield metric_node.name - - for parent_unique_id in metric_node.depends_on.nodes: - node = manifest.metrics.get(parent_unique_id) - if node and node.resource_type == NodeType.Metric: - yield from cls.parent_metrics_names(node, manifest) + def parent_metrics_names(cls, metric_node: Metric, manifest: Manifest) -> Iterator[str]: + """For a given metric, yields all upstream metric names.""" + for metric in cls.parent_metrics(metric_node, manifest): + yield metric.name @classmethod - def reverse_dag_parsing(cls, metric_node, manifest, metric_depth_count): - if metric_node.calculation_method == "derived": + def reverse_dag_parsing( + cls, metric_node: Metric, manifest: Manifest, metric_depth_count: int + ) -> Iterator[Dict[str, int]]: + """For the given metric, yields dictionaries having {<metric_name>: <depth_from_initial_metric>} of upstream derived metrics. + + This function is intended as a helper function for other metric helper functions.
+ """ + if metric_node.type in DERIVED_METRICS: yield {metric_node.name: metric_depth_count} - metric_depth_count = metric_depth_count + 1 - for parent_unique_id in metric_node.depends_on.nodes: - node = manifest.metrics.get(parent_unique_id) - if ( - node - and node.resource_type == NodeType.Metric - and node.calculation_method == "derived" - ): - yield from cls.reverse_dag_parsing(node, manifest, metric_depth_count) + for parent_unique_id in metric_node.depends_on.nodes: + node = manifest.expect(parent_unique_id) + if isinstance(node, Metric): + yield from cls.reverse_dag_parsing(node, manifest, metric_depth_count + 1) def full_metric_dependency(self): + """Returns a unique list of all upstream metric names.""" to_return = list(set(self.parent_metrics_names(self.node, self.manifest))) return to_return - def base_metric_dependency(self): + def base_metric_dependency(self) -> List[str]: + """Returns a unique list of names for all upstream non-derived metrics.""" in_scope_metrics = list(self.parent_metrics(self.node, self.manifest)) + base_metrics = { + metric.name for metric in in_scope_metrics if metric.type not in DERIVED_METRICS + } - to_return = [] - for metric in in_scope_metrics: - if metric.calculation_method != "derived" and metric.name not in to_return: - to_return.append(metric.name) - - return to_return + return list(base_metrics) - def derived_metric_dependency(self): + def derived_metric_dependency(self) -> List[str]: + """Returns a unique list of names for all upstream derived metrics.""" in_scope_metrics = list(self.parent_metrics(self.node, self.manifest)) + derived_metrics = { + metric.name for metric in in_scope_metrics if metric.type in DERIVED_METRICS + } - to_return = [] - for metric in in_scope_metrics: - if metric.calculation_method == "derived" and metric.name not in to_return: - to_return.append(metric.name) - - return to_return + return list(derived_metrics) - def derived_metric_dependency_depth(self): + def derived_metric_dependency_depth(self) -> List[Dict[str, int]]: + """Returns a list of {<metric_name>: <depth_from_initial_metric>} for all upstream metrics.""" metric_depth_count = 1 to_return = list(self.reverse_dag_parsing(self.node, self.manifest, metric_depth_count)) diff --git a/core/dbt/contracts/graph/model_config.py b/core/dbt/contracts/graph/model_config.py index b69b2500d63..b3d5952e268 100644 --- a/core/dbt/contracts/graph/model_config.py +++ b/core/dbt/contracts/graph/model_config.py @@ -1,117 +1,22 @@ -from dataclasses import field, Field, dataclass -from enum import Enum -from itertools import chain -from typing import Any, List, Optional, Dict, Union, Type, TypeVar, Callable - -from dbt.dataclass_schema import ( - dbtClassMixin, - ValidationError, - register_pattern, - StrEnum, +from dataclasses import dataclass, field +from typing import Any, Dict, List, Optional, Type + +from dbt.artifacts.resources import ( + ExposureConfig, + MetricConfig, + ModelConfig, + NodeConfig, + SavedQueryConfig, + SeedConfig, + SemanticModelConfig, + SnapshotConfig, + SourceConfig, + TestConfig, + UnitTestConfig, ) -from dbt.contracts.graph.unparsed import AdditionalPropertiesAllowed, Docs -from dbt.contracts.graph.utils import validate_color -from dbt.contracts.util import Replaceable, list_str -from dbt.exceptions import DbtInternalError, CompilationError -from dbt import hooks from dbt.node_types import NodeType - - -M = TypeVar("M", bound="Metadata") - - -def _get_meta_value(cls: Type[M], fld: Field, key: str, default: Any) -> M: - # a metadata field might 
exist. If it does, it might have a matching key. - # If it has both, make sure the value is valid and return it. If it - # doesn't, return the default. - if fld.metadata: - value = fld.metadata.get(key, default) - else: - value = default - - try: - return cls(value) - except ValueError as exc: - raise DbtInternalError(f"Invalid {cls} value: {value}") from exc - - -def _set_meta_value(obj: M, key: str, existing: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: - if existing is None: - result = {} - else: - result = existing.copy() - result.update({key: obj}) - return result - - -class Metadata(Enum): - @classmethod - def from_field(cls: Type[M], fld: Field) -> M: - default = cls.default_field() - key = cls.metadata_key() - - return _get_meta_value(cls, fld, key, default) - - def meta(self, existing: Optional[Dict[str, Any]] = None) -> Dict[str, Any]: - key = self.metadata_key() - return _set_meta_value(self, key, existing) - - @classmethod - def default_field(cls) -> "Metadata": - raise NotImplementedError("Not implemented") - - @classmethod - def metadata_key(cls) -> str: - raise NotImplementedError("Not implemented") - - -class MergeBehavior(Metadata): - Append = 1 - Update = 2 - Clobber = 3 - DictKeyAppend = 4 - - @classmethod - def default_field(cls) -> "MergeBehavior": - return cls.Clobber - - @classmethod - def metadata_key(cls) -> str: - return "merge" - - -class ShowBehavior(Metadata): - Show = 1 - Hide = 2 - - @classmethod - def default_field(cls) -> "ShowBehavior": - return cls.Show - - @classmethod - def metadata_key(cls) -> str: - return "show_hide" - - @classmethod - def should_show(cls, fld: Field) -> bool: - return cls.from_field(fld) == cls.Show - - -class CompareBehavior(Metadata): - Include = 1 - Exclude = 2 - - @classmethod - def default_field(cls) -> "CompareBehavior": - return cls.Include - - @classmethod - def metadata_key(cls) -> str: - return "compare" - - @classmethod - def should_include(cls, fld: Field) -> bool: - return cls.from_field(fld) == cls.Include +from dbt_common.contracts.config.base import BaseConfig +from dbt_common.contracts.config.metadata import Metadata def metas(*metas: Metadata) -> Dict[str, Any]: @@ -121,62 +26,6 @@ def metas(*metas: Metadata) -> Dict[str, Any]: return existing -def _listify(value: Any) -> List: - if isinstance(value, list): - return value[:] - else: - return [value] - - -# There are two versions of this code. The one here is for config -# objects, the one in _add_config_call in context_config.py is for -# config_call_dict dictionaries. 
-def _merge_field_value( - merge_behavior: MergeBehavior, - self_value: Any, - other_value: Any, -): - if merge_behavior == MergeBehavior.Clobber: - return other_value - elif merge_behavior == MergeBehavior.Append: - return _listify(self_value) + _listify(other_value) - elif merge_behavior == MergeBehavior.Update: - if not isinstance(self_value, dict): - raise DbtInternalError(f"expected dict, got {self_value}") - if not isinstance(other_value, dict): - raise DbtInternalError(f"expected dict, got {other_value}") - value = self_value.copy() - value.update(other_value) - return value - elif merge_behavior == MergeBehavior.DictKeyAppend: - if not isinstance(self_value, dict): - raise DbtInternalError(f"expected dict, got {self_value}") - if not isinstance(other_value, dict): - raise DbtInternalError(f"expected dict, got {other_value}") - new_dict = {} - for key in self_value.keys(): - new_dict[key] = _listify(self_value[key]) - for key in other_value.keys(): - extend = False - new_key = key - # This might start with a +, to indicate we should extend the list - # instead of just clobbering it - if new_key.startswith("+"): - new_key = key.lstrip("+") - extend = True - if new_key in new_dict and extend: - # extend the list - value = other_value[key] - new_dict[new_key].extend(_listify(value)) - else: - # clobber the list - new_dict[new_key] = _listify(other_value[key]) - return new_dict - - else: - raise DbtInternalError(f"Got an invalid merge_behavior: {merge_behavior}") - - def insensitive_patterns(*patterns: str): lowercased = [] for pattern in patterns: @@ -184,495 +33,29 @@ def insensitive_patterns(*patterns: str): return "^({})$".format("|".join(lowercased)) -class Severity(str): - pass - - -register_pattern(Severity, insensitive_patterns("warn", "error")) - - -class OnConfigurationChangeOption(StrEnum): - Apply = "apply" - Continue = "continue" - Fail = "fail" - - @classmethod - def default(cls) -> "OnConfigurationChangeOption": - return cls.Apply - - -@dataclass -class ContractConfig(dbtClassMixin, Replaceable): - enforced: bool = False - - -@dataclass -class Hook(dbtClassMixin, Replaceable): - sql: str - transaction: bool = True - index: Optional[int] = None - - -T = TypeVar("T", bound="BaseConfig") - - -@dataclass -class BaseConfig(AdditionalPropertiesAllowed, Replaceable): - - # enable syntax like: config['key'] - def __getitem__(self, key): - return self.get(key) - - # like doing 'get' on a dictionary - def get(self, key, default=None): - if hasattr(self, key): - return getattr(self, key) - elif key in self._extra: - return self._extra[key] - else: - return default - - # enable syntax like: config['key'] = value - def __setitem__(self, key, value): - if hasattr(self, key): - setattr(self, key, value) - else: - self._extra[key] = value - - def __delitem__(self, key): - if hasattr(self, key): - msg = ( - 'Error, tried to delete config key "{}": Cannot delete ' "built-in keys" - ).format(key) - raise CompilationError(msg) - else: - del self._extra[key] - - def _content_iterator(self, include_condition: Callable[[Field], bool]): - seen = set() - for fld, _ in self._get_fields(): - seen.add(fld.name) - if include_condition(fld): - yield fld.name - - for key in self._extra: - if key not in seen: - seen.add(key) - yield key - - def __iter__(self): - yield from self._content_iterator(include_condition=lambda f: True) - - def __len__(self): - return len(self._get_fields()) + len(self._extra) - - @staticmethod - def compare_key( - unrendered: Dict[str, Any], - other: Dict[str, Any], - key: 
str, - ) -> bool: - if key not in unrendered and key not in other: - return True - elif key not in unrendered and key in other: - return False - elif key in unrendered and key not in other: - return False - else: - return unrendered[key] == other[key] - - @classmethod - def same_contents(cls, unrendered: Dict[str, Any], other: Dict[str, Any]) -> bool: - """This is like __eq__, except it ignores some fields.""" - seen = set() - for fld, target_name in cls._get_fields(): - key = target_name - seen.add(key) - if CompareBehavior.should_include(fld): - if not cls.compare_key(unrendered, other, key): - return False - - for key in chain(unrendered, other): - if key not in seen: - seen.add(key) - if not cls.compare_key(unrendered, other, key): - return False - return True - - # This is used in 'add_config_call' to create the combined config_call_dict. - # 'meta' moved here from node - mergebehavior = { - "append": ["pre-hook", "pre_hook", "post-hook", "post_hook", "tags"], - "update": [ - "quoting", - "column_types", - "meta", - "docs", - "contract", - ], - "dict_key_append": ["grants"], - } - - @classmethod - def _merge_dicts(cls, src: Dict[str, Any], data: Dict[str, Any]) -> Dict[str, Any]: - """Find all the items in data that match a target_field on this class, - and merge them with the data found in `src` for target_field, using the - field's specified merge behavior. Matching items will be removed from - `data` (but _not_ `src`!). - - Returns a dict with the merge results. - - That means this method mutates its input! Any remaining values in data - were not merged. - """ - result = {} - - for fld, target_field in cls._get_fields(): - if target_field not in data: - continue - - data_attr = data.pop(target_field) - if target_field not in src: - result[target_field] = data_attr - continue - - merge_behavior = MergeBehavior.from_field(fld) - self_attr = src[target_field] - - result[target_field] = _merge_field_value( - merge_behavior=merge_behavior, - self_value=self_attr, - other_value=data_attr, - ) - return result - - def update_from(self: T, data: Dict[str, Any], adapter_type: str, validate: bool = True) -> T: - """Given a dict of keys, update the current config from them, validate - it, and return a new config with the updated values - """ - # sadly, this is a circular import - from dbt.adapters.factory import get_config_class_by_name - - dct = self.to_dict(omit_none=False) - - adapter_config_cls = get_config_class_by_name(adapter_type) - - self_merged = self._merge_dicts(dct, data) - dct.update(self_merged) - - adapter_merged = adapter_config_cls._merge_dicts(dct, data) - dct.update(adapter_merged) - - # any remaining fields must be "clobber" - dct.update(data) - - # any validation failures must have come from the update - if validate: - self.validate(dct) - return self.from_dict(dct) - - def finalize_and_validate(self: T) -> T: - dct = self.to_dict(omit_none=False) - self.validate(dct) - return self.from_dict(dct) - - def replace(self, **kwargs): - dct = self.to_dict(omit_none=True) - - mapping = self.field_mapping() - for key, value in kwargs.items(): - new_key = mapping.get(key, key) - dct[new_key] = value - return self.from_dict(dct) - - -@dataclass -class SemanticModelConfig(BaseConfig): - enabled: bool = True - - -@dataclass -class MetricConfig(BaseConfig): - enabled: bool = True - group: Optional[str] = None - - -@dataclass -class ExposureConfig(BaseConfig): - enabled: bool = True - - @dataclass -class SourceConfig(BaseConfig): - enabled: bool = True - - -@dataclass -class 
NodeAndTestConfig(BaseConfig): - enabled: bool = True - # these fields are included in serialized output, but are not part of - # config comparison (they are part of database_representation) - alias: Optional[str] = field( - default=None, - metadata=CompareBehavior.Exclude.meta(), - ) - schema: Optional[str] = field( - default=None, - metadata=CompareBehavior.Exclude.meta(), - ) - database: Optional[str] = field( - default=None, - metadata=CompareBehavior.Exclude.meta(), - ) - tags: Union[List[str], str] = field( - default_factory=list_str, - metadata=metas(ShowBehavior.Hide, MergeBehavior.Append, CompareBehavior.Exclude), - ) - meta: Dict[str, Any] = field( - default_factory=dict, - metadata=MergeBehavior.Update.meta(), - ) - group: Optional[str] = field( - default=None, - metadata=CompareBehavior.Exclude.meta(), - ) - - -@dataclass -class NodeConfig(NodeAndTestConfig): - # Note: if any new fields are added with MergeBehavior, also update the - # 'mergebehavior' dictionary - materialized: str = "view" - incremental_strategy: Optional[str] = None - persist_docs: Dict[str, Any] = field(default_factory=dict) - post_hook: List[Hook] = field( - default_factory=list, - metadata=MergeBehavior.Append.meta(), - ) - pre_hook: List[Hook] = field( - default_factory=list, - metadata=MergeBehavior.Append.meta(), - ) - quoting: Dict[str, Any] = field( - default_factory=dict, - metadata=MergeBehavior.Update.meta(), - ) - # This is actually only used by seeds. Should it be available to others? - # That would be a breaking change! - column_types: Dict[str, Any] = field( - default_factory=dict, - metadata=MergeBehavior.Update.meta(), - ) - full_refresh: Optional[bool] = None - # 'unique_key' doesn't use 'Optional' because typing.get_type_hints was - # sometimes getting the Union order wrong, causing serialization failures. - unique_key: Union[str, List[str], None] = None - on_schema_change: Optional[str] = "ignore" - on_configuration_change: OnConfigurationChangeOption = field( - default_factory=OnConfigurationChangeOption.default - ) - grants: Dict[str, Any] = field( - default_factory=dict, metadata=MergeBehavior.DictKeyAppend.meta() - ) - packages: List[str] = field( - default_factory=list, - metadata=MergeBehavior.Append.meta(), - ) - docs: Docs = field( - default_factory=Docs, - metadata=MergeBehavior.Update.meta(), - ) - contract: ContractConfig = field( - default_factory=ContractConfig, - metadata=MergeBehavior.Update.meta(), - ) - - def __post_init__(self): - # we validate that node_color has a suitable value to prevent dbt-docs from crashing - if self.docs.node_color: - node_color = self.docs.node_color - if not validate_color(node_color): - raise ValidationError( - f"Invalid color name for docs.node_color: {node_color}. " - "It is neither a valid HTML color name nor a valid HEX code." - ) - - if ( - self.contract.enforced - and self.materialized == "incremental" - and self.on_schema_change not in ("append_new_columns", "fail") - ): - raise ValidationError( - f"Invalid value for on_schema_change: {self.on_schema_change}. 
Models " - "materialized as incremental with contracts enabled must set " - "on_schema_change to 'append_new_columns' or 'fail'" - ) - - @classmethod - def __pre_deserialize__(cls, data): - data = super().__pre_deserialize__(data) - field_map = {"post-hook": "post_hook", "pre-hook": "pre_hook"} - # create a new dict because otherwise it gets overwritten in - # tests - new_dict = {} - for key in data: - new_dict[key] = data[key] - data = new_dict - for key in hooks.ModelHookType: - if key in data: - data[key] = [hooks.get_hook_dict(h) for h in data[key]] - for field_name in field_map: - if field_name in data: - new_name = field_map[field_name] - data[new_name] = data.pop(field_name) - return data - - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) - field_map = {"post_hook": "post-hook", "pre_hook": "pre-hook"} - for field_name in field_map: - if field_name in dct: - dct[field_map[field_name]] = dct.pop(field_name) - return dct - - # this is still used by jsonschema validation - @classmethod - def field_mapping(cls): - return {"post_hook": "post-hook", "pre_hook": "pre-hook"} - - -@dataclass -class SeedConfig(NodeConfig): - materialized: str = "seed" - quote_columns: Optional[bool] = None - - @classmethod - def validate(cls, data): - super().validate(data) - if data.get("materialized") and data.get("materialized") != "seed": - raise ValidationError("A seed must have a materialized value of 'seed'") - - -@dataclass -class TestConfig(NodeAndTestConfig): - __test__ = False - - # this is repeated because of a different default - schema: Optional[str] = field( - default="dbt_test__audit", - metadata=CompareBehavior.Exclude.meta(), - ) - materialized: str = "test" - severity: Severity = Severity("ERROR") - store_failures: Optional[bool] = None - where: Optional[str] = None - limit: Optional[int] = None - fail_calc: str = "count(*)" - warn_if: str = "!= 0" - error_if: str = "!= 0" - - @classmethod - def same_contents(cls, unrendered: Dict[str, Any], other: Dict[str, Any]) -> bool: - """This is like __eq__, except it explicitly checks certain fields.""" - modifiers = [ - "severity", - "where", - "limit", - "fail_calc", - "warn_if", - "error_if", - "store_failures", - ] - - seen = set() - for _, target_name in cls._get_fields(): - key = target_name - seen.add(key) - if key in modifiers: - if not cls.compare_key(unrendered, other, key): - return False - return True - - @classmethod - def validate(cls, data): - super().validate(data) - if data.get("materialized") and data.get("materialized") != "test": - raise ValidationError("A test must have a materialized value of 'test'") - - -@dataclass -class EmptySnapshotConfig(NodeConfig): - materialized: str = "snapshot" - unique_key: Optional[str] = None # override NodeConfig unique_key definition - - -@dataclass -class SnapshotConfig(EmptySnapshotConfig): - strategy: Optional[str] = None - unique_key: Optional[str] = None - target_schema: Optional[str] = None - target_database: Optional[str] = None - updated_at: Optional[str] = None - # Not using Optional because of serialization issues with a Union of str and List[str] - check_cols: Union[str, List[str], None] = None - - @classmethod - def validate(cls, data): - super().validate(data) - # Note: currently you can't just set these keys in schema.yml because this validation - # will fail when parsing the snapshot node. 
- if not data.get("strategy") or not data.get("unique_key") or not data.get("target_schema"): - raise ValidationError( - "Snapshots must be configured with a 'strategy', 'unique_key', " - "and 'target_schema'." - ) - if data.get("strategy") == "check": - if not data.get("check_cols"): - raise ValidationError( - "A snapshot configured with the check strategy must " - "specify a check_cols configuration." - ) - if isinstance(data["check_cols"], str) and data["check_cols"] != "all": - raise ValidationError( - f"Invalid value for 'check_cols': {data['check_cols']}. " - "Expected 'all' or a list of strings." - ) - elif data.get("strategy") == "timestamp": - if not data.get("updated_at"): - raise ValidationError( - "A snapshot configured with the timestamp strategy " - "must specify an updated_at configuration." - ) - if data.get("check_cols"): - raise ValidationError("A 'timestamp' snapshot should not have 'check_cols'") - # If the strategy is not 'check' or 'timestamp' it's a custom strategy, - # formerly supported with GenericSnapshotConfig - - if data.get("materialized") and data.get("materialized") != "snapshot": - raise ValidationError("A snapshot must have a materialized value of 'snapshot'") - - # Called by "calculate_node_config_dict" in ContextConfigGenerator - def finalize_and_validate(self): - data = self.to_dict(omit_none=True) - self.validate(data) - return self.from_dict(data) +class UnitTestNodeConfig(NodeConfig): + expected_rows: List[Dict[str, Any]] = field(default_factory=list) + expected_sql: Optional[str] = None RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = { NodeType.Metric: MetricConfig, + NodeType.SemanticModel: SemanticModelConfig, + NodeType.SavedQuery: SavedQueryConfig, NodeType.Exposure: ExposureConfig, NodeType.Source: SourceConfig, NodeType.Seed: SeedConfig, NodeType.Test: TestConfig, - NodeType.Model: NodeConfig, + NodeType.Model: ModelConfig, NodeType.Snapshot: SnapshotConfig, + NodeType.Unit: UnitTestConfig, } # base resource types are like resource types, except nothing has mandatory # configs. 
BASE_RESOURCE_TYPES: Dict[NodeType, Type[BaseConfig]] = RESOURCE_TYPES.copy() -BASE_RESOURCE_TYPES.update({NodeType.Snapshot: EmptySnapshotConfig}) def get_config_for(resource_type: NodeType, base=False) -> Type[BaseConfig]: diff --git a/core/dbt/contracts/graph/node_args.py b/core/dbt/contracts/graph/node_args.py index 09b87a21c50..60b2a2aa1f8 100644 --- a/core/dbt/contracts/graph/node_args.py +++ b/core/dbt/contracts/graph/node_args.py @@ -1,9 +1,9 @@ from dataclasses import dataclass, field from datetime import datetime -from typing import Optional, List +from typing import List, Optional -from dbt.contracts.graph.unparsed import NodeVersion -from dbt.node_types import NodeType, AccessType +from dbt.artifacts.resources import NodeVersion +from dbt.node_types import AccessType, NodeType @dataclass @@ -29,3 +29,12 @@ def unique_id(self) -> str: unique_id = f"{unique_id}.v{self.version}" return unique_id + + @property + def fqn(self) -> List[str]: + fqn = [self.package_name, self.name] + # Test for None explicitly because version can be 0 + if self.version is not None: + fqn.append(f"v{self.version}") + + return fqn diff --git a/core/dbt/contracts/graph/nodes.py b/core/dbt/contracts/graph/nodes.py index 59ac576db79..b28910c0de3 100644 --- a/core/dbt/contracts/graph/nodes.py +++ b/core/dbt/contracts/graph/nodes.py @@ -1,77 +1,97 @@ +import hashlib import os -from datetime import datetime -import time from dataclasses import dataclass, field -from enum import Enum -import hashlib +from datetime import datetime +from typing import ( + Any, + Dict, + Iterator, + List, + Literal, + Optional, + Sequence, + Tuple, + Type, + Union, + get_args, +) from mashumaro.types import SerializableType -from typing import Optional, Union, List, Dict, Any, Sequence, Tuple, Iterator - -from dbt.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin - -from dbt.clients.system import write_file -from dbt.contracts.files import FileHash -from dbt.contracts.graph.semantic_models import ( - Defaults, - Dimension, - Entity, - Measure, - SourceFileMetadata, + +from dbt.adapters.base import ConstraintSupport +from dbt.adapters.factory import get_adapter_constraint_support +from dbt.artifacts.resources import Analysis as AnalysisResource +from dbt.artifacts.resources import ( + BaseResource, + ColumnInfo, + CompiledResource, + DependsOn, + Docs, ) +from dbt.artifacts.resources import Documentation as DocumentationResource +from dbt.artifacts.resources import Exposure as ExposureResource +from dbt.artifacts.resources import FileHash +from dbt.artifacts.resources import GenericTest as GenericTestResource +from dbt.artifacts.resources import GraphResource +from dbt.artifacts.resources import Group as GroupResource +from dbt.artifacts.resources import HasRelationMetadata as HasRelationMetadataResource +from dbt.artifacts.resources import HookNode as HookNodeResource +from dbt.artifacts.resources import InjectedCTE +from dbt.artifacts.resources import Macro as MacroResource +from dbt.artifacts.resources import MacroArgument +from dbt.artifacts.resources import Metric as MetricResource +from dbt.artifacts.resources import MetricInputMeasure +from dbt.artifacts.resources import Model as ModelResource +from dbt.artifacts.resources import ( + ModelConfig, + NodeConfig, + NodeVersion, + ParsedResource, + ParsedResourceMandatory, +) +from dbt.artifacts.resources import Quoting as QuotingResource +from dbt.artifacts.resources import SavedQuery as SavedQueryResource +from dbt.artifacts.resources import Seed as SeedResource 
+from dbt.artifacts.resources import SemanticModel as SemanticModelResource +from dbt.artifacts.resources import SingularTest as SingularTestResource +from dbt.artifacts.resources import Snapshot as SnapshotResource +from dbt.artifacts.resources import SourceDefinition as SourceDefinitionResource +from dbt.artifacts.resources import SqlOperation as SqlOperationResource +from dbt.artifacts.resources import TimeSpine +from dbt.artifacts.resources import UnitTestDefinition as UnitTestDefinitionResource +from dbt.contracts.graph.model_config import UnitTestNodeConfig +from dbt.contracts.graph.node_args import ModelNodeArgs from dbt.contracts.graph.unparsed import ( - Docs, - ExposureType, - ExternalTable, - FreshnessThreshold, HasYamlMetadata, - MacroArgument, - MaturityType, - Owner, - Quoting, TestDef, - NodeVersion, + UnitTestOverrides, + UnparsedColumn, UnparsedSourceDefinition, UnparsedSourceTableDefinition, - UnparsedColumn, ) -from dbt.contracts.graph.node_args import ModelNodeArgs -from dbt.contracts.util import Replaceable, AdditionalPropertiesMixin -from dbt.events.functions import warn_or_error -from dbt.exceptions import ParsingError, ContractBreakingChangeError from dbt.events.types import ( - SeedIncreased, - SeedExceedsLimitSamePath, SeedExceedsLimitAndPathChanged, SeedExceedsLimitChecksumChanged, + SeedExceedsLimitSamePath, + SeedIncreased, + UnversionedBreakingChange, ) -from dbt.events.contextvars import set_log_contextvars +from dbt.exceptions import ContractBreakingChangeError, ParsingError, ValidationError from dbt.flags import get_flags -from dbt.node_types import ModelLanguage, NodeType, AccessType -from dbt_semantic_interfaces.call_parameter_sets import FilterCallParameterSets -from dbt_semantic_interfaces.references import ( - EntityReference, - MeasureReference, - LinkableElementReference, - SemanticModelReference, - TimeDimensionReference, +from dbt.node_types import ( + REFABLE_NODE_TYPES, + VERSIONED_NODE_TYPES, + AccessType, + NodeType, ) -from dbt_semantic_interfaces.references import MetricReference as DSIMetricReference -from dbt_semantic_interfaces.type_enums import MetricType, TimeGranularity -from dbt_semantic_interfaces.parsing.where_filter_parser import WhereFilterParser - -from .model_config import ( - NodeConfig, - SeedConfig, - TestConfig, - SourceConfig, - MetricConfig, - ExposureConfig, - EmptySnapshotConfig, - SnapshotConfig, - SemanticModelConfig, +from dbt_common.clients.system import write_file +from dbt_common.contracts.constraints import ( + ColumnLevelConstraint, + ConstraintType, + ModelLevelConstraint, ) - +from dbt_common.events.contextvars import set_log_contextvars +from dbt_common.events.functions import warn_or_error # ===================================================================== # This contains the classes for all of the nodes and node-like objects @@ -97,15 +117,17 @@ @dataclass -class BaseNode(dbtClassMixin, Replaceable): +class BaseNode(BaseResource): """All nodes or node-like objects in this file should have this as a base class""" - name: str - resource_type: NodeType - package_name: str - path: str - original_file_path: str - unique_id: str + # In an ideal world this would be a class property. However, chaining @classmethod and + # @property was deprecated in python 3.11 and removed in 3.13. 
There are more + # complicated ways of making a class property, however a class method suits our + # purposes well enough + @classmethod + def resource_class(cls) -> Type[BaseResource]: + """Should be overriden by any class inheriting BaseNode""" + raise NotImplementedError @property def search_name(self): @@ -117,7 +139,7 @@ def file_id(self): @property def is_refable(self): - return self.resource_type in NodeType.refable() + return self.resource_type in REFABLE_NODE_TYPES @property def should_store_failures(self): @@ -126,11 +148,11 @@ def should_store_failures(self): # will this node map to an object in the database? @property def is_relational(self): - return self.resource_type in NodeType.refable() + return self.resource_type in REFABLE_NODE_TYPES @property def is_versioned(self): - return self.resource_type in NodeType.versioned() and self.version is not None + return self.resource_type in VERSIONED_NODE_TYPES and self.version is not None @property def is_ephemeral(self): @@ -143,105 +165,25 @@ def is_ephemeral_model(self): def get_materialization(self): return self.config.materialized - -@dataclass -class GraphNode(BaseNode): - """Nodes in the DAG. Macro and Documentation don't have fqn.""" - - fqn: List[str] - - def same_fqn(self, other) -> bool: - return self.fqn == other.fqn - - -@dataclass -class RefArgs(dbtClassMixin): - name: str - package: Optional[str] = None - version: Optional[NodeVersion] = None - - @property - def positional_args(self) -> List[str]: - if self.package: - return [self.package, self.name] - else: - return [self.name] - - @property - def keyword_args(self) -> Dict[str, Optional[NodeVersion]]: - if self.version: - return {"version": self.version} - else: - return {} - - -class ConstraintType(str, Enum): - check = "check" - not_null = "not_null" - unique = "unique" - primary_key = "primary_key" - foreign_key = "foreign_key" - custom = "custom" - @classmethod - def is_valid(cls, item): - try: - cls(item) - except ValueError: - return False - return True + def from_resource(cls, resource_instance: BaseResource): + assert isinstance(resource_instance, cls.resource_class()) + return cls.from_dict(resource_instance.to_dict()) - -@dataclass -class ColumnLevelConstraint(dbtClassMixin): - type: ConstraintType - name: Optional[str] = None - # expression is a user-provided field that will depend on the constraint type. - # It could be a predicate (check type), or a sequence sql keywords (e.g. unique type), - # so the vague naming of 'expression' is intended to capture this range. - expression: Optional[str] = None - warn_unenforced: bool = ( - True # Warn if constraint cannot be enforced by platform but will be in DDL - ) - warn_unsupported: bool = ( - True # Warn if constraint is not supported by the platform and won't be in DDL - ) - - -@dataclass -class ModelLevelConstraint(ColumnLevelConstraint): - columns: List[str] = field(default_factory=list) + def to_resource(self): + return self.resource_class().from_dict(self.to_dict()) @dataclass -class ColumnInfo(AdditionalPropertiesMixin, ExtensibleDbtClassMixin, Replaceable): - """Used in all ManifestNodes and SourceDefinition""" - - name: str - description: str = "" - meta: Dict[str, Any] = field(default_factory=dict) - data_type: Optional[str] = None - constraints: List[ColumnLevelConstraint] = field(default_factory=list) - quote: Optional[bool] = None - tags: List[str] = field(default_factory=list) - _extra: Dict[str, Any] = field(default_factory=dict) - +class GraphNode(GraphResource, BaseNode): + """Nodes in the DAG. 
Macro and Documentation don't have fqn.""" -@dataclass -class Contract(dbtClassMixin, Replaceable): - enforced: bool = False - checksum: Optional[str] = None + def same_fqn(self, other) -> bool: + return self.fqn == other.fqn -# Metrics, exposures, @dataclass -class HasRelationMetadata(dbtClassMixin, Replaceable): - database: Optional[str] - schema: str - - # Can't set database to None like it ought to be - # because it messes up the subclasses and default parameters - # so hack it here +class HasRelationMetadata(HasRelationMetadataResource): @classmethod def __pre_deserialize__(cls, data): data = super().__pre_deserialize__(data) @@ -249,51 +191,22 @@ def __pre_deserialize__(cls, data): data["database"] = None return data - -@dataclass -class MacroDependsOn(dbtClassMixin, Replaceable): - """Used only in the Macro class""" - - macros: List[str] = field(default_factory=list) - - # 'in' on lists is O(n) so this is O(n^2) for # of macros - def add_macro(self, value: str): - if value not in self.macros: - self.macros.append(value) - - -@dataclass -class DeferRelation(HasRelationMetadata): - alias: str - relation_name: Optional[str] - @property - def identifier(self): - return self.alias + def quoting_dict(self) -> Dict[str, bool]: + if hasattr(self, "quoting"): + return self.quoting.to_dict(omit_none=True) + else: + return {} @dataclass -class DependsOn(MacroDependsOn): - nodes: List[str] = field(default_factory=list) - - def add_node(self, value: str): - if value not in self.nodes: - self.nodes.append(value) - - -@dataclass -class ParsedNodeMandatory(GraphNode, HasRelationMetadata, Replaceable): - alias: str - checksum: FileHash - config: NodeConfig = field(default_factory=NodeConfig) - - @property - def identifier(self): - return self.alias +class ParsedNodeMandatory(ParsedResourceMandatory, GraphNode, HasRelationMetadata): + pass # This needs to be in all ManifestNodes and also in SourceDefinition, -# because of "source freshness" +# because of "source freshness". Should not be in artifacts, because we +# don't write out _event_status. 
@dataclass class NodeInfoMixin: _event_status: Dict[str, Any] = field(default_factory=dict) @@ -329,22 +242,7 @@ def clear_event_status(self): @dataclass -class ParsedNode(NodeInfoMixin, ParsedNodeMandatory, SerializableType): - tags: List[str] = field(default_factory=list) - description: str = field(default="") - columns: Dict[str, ColumnInfo] = field(default_factory=dict) - meta: Dict[str, Any] = field(default_factory=dict) - group: Optional[str] = None - docs: Docs = field(default_factory=Docs) - patch_path: Optional[str] = None - build_path: Optional[str] = None - deferred: bool = False - unrendered_config: Dict[str, Any] = field(default_factory=dict) - created_at: float = field(default_factory=lambda: time.time()) - config_call_dict: Dict[str, Any] = field(default_factory=dict) - relation_name: Optional[str] = None - raw_code: str = "" - +class ParsedNode(ParsedResource, NodeInfoMixin, ParsedNodeMandatory, SerializableType): def get_target_write_path(self, target_path: str, subdirectory: str): # This is called for both the "compiled" subdirectory of "target" and the "run" subdirectory if os.path.basename(self.path) == os.path.basename(self.original_file_path): @@ -366,8 +264,8 @@ def write_node(self, project_root: str, compiled_path, compiled_code: str): def _serialize(self): return self.to_dict() - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) if "_event_status" in dct: del dct["_event_status"] return dct @@ -384,8 +282,6 @@ def _deserialize(cls, dct: Dict[str, int]): return AnalysisNode.from_dict(dct) elif resource_type == "seed": return SeedNode.from_dict(dct) - elif resource_type == "rpc": - return RPCNode.from_dict(dct) elif resource_type == "sql": return SqlNode.from_dict(dct) elif resource_type == "test": @@ -483,31 +379,10 @@ def is_external_node(self): @dataclass -class InjectedCTE(dbtClassMixin, Replaceable): - """Used in CompiledNodes as part of ephemeral model processing""" - - id: str - sql: str - - -@dataclass -class CompiledNode(ParsedNode): +class CompiledNode(CompiledResource, ParsedNode): """Contains attributes necessary for SQL files and nodes with refs, sources, etc, so all ManifestNodes except SeedNode.""" - language: str = "sql" - refs: List[RefArgs] = field(default_factory=list) - sources: List[List[str]] = field(default_factory=list) - metrics: List[List[str]] = field(default_factory=list) - depends_on: DependsOn = field(default_factory=DependsOn) - compiled_path: Optional[str] = None - compiled: bool = False - compiled_code: Optional[str] = None - extra_ctes_injected: bool = False - extra_ctes: List[InjectedCTE] = field(default_factory=list) - _pre_injected_sql: Optional[str] = None - contract: Contract = field(default_factory=Contract) - @property def empty(self): return not self.raw_code.strip() @@ -525,20 +400,6 @@ def set_cte(self, cte_id: str, sql: str): else: self.extra_ctes.append(InjectedCTE(id=cte_id, sql=sql)) - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) - if "_pre_injected_sql" in dct: - del dct["_pre_injected_sql"] - # Remove compiled attributes - if "compiled" in dct and dct["compiled"] is False: - del dct["compiled"] - del dct["extra_ctes_injected"] - del dct["extra_ctes"] - # "omit_none" means these might not be in the dictionary - if "compiled_code" in dct: - del dct["compiled_code"] - return dct - @property def depends_on_nodes(self): return 
self.depends_on.nodes @@ -554,25 +415,24 @@ def depends_on_macros(self): @dataclass -class AnalysisNode(CompiledNode): - resource_type: NodeType = field(metadata={"restrict": [NodeType.Analysis]}) +class AnalysisNode(AnalysisResource, CompiledNode): + @classmethod + def resource_class(cls) -> Type[AnalysisResource]: + return AnalysisResource @dataclass -class HookNode(CompiledNode): - resource_type: NodeType = field(metadata={"restrict": [NodeType.Operation]}) - index: Optional[int] = None +class HookNode(HookNodeResource, CompiledNode): + @classmethod + def resource_class(cls) -> Type[HookNodeResource]: + return HookNodeResource @dataclass -class ModelNode(CompiledNode): - resource_type: NodeType = field(metadata={"restrict": [NodeType.Model]}) - access: AccessType = AccessType.Protected - constraints: List[ModelLevelConstraint] = field(default_factory=list) - version: Optional[NodeVersion] = None - latest_version: Optional[NodeVersion] = None - deprecation_date: Optional[datetime] = None - defer_relation: Optional[DeferRelation] = None +class ModelNode(ModelResource, CompiledNode): + @classmethod + def resource_class(cls) -> Type[ModelResource]: + return ModelResource @classmethod def from_args(cls, args: ModelNodeArgs) -> "ModelNode": @@ -590,7 +450,7 @@ def from_args(cls, args: ModelNodeArgs) -> "ModelNode": name=args.name, package_name=args.package_name, unique_id=unique_id, - fqn=[args.package_name, args.name], + fqn=args.fqn, version=args.version, latest_version=args.latest_version, relation_name=args.relation_name, @@ -604,7 +464,7 @@ def from_args(cls, args: ModelNodeArgs) -> "ModelNode": path="", unrendered_config=unrendered_config, depends_on=DependsOn(nodes=args.depends_on_nodes), - config=NodeConfig(enabled=args.enabled), + config=ModelConfig(enabled=args.enabled), ) @property @@ -615,6 +475,13 @@ def is_external_node(self) -> bool: def is_latest_version(self) -> bool: return self.version is not None and self.version == self.latest_version + @property + def is_past_deprecation_date(self) -> bool: + return ( + self.deprecation_date is not None + and self.deprecation_date < datetime.now().astimezone() + ) + @property def search_name(self): if self.version is None: @@ -626,6 +493,84 @@ def search_name(self): def materialization_enforces_constraints(self) -> bool: return self.config.materialized in ["table", "incremental"] + @property + def all_constraints(self) -> List[Union[ModelLevelConstraint, ColumnLevelConstraint]]: + constraints: List[Union[ModelLevelConstraint, ColumnLevelConstraint]] = [] + for model_level_constraint in self.constraints: + constraints.append(model_level_constraint) + + for column in self.columns.values(): + for column_level_constraint in column.constraints: + constraints.append(column_level_constraint) + + return constraints + + def infer_primary_key(self, data_tests: List["GenericTestNode"]) -> List[str]: + """ + Infers the columns that can be used as primary key of a model in the following order: + 1. Columns with primary key constraints + 2. Columns with unique and not_null data tests + 3. Columns with enabled unique or dbt_utils.unique_combination_of_columns data tests + 4. 
Columns with disabled unique or dbt_utils.unique_combination_of_columns data tests + """ + for constraint in self.constraints: + if constraint.type == ConstraintType.primary_key: + return constraint.columns + + for column, column_info in self.columns.items(): + for column_constraint in column_info.constraints: + if column_constraint.type == ConstraintType.primary_key: + return [column] + + columns_with_enabled_unique_tests = set() + columns_with_disabled_unique_tests = set() + columns_with_not_null_tests = set() + for test in data_tests: + columns = [] + if "column_name" in test.test_metadata.kwargs: + columns = [test.test_metadata.kwargs["column_name"]] + elif "combination_of_columns" in test.test_metadata.kwargs: + columns = test.test_metadata.kwargs["combination_of_columns"] + + for column in columns: + if test.test_metadata.name in ["unique", "unique_combination_of_columns"]: + if test.config.enabled: + columns_with_enabled_unique_tests.add(column) + else: + columns_with_disabled_unique_tests.add(column) + elif test.test_metadata.name == "not_null": + columns_with_not_null_tests.add(column) + + columns_with_unique_and_not_null_tests = [] + for column in columns_with_not_null_tests: + if ( + column in columns_with_enabled_unique_tests + or column in columns_with_disabled_unique_tests + ): + columns_with_unique_and_not_null_tests.append(column) + if columns_with_unique_and_not_null_tests: + return columns_with_unique_and_not_null_tests + + if columns_with_enabled_unique_tests: + return list(columns_with_enabled_unique_tests) + + if columns_with_disabled_unique_tests: + return list(columns_with_disabled_unique_tests) + + return [] + + def same_contents(self, old, adapter_type) -> bool: + return super().same_contents(old, adapter_type) and self.same_ref_representation(old) + + def same_ref_representation(self, old) -> bool: + return ( + # Changing the latest_version may break downstream unpinned refs + self.latest_version == old.latest_version + # Changes to access or deprecation_date may lead to ref-related parsing errors + and self.access == old.access + and self.deprecation_date == old.deprecation_date + ) + def build_contract_checksum(self): # We don't need to construct the checksum if the model does not # have contract enforced, because it won't be used. @@ -650,6 +595,42 @@ def build_contract_checksum(self): data = contract_state.encode("utf-8") self.contract.checksum = hashlib.new("sha256", data).hexdigest() + def same_contract_removed(self) -> bool: + """ + self: the removed (deleted, renamed, or disabled) model node + """ + # If the contract wasn't previously enforced, no contract change has occurred + if self.contract.enforced is False: + return True + + # Removed node is past its deprecation_date, so deletion does not constitute a contract change + if self.is_past_deprecation_date: + return True + + # Disabled, deleted, or renamed node with previously enforced contract. + if not self.config.enabled: + breaking_change = f"Contracted model '{self.unique_id}' was disabled." + else: + breaking_change = f"Contracted model '{self.unique_id}' was deleted or renamed." 
+ + if self.version is None: + warn_or_error( + UnversionedBreakingChange( + breaking_changes=[breaking_change], + model_name=self.name, + model_file_path=self.original_file_path, + ), + node=self, + ) + return False + else: + raise ( + ContractBreakingChangeError( + breaking_changes=[breaking_change], + node=self, + ) + ) + def same_contract(self, old, adapter_type=None) -> bool: # If the contract wasn't previously enforced: if old.contract.enforced is False and self.contract.enforced is False: @@ -670,21 +651,17 @@ def same_contract(self, old, adapter_type=None) -> bool: # These are the categories of breaking changes: contract_enforced_disabled: bool = False columns_removed: List[str] = [] - column_type_changes: List[Tuple[str, str, str]] = [] - enforced_column_constraint_removed: List[Tuple[str, str]] = [] # column, constraint_type - enforced_model_constraint_removed: List[ - Tuple[str, List[str]] - ] = [] # constraint_type, columns + column_type_changes: List[Dict[str, str]] = [] + enforced_column_constraint_removed: List[Dict[str, str]] = ( + [] + ) # column_name, constraint_type + enforced_model_constraint_removed: List[Dict[str, Any]] = [] # constraint_type, columns materialization_changed: List[str] = [] if old.contract.enforced is True and self.contract.enforced is False: # Breaking change: the contract was previously enforced, and it no longer is contract_enforced_disabled = True - # TODO: this avoid the circular imports but isn't ideal - from dbt.adapters.factory import get_adapter_constraint_support - from dbt.adapters.base import ConstraintSupport - constraint_support = get_adapter_constraint_support(adapter_type) column_constraints_exist = False @@ -696,11 +673,11 @@ def same_contract(self, old, adapter_type=None) -> bool: # Has this column's data type changed? elif old_value.data_type != self.columns[old_key].data_type: column_type_changes.append( - ( - str(old_value.name), - str(old_value.data_type), - str(self.columns[old_key].data_type), - ) + { + "column_name": str(old_value.name), + "previous_column_type": str(old_value.data_type), + "current_column_type": str(self.columns[old_key].data_type), + } ) # track if there are any column level constraints for the materialization check late @@ -721,7 +698,11 @@ def same_contract(self, old, adapter_type=None) -> bool: and constraint_support[old_constraint.type] == ConstraintSupport.ENFORCED ): enforced_column_constraint_removed.append( - (old_key, str(old_constraint.type)) + { + "column_name": old_key, + "constraint_name": old_constraint.name, + "constraint_type": ConstraintType(old_constraint.type), + } ) # Now compare the model level constraints @@ -732,7 +713,11 @@ def same_contract(self, old, adapter_type=None) -> bool: and constraint_support[old_constraint.type] == ConstraintSupport.ENFORCED ): enforced_model_constraint_removed.append( - (str(old_constraint.type), old_constraint.columns) + { + "constraint_name": old_constraint.name, + "constraint_type": ConstraintType(old_constraint.type), + "columns": old_constraint.columns, + } ) # Check for relevant materialization changes. @@ -746,7 +731,8 @@ def same_contract(self, old, adapter_type=None) -> bool: # If a column has been added, it will be missing in the old.columns, and present in self.columns # That's a change (caught by the different checksums), but not a breaking change - # Did we find any changes that we consider breaking? If so, that's an error + # Did we find any changes that we consider breaking? 
If there's an enforced contract, that's + # a warning unless the model is versioned, then it's an error. if ( contract_enforced_disabled or columns_removed @@ -755,32 +741,85 @@ def same_contract(self, old, adapter_type=None) -> bool: or enforced_column_constraint_removed or materialization_changed ): - raise ( - ContractBreakingChangeError( - contract_enforced_disabled=contract_enforced_disabled, - columns_removed=columns_removed, - column_type_changes=column_type_changes, - enforced_column_constraint_removed=enforced_column_constraint_removed, - enforced_model_constraint_removed=enforced_model_constraint_removed, - materialization_changed=materialization_changed, - node=self, + + breaking_changes = [] + if contract_enforced_disabled: + breaking_changes.append( + "Contract enforcement was removed: Previously, this model had an enforced contract. It is no longer configured to enforce its contract, and this is a breaking change." + ) + if columns_removed: + columns_removed_str = "\n - ".join(columns_removed) + breaking_changes.append(f"Columns were removed: \n - {columns_removed_str}") + if column_type_changes: + column_type_changes_str = "\n - ".join( + [ + f"{c['column_name']} ({c['previous_column_type']} -> {c['current_column_type']})" + for c in column_type_changes + ] + ) + breaking_changes.append( + f"Columns with data_type changes: \n - {column_type_changes_str}" + ) + if enforced_column_constraint_removed: + column_constraint_changes_str = "\n - ".join( + [ + f"'{c['constraint_name'] if c['constraint_name'] is not None else c['constraint_type']}' constraint on column {c['column_name']}" + for c in enforced_column_constraint_removed + ] + ) + breaking_changes.append( + f"Enforced column level constraints were removed: \n - {column_constraint_changes_str}" + ) + if enforced_model_constraint_removed: + model_constraint_changes_str = "\n - ".join( + [ + f"'{c['constraint_name'] if c['constraint_name'] is not None else c['constraint_type']}' constraint on columns {c['columns']}" + for c in enforced_model_constraint_removed + ] + ) + breaking_changes.append( + f"Enforced model level constraints were removed: \n - {model_constraint_changes_str}" + ) + if materialization_changed: + materialization_changes_str = ( + f"{materialization_changed[0]} -> {materialization_changed[1]}" ) - ) - # Otherwise, though we didn't find any *breaking* changes, the contract has still changed -- same_contract: False - else: - return False + breaking_changes.append( + f"Materialization changed with enforced constraints: \n - {materialization_changes_str}" + ) + if self.version is None: + warn_or_error( + UnversionedBreakingChange( + contract_enforced_disabled=contract_enforced_disabled, + columns_removed=columns_removed, + column_type_changes=column_type_changes, + enforced_column_constraint_removed=enforced_column_constraint_removed, + enforced_model_constraint_removed=enforced_model_constraint_removed, + breaking_changes=breaking_changes, + model_name=self.name, + model_file_path=self.original_file_path, + ), + node=self, + ) + else: + raise ( + ContractBreakingChangeError( + breaking_changes=breaking_changes, + node=self, + ) + ) -# TODO: rm? 
-@dataclass -class RPCNode(CompiledNode): - resource_type: NodeType = field(metadata={"restrict": [NodeType.RPCCall]}) + # Otherwise, the contract has changed -- same_contract: False + return False @dataclass -class SqlNode(CompiledNode): - resource_type: NodeType = field(metadata={"restrict": [NodeType.SqlOperation]}) +class SqlNode(SqlOperationResource, CompiledNode): + @classmethod + def resource_class(cls) -> Type[SqlOperationResource]: + return SqlOperationResource # ==================================== @@ -789,14 +828,10 @@ class SqlNode(CompiledNode): @dataclass -class SeedNode(ParsedNode): # No SQLDefaults! - resource_type: NodeType = field(metadata={"restrict": [NodeType.Seed]}) - config: SeedConfig = field(default_factory=SeedConfig) - # seeds need the root_path because the contents are not loaded initially - # and we need the root_path to load the seed later - root_path: Optional[str] = None - depends_on: MacroDependsOn = field(default_factory=MacroDependsOn) - defer_relation: Optional[DeferRelation] = None +class SeedNode(SeedResource, ParsedNode): # No SQLDefaults! + @classmethod + def resource_class(cls) -> Type[SeedResource]: + return SeedResource def same_seeds(self, other: "SeedNode") -> bool: # for seeds, we check the hashes. If the hashes are different types, @@ -895,6 +930,11 @@ def language(self): return "sql" +# @property +# def compiled_code(self): +# return None + + # ==================================== # Singular Test node # ==================================== @@ -915,11 +955,10 @@ def is_relational(self): @dataclass -class SingularTestNode(TestShouldStoreFailures, CompiledNode): - resource_type: NodeType = field(metadata={"restrict": [NodeType.Test]}) - # Was not able to make mypy happy and keep the code working. We need to - # refactor the various configs. - config: TestConfig = field(default_factory=TestConfig) # type: ignore +class SingularTestNode(SingularTestResource, TestShouldStoreFailures, CompiledNode): + @classmethod + def resource_class(cls) -> Type[SingularTestResource]: + return SingularTestResource @property def test_node_type(self): @@ -932,43 +971,85 @@ def test_node_type(self): @dataclass -class TestMetadata(dbtClassMixin, Replaceable): - __test__ = False +class GenericTestNode(GenericTestResource, TestShouldStoreFailures, CompiledNode): + @classmethod + def resource_class(cls) -> Type[GenericTestResource]: + return GenericTestResource - name: str - # kwargs are the args that are left in the test builder after - # removing configs. They are set from the test builder when - # the test node is created. 
- kwargs: Dict[str, Any] = field(default_factory=dict) - namespace: Optional[str] = None + def same_contents(self, other, adapter_type: Optional[str]) -> bool: + if other is None: + return False + + return self.same_config(other) and self.same_fqn(other) and True + + @property + def test_node_type(self): + return "generic" -# This has to be separated out because it has no default and so -# has to be included as a superclass, not an attribute @dataclass -class HasTestMetadata(dbtClassMixin): - test_metadata: TestMetadata +class UnitTestSourceDefinition(ModelNode): + source_name: str = "undefined" + quoting: QuotingResource = field(default_factory=QuotingResource) + + @property + def search_name(self): + return f"{self.source_name}.{self.name}" @dataclass -class GenericTestNode(TestShouldStoreFailures, CompiledNode, HasTestMetadata): - resource_type: NodeType = field(metadata={"restrict": [NodeType.Test]}) - column_name: Optional[str] = None - file_key_name: Optional[str] = None - # Was not able to make mypy happy and keep the code working. We need to - # refactor the various configs. - config: TestConfig = field(default_factory=TestConfig) # type: ignore - attached_node: Optional[str] = None +class UnitTestNode(CompiledNode): + resource_type: Literal[NodeType.Unit] + tested_node_unique_id: Optional[str] = None + this_input_node_unique_id: Optional[str] = None + overrides: Optional[UnitTestOverrides] = None + config: UnitTestNodeConfig = field(default_factory=UnitTestNodeConfig) - def same_contents(self, other, adapter_type: Optional[str]) -> bool: + +@dataclass +class UnitTestDefinition(NodeInfoMixin, GraphNode, UnitTestDefinitionResource): + @classmethod + def resource_class(cls) -> Type[UnitTestDefinitionResource]: + return UnitTestDefinitionResource + + @property + def depends_on_nodes(self): + return self.depends_on.nodes + + @property + def tags(self) -> List[str]: + tags = self.config.tags + return [tags] if isinstance(tags, str) else tags + + @property + def versioned_name(self) -> str: + versioned_name = self.name + if self.version is not None: + versioned_name += f"_v{self.version}" + return versioned_name + + def build_unit_test_checksum(self): + # everything except 'description' + data = f"{self.model}-{self.versions}-{self.given}-{self.expect}-{self.overrides}" + + # include underlying fixture data + for input in self.given: + if input.fixture: + data += f"-{input.rows}" + + self.checksum = hashlib.new("sha256", data.encode("utf-8")).hexdigest() + + def same_contents(self, other: Optional["UnitTestDefinition"]) -> bool: if other is None: return False - return self.same_config(other) and self.same_fqn(other) and True + return self.checksum == other.checksum - @property - def test_node_type(self): - return "generic" + +@dataclass +class UnitTestFileFixture(BaseNode): + resource_type: Literal[NodeType.Fixture] + rows: Optional[Union[List[Dict[str, Any]], str]] = None # ==================================== @@ -977,23 +1058,10 @@ def test_node_type(self): @dataclass -class IntermediateSnapshotNode(CompiledNode): - # at an intermediate stage in parsing, where we've built something better - # than an unparsed node for rendering in parse mode, it's pretty possible - # that we won't have critical snapshot-related information that is only - # defined in config blocks. To fix that, we have an intermediate type that - # uses a regular node config, which the snapshot parser will then convert - # into a full ParsedSnapshotNode after rendering. 
Note: it currently does - # not work to set snapshot config in schema files because of the validation. - resource_type: NodeType = field(metadata={"restrict": [NodeType.Snapshot]}) - config: EmptySnapshotConfig = field(default_factory=EmptySnapshotConfig) - - -@dataclass -class SnapshotNode(CompiledNode): - resource_type: NodeType = field(metadata={"restrict": [NodeType.Snapshot]}) - config: SnapshotConfig - defer_relation: Optional[DeferRelation] = None +class SnapshotNode(SnapshotResource, CompiledNode): + @classmethod + def resource_class(cls) -> Type[SnapshotResource]: + return SnapshotResource # ==================================== @@ -1002,17 +1070,10 @@ class SnapshotNode(CompiledNode): @dataclass -class Macro(BaseNode): - macro_sql: str - resource_type: NodeType = field(metadata={"restrict": [NodeType.Macro]}) - depends_on: MacroDependsOn = field(default_factory=MacroDependsOn) - description: str = "" - meta: Dict[str, Any] = field(default_factory=dict) - docs: Docs = field(default_factory=Docs) - patch_path: Optional[str] = None - arguments: List[MacroArgument] = field(default_factory=list) - created_at: float = field(default_factory=lambda: time.time()) - supported_languages: Optional[List[ModelLanguage]] = None +class Macro(MacroResource, BaseNode): + @classmethod + def resource_class(cls) -> Type[MacroResource]: + return MacroResource def same_contents(self, other: Optional["Macro"]) -> bool: if other is None: @@ -1032,9 +1093,10 @@ def depends_on_macros(self): @dataclass -class Documentation(BaseNode): - block_contents: str - resource_type: NodeType = field(metadata={"restrict": [NodeType.Documentation]}) +class Documentation(DocumentationResource, BaseNode): + @classmethod + def resource_class(cls) -> Type[DocumentationResource]: + return DocumentationResource @property def search_name(self): @@ -1065,7 +1127,7 @@ class UnpatchedSourceDefinition(BaseNode): source: UnparsedSourceDefinition table: UnparsedSourceTableDefinition fqn: List[str] - resource_type: NodeType = field(metadata={"restrict": [NodeType.Source]}) + resource_type: Literal[NodeType.Source] patch_path: Optional[str] = None def get_full_source_name(self): @@ -1074,6 +1136,30 @@ def get_full_source_name(self): def get_source_representation(self): return f'source("{self.source.name}", "{self.table.name}")' + def validate_data_tests(self, is_root_project: bool): + """ + sources parse tests differently than models, so we need to do some validation + here where it's done in the PatchParser for other nodes + """ + # source table-level tests + if self.tests and self.data_tests: + raise ValidationError( + "Invalid test config: cannot have both 'tests' and 'data_tests' defined" + ) + if self.tests: + self.data_tests.extend(self.tests) + self.tests.clear() + + # column-level tests + for column in self.columns: + if column.tests and column.data_tests: + raise ValidationError( + "Invalid test config: cannot have both 'tests' and 'data_tests' defined" + ) + if column.tests: + column.data_tests.extend(column.tests) + column.tests.clear() + @property def quote_columns(self) -> Optional[bool]: result = None @@ -1088,14 +1174,22 @@ def columns(self) -> Sequence[UnparsedColumn]: return [] if self.table.columns is None else self.table.columns def get_tests(self) -> Iterator[Tuple[Dict[str, Any], Optional[UnparsedColumn]]]: - for test in self.tests: - yield normalize_test(test), None + for data_test in self.data_tests: + yield normalize_test(data_test), None for column in self.columns: - if column.tests is not None: - for test in 
column.tests: - yield normalize_test(test), column + if column.data_tests is not None: + for data_test in column.data_tests: + yield normalize_test(data_test), column + + @property + def data_tests(self) -> List[TestDef]: + if self.table.data_tests is None: + return [] + else: + return self.table.data_tests + # deprecated @property def tests(self) -> List[TestDef]: if self.table.tests is None: @@ -1105,35 +1199,15 @@ def tests(self) -> List[TestDef]: @dataclass -class ParsedSourceMandatory(GraphNode, HasRelationMetadata): - source_name: str - source_description: str - loader: str - identifier: str - resource_type: NodeType = field(metadata={"restrict": [NodeType.Source]}) - - -@dataclass -class SourceDefinition(NodeInfoMixin, ParsedSourceMandatory): - quoting: Quoting = field(default_factory=Quoting) - loaded_at_field: Optional[str] = None - freshness: Optional[FreshnessThreshold] = None - external: Optional[ExternalTable] = None - description: str = "" - columns: Dict[str, ColumnInfo] = field(default_factory=dict) - meta: Dict[str, Any] = field(default_factory=dict) - source_meta: Dict[str, Any] = field(default_factory=dict) - tags: List[str] = field(default_factory=list) - config: SourceConfig = field(default_factory=SourceConfig) - patch_path: Optional[str] = None - unrendered_config: Dict[str, Any] = field(default_factory=dict) - relation_name: Optional[str] = None - created_at: float = field(default_factory=lambda: time.time()) - - def __post_serialize__(self, dct): - if "_event_status" in dct: - del dct["_event_status"] - return dct +class SourceDefinition( + NodeInfoMixin, + GraphNode, + SourceDefinitionResource, + HasRelationMetadata, +): + @classmethod + def resource_class(cls) -> Type[SourceDefinitionResource]: + return SourceDefinitionResource def same_database_representation(self, other: "SourceDefinition") -> bool: return ( @@ -1220,13 +1294,17 @@ def sources(self): return [] @property - def has_freshness(self): - return bool(self.freshness) and self.loaded_at_field is not None + def has_freshness(self) -> bool: + return bool(self.freshness) @property def search_name(self): return f"{self.source_name}.{self.name}" + @property + def group(self): + return None + # ==================================== # Exposure node @@ -1234,24 +1312,7 @@ def search_name(self): @dataclass -class Exposure(GraphNode): - type: ExposureType - owner: Owner - resource_type: NodeType = field(metadata={"restrict": [NodeType.Exposure]}) - description: str = "" - label: Optional[str] = None - maturity: Optional[MaturityType] = None - meta: Dict[str, Any] = field(default_factory=dict) - tags: List[str] = field(default_factory=list) - config: ExposureConfig = field(default_factory=ExposureConfig) - unrendered_config: Dict[str, Any] = field(default_factory=dict) - url: Optional[str] = None - depends_on: DependsOn = field(default_factory=DependsOn) - refs: List[RefArgs] = field(default_factory=list) - sources: List[List[str]] = field(default_factory=list) - metrics: List[List[str]] = field(default_factory=list) - created_at: float = field(default_factory=lambda: time.time()) - +class Exposure(GraphNode, ExposureResource): @property def depends_on_nodes(self): return self.depends_on.nodes @@ -1260,6 +1321,10 @@ def depends_on_nodes(self): def search_name(self): return self.name + @classmethod + def resource_class(cls) -> Type[ExposureResource]: + return ExposureResource + def same_depends_on(self, old: "Exposure") -> bool: return set(self.depends_on.nodes) == set(old.depends_on.nodes) @@ -1317,87 +1382,7 @@ 
def group(self): @dataclass -class WhereFilter(dbtClassMixin): - where_sql_template: str - - @property - def call_parameter_sets(self) -> FilterCallParameterSets: - return WhereFilterParser.parse_call_parameter_sets(self.where_sql_template) - - -@dataclass -class MetricInputMeasure(dbtClassMixin): - name: str - filter: Optional[WhereFilter] = None - alias: Optional[str] = None - - def measure_reference(self) -> MeasureReference: - return MeasureReference(element_name=self.name) - - def post_aggregation_measure_reference(self) -> MeasureReference: - return MeasureReference(element_name=self.alias or self.name) - - -@dataclass -class MetricTimeWindow(dbtClassMixin): - count: int - granularity: TimeGranularity - - -@dataclass -class MetricInput(dbtClassMixin): - name: str - filter: Optional[WhereFilter] = None - alias: Optional[str] = None - offset_window: Optional[MetricTimeWindow] = None - offset_to_grain: Optional[TimeGranularity] = None - - def as_reference(self) -> DSIMetricReference: - return DSIMetricReference(element_name=self.name) - - def post_aggregation_reference(self) -> DSIMetricReference: - return DSIMetricReference(element_name=self.alias or self.name) - - -@dataclass -class MetricTypeParams(dbtClassMixin): - measure: Optional[MetricInputMeasure] = None - input_measures: List[MetricInputMeasure] = field(default_factory=list) - numerator: Optional[MetricInput] = None - denominator: Optional[MetricInput] = None - expr: Optional[str] = None - window: Optional[MetricTimeWindow] = None - grain_to_date: Optional[TimeGranularity] = None - metrics: Optional[List[MetricInput]] = None - - -@dataclass -class MetricReference(dbtClassMixin, Replaceable): - sql: Optional[Union[str, int]] = None - unique_id: Optional[str] = None - - -@dataclass -class Metric(GraphNode): - name: str - description: str - label: str - type: MetricType - type_params: MetricTypeParams - filter: Optional[WhereFilter] = None - metadata: Optional[SourceFileMetadata] = None - resource_type: NodeType = field(metadata={"restrict": [NodeType.Metric]}) - meta: Dict[str, Any] = field(default_factory=dict) - tags: List[str] = field(default_factory=list) - config: MetricConfig = field(default_factory=MetricConfig) - unrendered_config: Dict[str, Any] = field(default_factory=dict) - sources: List[List[str]] = field(default_factory=list) - depends_on: DependsOn = field(default_factory=DependsOn) - refs: List[RefArgs] = field(default_factory=list) - metrics: List[List[str]] = field(default_factory=list) - created_at: float = field(default_factory=lambda: time.time()) - group: Optional[str] = None - +class Metric(GraphNode, MetricResource): @property def depends_on_nodes(self): return self.depends_on.nodes @@ -1406,17 +1391,9 @@ def depends_on_nodes(self): def search_name(self): return self.name - @property - def input_measures(self) -> List[MetricInputMeasure]: - return self.type_params.input_measures - - @property - def measure_references(self) -> List[MeasureReference]: - return [x.measure_reference() for x in self.input_measures] - - @property - def input_metrics(self) -> List[MetricInput]: - return self.type_params.metrics or [] + @classmethod + def resource_class(cls) -> Type[MetricResource]: + return MetricResource def same_description(self, old: "Metric") -> bool: return self.description == old.description @@ -1459,6 +1436,12 @@ def same_contents(self, old: Optional["Metric"]) -> bool: and True ) + def add_input_measure(self, input_measure: MetricInputMeasure) -> None: + for existing_input_measure in 
self.type_params.input_measures: + if input_measure == existing_input_measure: + return + self.type_params.input_measures.append(input_measure) + # ==================================== # Group node @@ -1466,128 +1449,148 @@ def same_contents(self, old: Optional["Metric"]) -> bool: @dataclass -class Group(BaseNode): - name: str - owner: Owner - resource_type: NodeType = field(metadata={"restrict": [NodeType.Group]}) +class Group(GroupResource, BaseNode): + @classmethod + def resource_class(cls) -> Type[GroupResource]: + return GroupResource + + def to_logging_dict(self) -> Dict[str, Union[str, Dict[str, str]]]: + return { + "name": self.name, + "package_name": self.package_name, + "owner": self.owner.to_dict(), + } # ==================================== -# SemanticModel and related classes +# SemanticModel node # ==================================== @dataclass -class NodeRelation(dbtClassMixin): - alias: str - schema_name: str # TODO: Could this be called simply "schema" so we could reuse StateRelation? - database: Optional[str] = None - relation_name: Optional[str] = None - - -@dataclass -class SemanticModel(GraphNode): - model: str - node_relation: Optional[NodeRelation] - description: Optional[str] = None - defaults: Optional[Defaults] = None - entities: Sequence[Entity] = field(default_factory=list) - measures: Sequence[Measure] = field(default_factory=list) - dimensions: Sequence[Dimension] = field(default_factory=list) - metadata: Optional[SourceFileMetadata] = None - depends_on: DependsOn = field(default_factory=DependsOn) - refs: List[RefArgs] = field(default_factory=list) - created_at: float = field(default_factory=lambda: time.time()) - config: SemanticModelConfig = field(default_factory=SemanticModelConfig) - primary_entity: Optional[str] = None - +class SemanticModel(GraphNode, SemanticModelResource): @property - def entity_references(self) -> List[LinkableElementReference]: - return [entity.reference for entity in self.entities] + def depends_on_nodes(self): + return self.depends_on.nodes @property - def dimension_references(self) -> List[LinkableElementReference]: - return [dimension.reference for dimension in self.dimensions] + def depends_on_macros(self): + return self.depends_on.macros - @property - def measure_references(self) -> List[MeasureReference]: - return [measure.reference for measure in self.measures] + @classmethod + def resource_class(cls) -> Type[SemanticModelResource]: + return SemanticModelResource - @property - def has_validity_dimensions(self) -> bool: - return any([dim.validity_params is not None for dim in self.dimensions]) + def same_model(self, old: "SemanticModel") -> bool: + return self.model == old.model - @property - def validity_start_dimension(self) -> Optional[Dimension]: - validity_start_dims = [ - dim for dim in self.dimensions if dim.validity_params and dim.validity_params.is_start - ] - if not validity_start_dims: - return None - return validity_start_dims[0] + def same_description(self, old: "SemanticModel") -> bool: + return self.description == old.description - @property - def validity_end_dimension(self) -> Optional[Dimension]: - validity_end_dims = [ - dim for dim in self.dimensions if dim.validity_params and dim.validity_params.is_end - ] - if not validity_end_dims: - return None - return validity_end_dims[0] + def same_defaults(self, old: "SemanticModel") -> bool: + return self.defaults == old.defaults - @property - def partitions(self) -> List[Dimension]: # noqa: D - return [dim for dim in self.dimensions or [] if dim.is_partition] + 
def same_entities(self, old: "SemanticModel") -> bool: + return self.entities == old.entities - @property - def partition(self) -> Optional[Dimension]: - partitions = self.partitions - if not partitions: - return None - return partitions[0] + def same_dimensions(self, old: "SemanticModel") -> bool: + return self.dimensions == old.dimensions - @property - def reference(self) -> SemanticModelReference: - return SemanticModelReference(semantic_model_name=self.name) + def same_measures(self, old: "SemanticModel") -> bool: + return self.measures == old.measures - @property - def depends_on_nodes(self): - return self.depends_on.nodes + def same_config(self, old: "SemanticModel") -> bool: + return self.config == old.config - @property - def depends_on_macros(self): - return self.depends_on.macros + def same_primary_entity(self, old: "SemanticModel") -> bool: + return self.primary_entity == old.primary_entity - def checked_agg_time_dimension_for_measure( - self, measure_reference: MeasureReference - ) -> TimeDimensionReference: - measure: Optional[Measure] = None - for measure in self.measures: - if measure.reference == measure_reference: - measure = measure + def same_group(self, old: "SemanticModel") -> bool: + return self.group == old.group - assert ( - measure is not None - ), f"No measure with name ({measure_reference.element_name}) in semantic_model with name ({self.name})" + def same_contents(self, old: Optional["SemanticModel"]) -> bool: + # existing when it didn't before is a change! + # metadata/tags changes are not "changes" + if old is None: + return True - default_agg_time_dimension = ( - self.defaults.agg_time_dimension if self.defaults is not None else None + return ( + self.same_model(old) + and self.same_description(old) + and self.same_defaults(old) + and self.same_entities(old) + and self.same_dimensions(old) + and self.same_measures(old) + and self.same_config(old) + and self.same_primary_entity(old) + and self.same_group(old) + and True ) - agg_time_dimension_name = measure.agg_time_dimension or default_agg_time_dimension - assert agg_time_dimension_name is not None, ( - f"Aggregation time dimension for measure {measure.name} on semantic model {self.name} is not set! " - "To fix this either specify a default `agg_time_dimension` for the semantic model or define an " - "`agg_time_dimension` on the measure directly." 
- ) - return TimeDimensionReference(element_name=agg_time_dimension_name) - @property - def primary_entity_reference(self) -> Optional[EntityReference]: +# ==================================== +# SavedQuery +# ==================================== + + +@dataclass +class SavedQuery(NodeInfoMixin, GraphNode, SavedQueryResource): + @classmethod + def resource_class(cls) -> Type[SavedQueryResource]: + return SavedQueryResource + + def same_metrics(self, old: "SavedQuery") -> bool: + return self.query_params.metrics == old.query_params.metrics + + def same_group_by(self, old: "SavedQuery") -> bool: + return self.query_params.group_by == old.query_params.group_by + + def same_description(self, old: "SavedQuery") -> bool: + return self.description == old.description + + def same_where(self, old: "SavedQuery") -> bool: + return self.query_params.where == old.query_params.where + + def same_label(self, old: "SavedQuery") -> bool: + return self.label == old.label + + def same_config(self, old: "SavedQuery") -> bool: + return self.config == old.config + + def same_group(self, old: "SavedQuery") -> bool: + return self.group == old.group + + def same_exports(self, old: "SavedQuery") -> bool: + if len(self.exports) != len(old.exports): + return False + + # exports should be in the same order, so we zip them for easy iteration + for old_export, new_export in zip(old.exports, self.exports): + if not (old_export.name == new_export.name): + return False + keys = ["export_as", "schema", "alias"] + for key in keys: + if old_export.unrendered_config.get(key) != new_export.unrendered_config.get(key): + return False + + return True + + def same_contents(self, old: Optional["SavedQuery"]) -> bool: + # existing when it didn't before is a change! + # metadata/tags changes are not "changes" + if old is None: + return True + return ( - EntityReference(element_name=self.primary_entity) - if self.primary_entity is not None - else None + self.same_metrics(old) + and self.same_group_by(old) + and self.same_description(old) + and self.same_where(old) + and self.same_label(old) + and self.same_config(old) + and self.same_group(old) + and self.same_exports(old) + and True ) @@ -1597,7 +1600,7 @@ def primary_entity_reference(self) -> Optional[EntityReference]: @dataclass -class ParsedPatch(HasYamlMetadata, Replaceable): +class ParsedPatch(HasYamlMetadata): name: str description: str meta: Dict[str, Any] @@ -1616,6 +1619,7 @@ class ParsedNodePatch(ParsedPatch): latest_version: Optional[NodeVersion] constraints: List[Dict[str, Any]] deprecation_date: Optional[datetime] + time_spine: Optional[TimeSpine] = None @dataclass @@ -1635,10 +1639,10 @@ class ParsedMacroPatch(ParsedPatch): SingularTestNode, HookNode, ModelNode, - RPCNode, SqlNode, GenericTestNode, SnapshotNode, + UnitTestNode, ] # All SQL nodes plus SeedNode (csv files) @@ -1657,7 +1661,9 @@ class ParsedMacroPatch(ParsedPatch): ResultNode, Exposure, Metric, + SavedQuery, SemanticModel, + UnitTestDefinition, ] # All "nodes" (or node-like objects) in this file @@ -1668,7 +1674,11 @@ class ParsedMacroPatch(ParsedPatch): Group, ] -TestNode = Union[ - SingularTestNode, - GenericTestNode, -] +TestNode = Union[SingularTestNode, GenericTestNode] + + +RESOURCE_CLASS_TO_NODE_CLASS: Dict[Type[BaseResource], Type[BaseNode]] = { + node_class.resource_class(): node_class + for node_class in get_args(Resource) + if node_class is not UnitTestNode +} diff --git a/core/dbt/contracts/graph/semantic_manifest.py b/core/dbt/contracts/graph/semantic_manifest.py index 8c599b1a7fa..c8bc2d6685c 
100644 --- a/core/dbt/contracts/graph/semantic_manifest.py +++ b/core/dbt/contracts/graph/semantic_manifest.py @@ -1,26 +1,43 @@ +from typing import List, Optional + +from dbt.constants import ( + LEGACY_TIME_SPINE_GRANULARITY, + LEGACY_TIME_SPINE_MODEL_NAME, + MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY, +) +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import ModelNode +from dbt.events.types import SemanticValidationFailure +from dbt.exceptions import ParsingError +from dbt_common.clients.system import write_file +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event from dbt_semantic_interfaces.implementations.metric import PydanticMetric +from dbt_semantic_interfaces.implementations.node_relation import PydanticNodeRelation from dbt_semantic_interfaces.implementations.project_configuration import ( PydanticProjectConfiguration, ) -from dbt_semantic_interfaces.implementations.semantic_manifest import PydanticSemanticManifest +from dbt_semantic_interfaces.implementations.saved_query import PydanticSavedQuery +from dbt_semantic_interfaces.implementations.semantic_manifest import ( + PydanticSemanticManifest, +) from dbt_semantic_interfaces.implementations.semantic_model import PydanticSemanticModel +from dbt_semantic_interfaces.implementations.time_spine import ( + PydanticTimeSpine, + PydanticTimeSpineCustomGranularityColumn, + PydanticTimeSpinePrimaryColumn, +) from dbt_semantic_interfaces.implementations.time_spine_table_configuration import ( - PydanticTimeSpineTableConfiguration, + PydanticTimeSpineTableConfiguration as LegacyTimeSpine, ) from dbt_semantic_interfaces.type_enums import TimeGranularity from dbt_semantic_interfaces.validations.semantic_manifest_validator import ( SemanticManifestValidator, ) -from dbt.clients.system import write_file -from dbt.events.base_types import EventLevel -from dbt.events.functions import fire_event -from dbt.events.types import SemanticValidationFailure -from dbt.exceptions import ParsingError - class SemanticManifest: - def __init__(self, manifest): + def __init__(self, manifest: Manifest) -> None: self.manifest = manifest def validate(self) -> bool: @@ -56,8 +73,56 @@ def write_json_to_file(self, file_path: str): write_file(file_path, json) def _get_pydantic_semantic_manifest(self) -> PydanticSemanticManifest: + pydantic_time_spines: List[PydanticTimeSpine] = [] + minimum_time_spine_granularity: Optional[TimeGranularity] = None + for node in self.manifest.nodes.values(): + if not (isinstance(node, ModelNode) and node.time_spine): + continue + time_spine = node.time_spine + standard_granularity_column = None + for column in node.columns.values(): + if column.name == time_spine.standard_granularity_column: + standard_granularity_column = column + break + # Assertions needed for type checking + if not standard_granularity_column: + raise ParsingError( + "Expected to find time spine standard granularity column in model columns, but did not. " + "This should have been caught in YAML parsing." + ) + if not standard_granularity_column.granularity: + raise ParsingError( + "Expected to find granularity set for time spine standard granularity column, but did not. " + "This should have been caught in YAML parsing." 
+ ) + pydantic_time_spine = PydanticTimeSpine( + node_relation=PydanticNodeRelation( + alias=node.alias, + schema_name=node.schema, + database=node.database, + relation_name=node.relation_name, + ), + primary_column=PydanticTimeSpinePrimaryColumn( + name=time_spine.standard_granularity_column, + time_granularity=standard_granularity_column.granularity, + ), + custom_granularities=[ + PydanticTimeSpineCustomGranularityColumn( + name=custom_granularity.name, column_name=custom_granularity.column_name + ) + for custom_granularity in time_spine.custom_granularities + ], + ) + pydantic_time_spines.append(pydantic_time_spine) + if ( + not minimum_time_spine_granularity + or standard_granularity_column.granularity.to_int() + < minimum_time_spine_granularity.to_int() + ): + minimum_time_spine_granularity = standard_granularity_column.granularity + project_config = PydanticProjectConfiguration( - time_spine_table_configurations=[], + time_spine_table_configurations=[], time_spines=pydantic_time_spines ) pydantic_semantic_manifest = PydanticSemanticManifest( metrics=[], semantic_models=[], project_configuration=project_config @@ -71,25 +136,44 @@ def _get_pydantic_semantic_manifest(self) -> PydanticSemanticManifest: for metric in self.manifest.metrics.values(): pydantic_semantic_manifest.metrics.append(PydanticMetric.parse_obj(metric.to_dict())) - # Look for time-spine table model and create time spine table configuration + for saved_query in self.manifest.saved_queries.values(): + pydantic_semantic_manifest.saved_queries.append( + PydanticSavedQuery.parse_obj(saved_query.to_dict()) + ) + if self.manifest.semantic_models: - # Get model for time_spine_table - time_spine_model_name = "metricflow_time_spine" - model = self.manifest.ref_lookup.find(time_spine_model_name, None, None, self.manifest) - if not model: + legacy_time_spine_model = self.manifest.ref_lookup.find( + LEGACY_TIME_SPINE_MODEL_NAME, None, None, self.manifest + ) + if legacy_time_spine_model: + if ( + not minimum_time_spine_granularity + or LEGACY_TIME_SPINE_GRANULARITY.to_int() + < minimum_time_spine_granularity.to_int() + ): + minimum_time_spine_granularity = LEGACY_TIME_SPINE_GRANULARITY + + # If no time spines have been configured at DAY or smaller AND legacy time spine model does not exist, error. + if ( + not minimum_time_spine_granularity + or minimum_time_spine_granularity.to_int() + > MINIMUM_REQUIRED_TIME_SPINE_GRANULARITY.to_int() + ): raise ParsingError( - "The semantic layer requires a 'metricflow_time_spine' model in the project, but none was found. " - "Guidance on creating this model can be found on our docs site (" - "https://docs.getdbt.com/docs/build/metricflow-time-spine) " + "The semantic layer requires a time spine model with granularity DAY or smaller in the project, " + "but none was found. Guidance on creating this model can be found on our docs site " + "(https://docs.getdbt.com/docs/build/metricflow-time-spine)." ) - # Create time_spine_table_config, set it in project_config, and add to semantic manifest - time_spine_table_config = PydanticTimeSpineTableConfiguration( - location=model.relation_name, - column_name="date_day", - grain=TimeGranularity.DAY, - ) - pydantic_semantic_manifest.project_configuration.time_spine_table_configurations = [ - time_spine_table_config - ] + + # For backward compatibility: if legacy time spine exists, include it in the manifest. 
+ if legacy_time_spine_model: + legacy_time_spine = LegacyTimeSpine( + location=legacy_time_spine_model.relation_name, + column_name="date_day", + grain=LEGACY_TIME_SPINE_GRANULARITY, + ) + pydantic_semantic_manifest.project_configuration.time_spine_table_configurations = [ + legacy_time_spine + ] return pydantic_semantic_manifest diff --git a/core/dbt/contracts/graph/semantic_models.py b/core/dbt/contracts/graph/semantic_models.py deleted file mode 100644 index 2bb75382c46..00000000000 --- a/core/dbt/contracts/graph/semantic_models.py +++ /dev/null @@ -1,147 +0,0 @@ -from dataclasses import dataclass -from dbt.dataclass_schema import dbtClassMixin -from dbt_semantic_interfaces.references import ( - DimensionReference, - EntityReference, - MeasureReference, - TimeDimensionReference, -) -from dbt_semantic_interfaces.type_enums import ( - AggregationType, - DimensionType, - EntityType, - TimeGranularity, -) -from typing import List, Optional - - -@dataclass -class FileSlice(dbtClassMixin): - """Provides file slice level context about what something was created from. - - Implementation of the dbt-semantic-interfaces `FileSlice` protocol - """ - - filename: str - content: str - start_line_number: int - end_line_number: int - - -@dataclass -class SourceFileMetadata(dbtClassMixin): - """Provides file context about what something was created from. - - Implementation of the dbt-semantic-interfaces `Metadata` protocol - """ - - repo_file_path: str - file_slice: FileSlice - - -@dataclass -class Defaults(dbtClassMixin): - agg_time_dimension: Optional[str] = None - - -# ==================================== -# Dimension objects -# ==================================== - - -@dataclass -class DimensionValidityParams(dbtClassMixin): - is_start: bool = False - is_end: bool = False - - -@dataclass -class DimensionTypeParams(dbtClassMixin): - time_granularity: TimeGranularity - validity_params: Optional[DimensionValidityParams] = None - - -@dataclass -class Dimension(dbtClassMixin): - name: str - type: DimensionType - description: Optional[str] = None - is_partition: bool = False - type_params: Optional[DimensionTypeParams] = None - expr: Optional[str] = None - metadata: Optional[SourceFileMetadata] = None - - @property - def reference(self) -> DimensionReference: - return DimensionReference(element_name=self.name) - - @property - def time_dimension_reference(self) -> Optional[TimeDimensionReference]: - if self.type == DimensionType.TIME: - return TimeDimensionReference(element_name=self.name) - else: - return None - - @property - def validity_params(self) -> Optional[DimensionValidityParams]: - if self.type_params: - return self.type_params.validity_params - else: - return None - - -# ==================================== -# Entity objects -# ==================================== - - -@dataclass -class Entity(dbtClassMixin): - name: str - type: EntityType - description: Optional[str] = None - role: Optional[str] = None - expr: Optional[str] = None - - @property - def reference(self) -> EntityReference: - return EntityReference(element_name=self.name) - - @property - def is_linkable_entity_type(self) -> bool: - return self.type in (EntityType.PRIMARY, EntityType.UNIQUE, EntityType.NATURAL) - - -# ==================================== -# Measure objects -# ==================================== - - -@dataclass -class MeasureAggregationParameters(dbtClassMixin): - percentile: Optional[float] = None - use_discrete_percentile: bool = False - use_approximate_percentile: bool = False - - -@dataclass -class 
NonAdditiveDimension(dbtClassMixin): - name: str - window_choice: AggregationType - window_groupings: List[str] - - -@dataclass -class Measure(dbtClassMixin): - name: str - agg: AggregationType - description: Optional[str] = None - create_metric: bool = False - expr: Optional[str] = None - agg_params: Optional[MeasureAggregationParameters] = None - non_additive_dimension: Optional[NonAdditiveDimension] = None - agg_time_dimension: Optional[str] = None - - @property - def reference(self) -> MeasureReference: - return MeasureReference(element_name=self.name) diff --git a/core/dbt/contracts/graph/unparsed.py b/core/dbt/contracts/graph/unparsed.py index 585ac9cc3e0..847be3d3a2a 100644 --- a/core/dbt/contracts/graph/unparsed.py +++ b/core/dbt/contracts/graph/unparsed.py @@ -1,33 +1,51 @@ import datetime import re +from dataclasses import dataclass, field +from pathlib import Path +from typing import Any, Dict, List, Literal, Optional, Sequence, Union +# trigger the PathEncoder +import dbt_common.helper_types # noqa:F401 from dbt import deprecations -from dbt.node_types import NodeType -from dbt.contracts.graph.semantic_models import ( +from dbt.artifacts.resources import ( + ConstantPropertyInput, Defaults, DimensionValidityParams, + Docs, + ExposureType, + ExternalTable, + FreshnessThreshold, + MacroArgument, + MaturityType, MeasureAggregationParameters, + NodeVersion, + Owner, + Quoting, + TimeSpine, + UnitTestInputFixture, + UnitTestNodeVersions, + UnitTestOutputFixture, + UnitTestOverrides, ) -from dbt.contracts.util import ( - AdditionalPropertiesMixin, - Mergeable, - Replaceable, +from dbt.exceptions import ParsingError +from dbt.node_types import NodeType +from dbt_common.contracts.config.properties import AdditionalPropertiesMixin +from dbt_common.contracts.util import Mergeable +from dbt_common.dataclass_schema import ( + ExtensibleDbtClassMixin, + StrEnum, + ValidationError, + dbtClassMixin, +) +from dbt_common.exceptions import DbtInternalError +from dbt_semantic_interfaces.type_enums import ( + ConversionCalculationType, + PeriodAggregation, ) - -# trigger the PathEncoder -import dbt.helper_types # noqa:F401 -from dbt.exceptions import CompilationError, ParsingError, DbtInternalError - -from dbt.dataclass_schema import dbtClassMixin, StrEnum, ExtensibleDbtClassMixin, ValidationError - -from dataclasses import dataclass, field -from datetime import timedelta -from pathlib import Path -from typing import Optional, List, Union, Dict, Any, Sequence @dataclass -class UnparsedBaseNode(dbtClassMixin, Replaceable): +class UnparsedBaseNode(dbtClassMixin): package_name: str path: str original_file_path: str @@ -49,31 +67,18 @@ def empty(self): @dataclass class UnparsedMacro(UnparsedBaseNode, HasCode): - resource_type: NodeType = field(metadata={"restrict": [NodeType.Macro]}) + resource_type: Literal[NodeType.Macro] @dataclass class UnparsedGenericTest(UnparsedBaseNode, HasCode): - resource_type: NodeType = field(metadata={"restrict": [NodeType.Macro]}) + resource_type: Literal[NodeType.Macro] @dataclass class UnparsedNode(UnparsedBaseNode, HasCode): name: str - resource_type: NodeType = field( - metadata={ - "restrict": [ - NodeType.Model, - NodeType.Analysis, - NodeType.Test, - NodeType.Snapshot, - NodeType.Operation, - NodeType.Seed, - NodeType.RPCCall, - NodeType.SqlOperation, - ] - } - ) + resource_type: NodeType @property def search_name(self): @@ -82,18 +87,12 @@ def search_name(self): @dataclass class UnparsedRunHook(UnparsedNode): - resource_type: NodeType = 
field(metadata={"restrict": [NodeType.Operation]}) + resource_type: Literal[NodeType.Operation] index: Optional[int] = None @dataclass -class Docs(dbtClassMixin, Replaceable): - show: bool = True - node_color: Optional[str] = None - - -@dataclass -class HasColumnProps(AdditionalPropertiesMixin, ExtensibleDbtClassMixin, Replaceable): +class HasColumnProps(AdditionalPropertiesMixin, ExtensibleDbtClassMixin): name: str description: str = "" meta: Dict[str, Any] = field(default_factory=dict) @@ -108,22 +107,26 @@ class HasColumnProps(AdditionalPropertiesMixin, ExtensibleDbtClassMixin, Replace @dataclass class HasColumnAndTestProps(HasColumnProps): - tests: List[TestDef] = field(default_factory=list) + data_tests: List[TestDef] = field(default_factory=list) + tests: List[TestDef] = field( + default_factory=list + ) # back compat for previous name of 'data_tests' @dataclass class UnparsedColumn(HasColumnAndTestProps): quote: Optional[bool] = None tags: List[str] = field(default_factory=list) + granularity: Optional[str] = None # str is really a TimeGranularity Enum @dataclass -class HasColumnDocs(dbtClassMixin, Replaceable): +class HasColumnDocs(dbtClassMixin): columns: Sequence[HasColumnProps] = field(default_factory=list) @dataclass -class HasColumnTests(dbtClassMixin, Replaceable): +class HasColumnTests(dbtClassMixin): columns: Sequence[UnparsedColumn] = field(default_factory=list) @@ -143,9 +146,6 @@ class HasConfig: config: Dict[str, Any] = field(default_factory=dict) -NodeVersion = Union[str, float] - - @dataclass class UnparsedVersion(dbtClassMixin): v: NodeVersion @@ -155,25 +155,21 @@ class UnparsedVersion(dbtClassMixin): config: Dict[str, Any] = field(default_factory=dict) constraints: List[Dict[str, Any]] = field(default_factory=list) docs: Docs = field(default_factory=Docs) - tests: Optional[List[TestDef]] = None - columns: Sequence[Union[dbt.helper_types.IncludeExclude, UnparsedColumn]] = field( + data_tests: Optional[List[TestDef]] = None + tests: Optional[List[TestDef]] = None # back compat for previous name of 'data_tests' + columns: Sequence[Union[dbt_common.helper_types.IncludeExclude, UnparsedColumn]] = field( default_factory=list ) deprecation_date: Optional[datetime.datetime] = None def __lt__(self, other): try: - v = type(other.v)(self.v) - return v < other.v + return float(self.v) < float(other.v) except ValueError: - try: - other_v = type(self.v)(other.v) - return self.v < other_v - except ValueError: - return str(self.v) < str(other.v) + return str(self.v) < str(other.v) @property - def include_exclude(self) -> dbt.helper_types.IncludeExclude: + def include_exclude(self) -> dbt_common.helper_types.IncludeExclude: return self._include_exclude @property @@ -186,10 +182,10 @@ def formatted_v(self) -> str: def __post_init__(self): has_include_exclude = False - self._include_exclude = dbt.helper_types.IncludeExclude(include="*") + self._include_exclude = dbt_common.helper_types.IncludeExclude(include="*") self._unparsed_columns = [] for column in self.columns: - if isinstance(column, dbt.helper_types.IncludeExclude): + if isinstance(column, dbt_common.helper_types.IncludeExclude): if not has_include_exclude: self._include_exclude = column has_include_exclude = True @@ -219,8 +215,9 @@ class UnparsedModelUpdate(UnparsedNodeUpdate): latest_version: Optional[NodeVersion] = None versions: Sequence[UnparsedVersion] = field(default_factory=list) deprecation_date: Optional[datetime.datetime] = None + time_spine: Optional[TimeSpine] = None - def __post_init__(self): + def 
__post_init__(self) -> None: if self.latest_version: version_values = [version.v for version in self.versions] if self.latest_version not in version_values: @@ -228,7 +225,7 @@ def __post_init__(self): f"latest_version: {self.latest_version} is not one of model '{self.name}' versions: {version_values} " ) - seen_versions: set[str] = set() + seen_versions = set() for version in self.versions: if str(version.v) in seen_versions: raise ParsingError( @@ -240,6 +237,41 @@ def __post_init__(self): self.deprecation_date = normalize_date(self.deprecation_date) + if self.time_spine: + columns = ( + self.get_columns_for_version(self.latest_version) + if self.latest_version + else self.columns + ) + column_names_to_columns = {column.name: column for column in columns} + if self.time_spine.standard_granularity_column not in column_names_to_columns: + raise ParsingError( + f"Time spine standard granularity column must be defined on the model. Got invalid " + f"column name '{self.time_spine.standard_granularity_column}' for model '{self.name}'. Valid names" + f"{' for latest version' if self.latest_version else ''}: {list(column_names_to_columns.keys())}." + ) + standard_column = column_names_to_columns[self.time_spine.standard_granularity_column] + if not standard_column.granularity: + raise ParsingError( + f"Time spine standard granularity column must have a granularity defined. Please add one for " + f"column '{self.time_spine.standard_granularity_column}' in model '{self.name}'." + ) + custom_granularity_columns_not_found = [] + for custom_granularity in self.time_spine.custom_granularities: + column_name = ( + custom_granularity.column_name + if custom_granularity.column_name + else custom_granularity.name + ) + if column_name not in column_names_to_columns: + custom_granularity_columns_not_found.append(column_name) + if custom_granularity_columns_not_found: + raise ParsingError( + "Time spine custom granularity columns do not exist in the model. 
" + f"Columns not found: {custom_granularity_columns_not_found}; " + f"Available columns: {list(column_names_to_columns.keys())}" + ) + def get_columns_for_version(self, version: NodeVersion) -> List[UnparsedColumn]: if version not in self._version_map: raise DbtInternalError( @@ -263,14 +295,11 @@ def get_tests_for_version(self, version: NodeVersion) -> List[TestDef]: f"get_tests_for_version called for version '{version}' not in version map" ) unparsed_version = self._version_map[version] - return unparsed_version.tests if unparsed_version.tests is not None else self.tests - - -@dataclass -class MacroArgument(dbtClassMixin): - name: str - type: Optional[str] = None - description: str = "" + return ( + unparsed_version.data_tests + if unparsed_version.data_tests is not None + else self.data_tests + ) @dataclass @@ -278,107 +307,26 @@ class UnparsedMacroUpdate(HasConfig, HasColumnProps, HasYamlMetadata): arguments: List[MacroArgument] = field(default_factory=list) -class TimePeriod(StrEnum): - minute = "minute" - hour = "hour" - day = "day" - - def plural(self) -> str: - return str(self) + "s" - - -@dataclass -class Time(dbtClassMixin, Mergeable): - count: Optional[int] = None - period: Optional[TimePeriod] = None - - def exceeded(self, actual_age: float) -> bool: - if self.period is None or self.count is None: - return False - kwargs: Dict[str, int] = {self.period.plural(): self.count} - difference = timedelta(**kwargs).total_seconds() - return actual_age > difference - - def __bool__(self): - return self.count is not None and self.period is not None - - -@dataclass -class FreshnessThreshold(dbtClassMixin, Mergeable): - warn_after: Optional[Time] = field(default_factory=Time) - error_after: Optional[Time] = field(default_factory=Time) - filter: Optional[str] = None - - def status(self, age: float) -> "dbt.contracts.results.FreshnessStatus": - from dbt.contracts.results import FreshnessStatus - - if self.error_after and self.error_after.exceeded(age): - return FreshnessStatus.Error - elif self.warn_after and self.warn_after.exceeded(age): - return FreshnessStatus.Warn - else: - return FreshnessStatus.Pass - - def __bool__(self): - return bool(self.warn_after) or bool(self.error_after) - - -@dataclass -class AdditionalPropertiesAllowed(AdditionalPropertiesMixin, ExtensibleDbtClassMixin): - _extra: Dict[str, Any] = field(default_factory=dict) - - -@dataclass -class ExternalPartition(AdditionalPropertiesAllowed, Replaceable): - name: str = "" - description: str = "" - data_type: str = "" - meta: Dict[str, Any] = field(default_factory=dict) - - def __post_init__(self): - if self.name == "" or self.data_type == "": - raise CompilationError("External partition columns must have names and data types") - - -@dataclass -class ExternalTable(AdditionalPropertiesAllowed, Mergeable): - location: Optional[str] = None - file_format: Optional[str] = None - row_format: Optional[str] = None - tbl_properties: Optional[str] = None - partitions: Optional[Union[List[str], List[ExternalPartition]]] = None - - def __bool__(self): - return self.location is not None - - -@dataclass -class Quoting(dbtClassMixin, Mergeable): - database: Optional[bool] = None - schema: Optional[bool] = None - identifier: Optional[bool] = None - column: Optional[bool] = None - - @dataclass class UnparsedSourceTableDefinition(HasColumnTests, HasColumnAndTestProps): config: Dict[str, Any] = field(default_factory=dict) loaded_at_field: Optional[str] = None + loaded_at_field_present: Optional[bool] = None identifier: Optional[str] = None 
quoting: Quoting = field(default_factory=Quoting) freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold) external: Optional[ExternalTable] = None tags: List[str] = field(default_factory=list) - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) if "freshness" not in dct and self.freshness is None: dct["freshness"] = None return dct @dataclass -class UnparsedSourceDefinition(dbtClassMixin, Replaceable): +class UnparsedSourceDefinition(dbtClassMixin): name: str description: str = "" meta: Dict[str, Any] = field(default_factory=dict) @@ -388,16 +336,28 @@ class UnparsedSourceDefinition(dbtClassMixin, Replaceable): quoting: Quoting = field(default_factory=Quoting) freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold) loaded_at_field: Optional[str] = None + loaded_at_field_present: Optional[bool] = None tables: List[UnparsedSourceTableDefinition] = field(default_factory=list) tags: List[str] = field(default_factory=list) config: Dict[str, Any] = field(default_factory=dict) + @classmethod + def validate(cls, data): + super(UnparsedSourceDefinition, cls).validate(data) + + if data.get("loaded_at_field", None) == "": + raise ValidationError("loaded_at_field cannot be an empty string.") + if "tables" in data: + for table in data["tables"]: + if table.get("loaded_at_field", None) == "": + raise ValidationError("loaded_at_field cannot be an empty string.") + @property def yaml_key(self) -> "str": return "sources" - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): + dct = super().__post_serialize__(dct, context) if "freshness" not in dct and self.freshness is None: dct["freshness"] = None return dct @@ -411,12 +371,14 @@ class SourceTablePatch(dbtClassMixin): data_type: Optional[str] = None docs: Optional[Docs] = None loaded_at_field: Optional[str] = None + loaded_at_field_present: Optional[bool] = None identifier: Optional[str] = None quoting: Quoting = field(default_factory=Quoting) freshness: Optional[FreshnessThreshold] = field(default_factory=FreshnessThreshold) external: Optional[ExternalTable] = None tags: Optional[List[str]] = None - tests: Optional[List[TestDef]] = None + data_tests: Optional[List[TestDef]] = None + tests: Optional[List[TestDef]] = None # back compat for previous name of 'data_tests' columns: Optional[Sequence[UnparsedColumn]] = None def to_patch_dict(self) -> Dict[str, Any]: @@ -433,7 +395,7 @@ def to_patch_dict(self) -> Dict[str, Any]: @dataclass -class SourcePatch(dbtClassMixin, Replaceable): +class SourcePatch(dbtClassMixin): name: str = field( metadata=dict(description="The name of the source to override"), ) @@ -452,6 +414,7 @@ class SourcePatch(dbtClassMixin, Replaceable): quoting: Optional[Quoting] = None freshness: Optional[Optional[FreshnessThreshold]] = field(default_factory=FreshnessThreshold) loaded_at_field: Optional[str] = None + loaded_at_field_present: Optional[bool] = None tables: Optional[List[SourceTablePatch]] = None tags: Optional[List[str]] = None @@ -476,7 +439,7 @@ def get_table_named(self, name: str) -> Optional[SourceTablePatch]: @dataclass -class UnparsedDocumentation(dbtClassMixin, Replaceable): +class UnparsedDocumentation(dbtClassMixin): package_name: str path: str original_file_path: str @@ -524,28 +487,8 @@ def __le__(self, other): return 
self == other or self < other -class ExposureType(StrEnum): - Dashboard = "dashboard" - Notebook = "notebook" - Analysis = "analysis" - ML = "ml" - Application = "application" - - -class MaturityType(StrEnum): - Low = "low" - Medium = "medium" - High = "high" - - @dataclass -class Owner(AdditionalPropertiesAllowed, Replaceable): - email: Optional[str] = None - name: Optional[str] = None - - -@dataclass -class UnparsedExposure(dbtClassMixin, Replaceable): +class UnparsedExposure(dbtClassMixin): name: str type: ExposureType owner: Owner @@ -571,7 +514,7 @@ def validate(cls, data): @dataclass -class MetricFilter(dbtClassMixin, Replaceable): +class MetricFilter(dbtClassMixin): field: str operator: str # TODO : Can we make this Any? @@ -600,19 +543,42 @@ def __bool__(self): @dataclass class UnparsedMetricInputMeasure(dbtClassMixin): name: str - filter: Optional[str] = None + # Note: `Union` must be the outermost part of the type annotation for serialization to work properly. + filter: Union[str, List[str], None] = None alias: Optional[str] = None + join_to_timespine: bool = False + fill_nulls_with: Optional[int] = None @dataclass class UnparsedMetricInput(dbtClassMixin): name: str - filter: Optional[str] = None + # Note: `Union` must be the outermost part of the type annotation for serialization to work properly. + filter: Union[str, List[str], None] = None alias: Optional[str] = None offset_window: Optional[str] = None offset_to_grain: Optional[str] = None # str is really a TimeGranularity Enum +@dataclass +class UnparsedConversionTypeParams(dbtClassMixin): + base_measure: Union[UnparsedMetricInputMeasure, str] + conversion_measure: Union[UnparsedMetricInputMeasure, str] + entity: str + calculation: str = ( + ConversionCalculationType.CONVERSION_RATE.value + ) # ConversionCalculationType Enum + window: Optional[str] = None + constant_properties: Optional[List[ConstantPropertyInput]] = None + + +@dataclass +class UnparsedCumulativeTypeParams(dbtClassMixin): + window: Optional[str] = None + grain_to_date: Optional[str] = None + period_agg: str = PeriodAggregation.FIRST.value + + @dataclass class UnparsedMetricTypeParams(dbtClassMixin): measure: Optional[Union[UnparsedMetricInputMeasure, str]] = None @@ -622,6 +588,8 @@ class UnparsedMetricTypeParams(dbtClassMixin): window: Optional[str] = None grain_to_date: Optional[str] = None # str is really a TimeGranularity Enum metrics: Optional[List[Union[UnparsedMetricInput, str]]] = None + conversion_type_params: Optional[UnparsedConversionTypeParams] = None + cumulative_type_params: Optional[UnparsedCumulativeTypeParams] = None @dataclass @@ -631,7 +599,9 @@ class UnparsedMetric(dbtClassMixin): type: str type_params: UnparsedMetricTypeParams description: str = "" - filter: Optional[str] = None + # Note: `Union` must be the outermost part of the type annotation for serialization to work properly. 
+ filter: Union[str, List[str], None] = None + time_granularity: Optional[str] = None # metadata: Optional[Unparsedetadata] = None # TODO meta: Dict[str, Any] = field(default_factory=dict) tags: List[str] = field(default_factory=list) @@ -660,7 +630,7 @@ def validate(cls, data): @dataclass -class UnparsedGroup(dbtClassMixin, Replaceable): +class UnparsedGroup(dbtClassMixin): name: str owner: Owner @@ -681,6 +651,7 @@ class UnparsedEntity(dbtClassMixin): name: str type: str # EntityType enum description: Optional[str] = None + label: Optional[str] = None role: Optional[str] = None expr: Optional[str] = None @@ -689,7 +660,7 @@ class UnparsedEntity(dbtClassMixin): class UnparsedNonAdditiveDimension(dbtClassMixin): name: str window_choice: str # AggregationType enum - window_groupings: List[str] + window_groupings: List[str] = field(default_factory=list) @dataclass @@ -697,10 +668,12 @@ class UnparsedMeasure(dbtClassMixin): name: str agg: str # actually an enum description: Optional[str] = None + label: Optional[str] = None expr: Optional[Union[str, bool, int]] = None agg_params: Optional[MeasureAggregationParameters] = None non_additive_dimension: Optional[UnparsedNonAdditiveDimension] = None agg_time_dimension: Optional[str] = None + create_metric: bool = False @dataclass @@ -714,6 +687,7 @@ class UnparsedDimension(dbtClassMixin): name: str type: str # actually an enum description: Optional[str] = None + label: Optional[str] = None is_partition: bool = False type_params: Optional[UnparsedDimensionTypeParams] = None expr: Optional[str] = None @@ -723,7 +697,9 @@ class UnparsedDimension(dbtClassMixin): class UnparsedSemanticModel(dbtClassMixin): name: str model: str # looks like "ref(...)" + config: Dict[str, Any] = field(default_factory=dict) description: Optional[str] = None + label: Optional[str] = None defaults: Optional[Defaults] = None entities: List[UnparsedEntity] = field(default_factory=list) measures: List[UnparsedMeasure] = field(default_factory=list) @@ -731,6 +707,32 @@ class UnparsedSemanticModel(dbtClassMixin): primary_entity: Optional[str] = None +@dataclass +class UnparsedQueryParams(dbtClassMixin): + metrics: List[str] = field(default_factory=list) + group_by: List[str] = field(default_factory=list) + # Note: `Union` must be the outermost part of the type annotation for serialization to work properly. 
+ where: Union[str, List[str], None] = None + + +@dataclass +class UnparsedExport(dbtClassMixin): + """Configuration for writing query results to a table.""" + + name: str + config: Dict[str, Any] = field(default_factory=dict) + + +@dataclass +class UnparsedSavedQuery(dbtClassMixin): + name: str + query_params: UnparsedQueryParams + description: Optional[str] = None + label: Optional[str] = None + exports: List[UnparsedExport] = field(default_factory=list) + config: Dict[str, Any] = field(default_factory=dict) + + def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]: """Convert date to datetime (at midnight), and add local time zone if naive""" if d is None: @@ -744,3 +746,22 @@ def normalize_date(d: Optional[datetime.date]) -> Optional[datetime.datetime]: dt = dt.astimezone() return dt + + +@dataclass +class UnparsedUnitTest(dbtClassMixin): + name: str + model: str # name of the model being unit tested + given: Sequence[UnitTestInputFixture] + expect: UnitTestOutputFixture + description: str = "" + overrides: Optional[UnitTestOverrides] = None + config: Dict[str, Any] = field(default_factory=dict) + versions: Optional[UnitTestNodeVersions] = None + + @classmethod + def validate(cls, data): + super(UnparsedUnitTest, cls).validate(data) + if data.get("versions", None): + if data["versions"].get("include") and data["versions"].get("exclude"): + raise ValidationError("Unit tests can not both include and exclude versions.") diff --git a/core/dbt/contracts/project.py b/core/dbt/contracts/project.py index d9bd0c6fb89..041d310cc5e 100644 --- a/core/dbt/contracts/project.py +++ b/core/dbt/contracts/project.py @@ -1,17 +1,20 @@ -from dbt.contracts.util import Replaceable, Mergeable, list_str, Identifier -from dbt.contracts.connection import QueryComment, UserConfigContract -from dbt.helper_types import NoValue -from dbt.dataclass_schema import ( - dbtClassMixin, - ValidationError, - HyphenatedDbtClassMixin, - ExtensibleDbtClassMixin, - register_pattern, -) from dataclasses import dataclass, field -from typing import Optional, List, Dict, Union, Any +from typing import Any, ClassVar, Dict, List, Optional, Union + +from mashumaro.jsonschema.annotations import Pattern from mashumaro.types import SerializableType +from typing_extensions import Annotated +from dbt.adapters.contracts.connection import QueryComment +from dbt.contracts.util import Identifier, list_str +from dbt_common.contracts.util import Mergeable +from dbt_common.dataclass_schema import ( + ExtensibleDbtClassMixin, + ValidationError, + dbtClassMixin, + dbtMashConfig, +) +from dbt_common.helper_types import NoValue DEFAULT_SEND_ANONYMOUS_USAGE_STATS = True @@ -25,12 +28,8 @@ def _deserialize(cls, value: str) -> "SemverString": return SemverString(value) -# this supports full semver, -# but also allows for 2 group version numbers, (allows '1.0'). -register_pattern( - SemverString, - r"^(0|[1-9]\d*)\.(0|[1-9]\d*)(\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?)?$", # noqa -) +# This supports full semver, but also allows for 2 group version numbers, (allows '1.0'). 
+sem_ver_pattern = r"^(0|[1-9]\d*)\.(0|[1-9]\d*)(\.(0|[1-9]\d*)(?:-((?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?)?$" @dataclass @@ -42,13 +41,14 @@ class Quoting(dbtClassMixin, Mergeable): @dataclass -class Package(Replaceable, HyphenatedDbtClassMixin): +class Package(dbtClassMixin): pass @dataclass class LocalPackage(Package): local: str + unrendered: Dict[str, Any] = field(default_factory=dict) # `float` also allows `int`, according to PEP484 (and jsonschema!) @@ -59,14 +59,16 @@ class LocalPackage(Package): class TarballPackage(Package): tarball: str name: str + unrendered: Dict[str, Any] = field(default_factory=dict) @dataclass class GitPackage(Package): git: str revision: Optional[RawVersion] = None - warn_unpinned: Optional[bool] = None + warn_unpinned: Optional[bool] = field(default=None, metadata={"alias": "warn-unpinned"}) subdirectory: Optional[str] = None + unrendered: Dict[str, Any] = field(default_factory=dict) def get_revisions(self) -> List[str]: if self.revision is None: @@ -75,11 +77,22 @@ def get_revisions(self) -> List[str]: return [str(self.revision)] +@dataclass +class PrivatePackage(Package): + private: str + provider: Optional[str] = None + revision: Optional[RawVersion] = None + warn_unpinned: Optional[bool] = field(default=None, metadata={"alias": "warn-unpinned"}) + subdirectory: Optional[str] = None + unrendered: Dict[str, Any] = field(default_factory=dict) + + @dataclass class RegistryPackage(Package): package: str version: Union[RawVersion, List[RawVersion]] install_prerelease: Optional[bool] = False + unrendered: Dict[str, Any] = field(default_factory=dict) def get_versions(self) -> List[str]: if isinstance(self.version, list): @@ -88,23 +101,36 @@ def get_versions(self) -> List[str]: return [str(self.version)] -PackageSpec = Union[LocalPackage, TarballPackage, GitPackage, RegistryPackage] +PackageSpec = Union[LocalPackage, TarballPackage, GitPackage, RegistryPackage, PrivatePackage] @dataclass -class PackageConfig(dbtClassMixin, Replaceable): +class PackageConfig(dbtClassMixin): packages: List[PackageSpec] @classmethod def validate(cls, data): for package in data.get("packages", data): + # This can happen when the target is a variable that is not filled and results in hangs + if isinstance(package, dict): + if package.get("package") == "": + raise ValidationError( + "A hub package is missing the value. It is a required property." + ) + if package.get("local") == "": + raise ValidationError( + "A local package is missing the value. It is a required property." + ) + if package.get("git") == "": + raise ValidationError( + "A git package is missing the value. It is a required property." + ) if isinstance(package, dict) and package.get("package"): if not package["version"]: raise ValidationError( f"{package['package']} is missing the version. When installing from the Hub " "package index, version is a required property" ) - if "/" not in package["package"]: raise ValidationError( f"{package['package']} was not found in the package index. 
Packages on the index " @@ -124,7 +150,7 @@ def from_project(cls, project): @dataclass -class Downloads(ExtensibleDbtClassMixin, Replaceable): +class Downloads(ExtensibleDbtClassMixin): tarball: str @@ -182,15 +208,18 @@ class RegistryPackageMetadata( @dataclass -class Project(HyphenatedDbtClassMixin, Replaceable): - name: Identifier +class Project(dbtClassMixin): + _hyphenated: ClassVar[bool] = True + # Annotated is used by mashumaro for jsonschema generation + name: Annotated[Identifier, Pattern(r"^[^\d\W]\w*$")] config_version: Optional[int] = 2 - version: Optional[Union[SemverString, float]] = None + # Annotated is used by mashumaro for jsonschema generation + version: Optional[Union[Annotated[SemverString, Pattern(sem_ver_pattern)], float]] = None project_root: Optional[str] = None source_paths: Optional[List[str]] = None model_paths: Optional[List[str]] = None macro_paths: Optional[List[str]] = None - data_paths: Optional[List[str]] = None + data_paths: Optional[List[str]] = None # deprecated seed_paths: Optional[List[str]] = None test_paths: Optional[List[str]] = None analysis_paths: Optional[List[str]] = None @@ -212,8 +241,12 @@ class Project(HyphenatedDbtClassMixin, Replaceable): snapshots: Dict[str, Any] = field(default_factory=dict) analyses: Dict[str, Any] = field(default_factory=dict) sources: Dict[str, Any] = field(default_factory=dict) - tests: Dict[str, Any] = field(default_factory=dict) + tests: Dict[str, Any] = field(default_factory=dict) # deprecated + data_tests: Dict[str, Any] = field(default_factory=dict) + unit_tests: Dict[str, Any] = field(default_factory=dict) metrics: Dict[str, Any] = field(default_factory=dict) + semantic_models: Dict[str, Any] = field(default_factory=dict) + saved_queries: Dict[str, Any] = field(default_factory=dict) exposures: Dict[str, Any] = field(default_factory=dict) vars: Optional[Dict[str, Any]] = field( default=None, @@ -224,6 +257,37 @@ class Project(HyphenatedDbtClassMixin, Replaceable): packages: List[PackageSpec] = field(default_factory=list) query_comment: Optional[Union[QueryComment, NoValue, str]] = field(default_factory=NoValue) restrict_access: bool = False + dbt_cloud: Optional[Dict[str, Any]] = None + flags: Dict[str, Any] = field(default_factory=dict) + + class Config(dbtMashConfig): + # These tell mashumaro to use aliases for jsonschema and for "from_dict" + aliases = { + "config_version": "config-version", + "project_root": "project-root", + "source_paths": "source-paths", + "model_paths": "model-paths", + "macro_paths": "macro-paths", + "data_paths": "data-paths", + "seed_paths": "seed-paths", + "test_paths": "test-paths", + "analysis_paths": "analysis-paths", + "docs_paths": "docs-paths", + "asset_paths": "asset-paths", + "target_path": "target-path", + "snapshot_paths": "snapshot-paths", + "clean_targets": "clean-targets", + "log_path": "log-path", + "packages_install_path": "packages-install-path", + "on_run_start": "on-run-start", + "on_run_end": "on-run-end", + "require_dbt_version": "require-dbt-version", + "query_comment": "query-comment", + "restrict_access": "restrict-access", + "semantic_models": "semantic-models", + "saved_queries": "saved-queries", + "dbt_cloud": "dbt-cloud", + } @classmethod def validate(cls, data): @@ -240,10 +304,18 @@ def validate(cls, data): or not isinstance(entry["search_order"], list) ): raise ValidationError(f"Invalid project dispatch config: {entry}") + if "dbt_cloud" in data and not isinstance(data["dbt_cloud"], dict): + raise ValidationError( + f"Invalid dbt_cloud config. 
Expected a 'dict' but got '{type(data['dbt_cloud'])}'" + ) + if data.get("tests", None) and data.get("data_tests", None): + raise ValidationError( + "Invalid project config: cannot have both 'tests' and 'data_tests' defined" + ) @dataclass -class UserConfig(ExtensibleDbtClassMixin, Replaceable, UserConfigContract): +class ProjectFlags(ExtensibleDbtClassMixin): cache_selected_only: Optional[bool] = None debug: Optional[bool] = None fail_fast: Optional[bool] = None @@ -265,19 +337,31 @@ class UserConfig(ExtensibleDbtClassMixin, Replaceable, UserConfigContract): warn_error_options: Optional[Dict[str, Union[str, List[str]]]] = None write_json: Optional[bool] = None + # legacy behaviors + require_explicit_package_overrides_for_builtin_materializations: bool = True + require_resource_names_without_spaces: bool = False + source_freshness_run_project_hooks: bool = False + + @property + def project_only_flags(self) -> Dict[str, Any]: + return { + "require_explicit_package_overrides_for_builtin_materializations": self.require_explicit_package_overrides_for_builtin_materializations, + "require_resource_names_without_spaces": self.require_resource_names_without_spaces, + "source_freshness_run_project_hooks": self.source_freshness_run_project_hooks, + } + @dataclass -class ProfileConfig(HyphenatedDbtClassMixin, Replaceable): - profile_name: str = field(metadata={"preserve_underscore": True}) - target_name: str = field(metadata={"preserve_underscore": True}) - user_config: UserConfig = field(metadata={"preserve_underscore": True}) +class ProfileConfig(dbtClassMixin): + profile_name: str + target_name: str threads: int # TODO: make this a dynamic union of some kind? credentials: Optional[Dict[str, Any]] @dataclass -class ConfiguredQuoting(Quoting, Replaceable): +class ConfiguredQuoting(Quoting): identifier: bool = True schema: bool = True database: Optional[bool] = None diff --git a/core/dbt/contracts/relation.py b/core/dbt/contracts/relation.py deleted file mode 100644 index 2cf811f9f6c..00000000000 --- a/core/dbt/contracts/relation.py +++ /dev/null @@ -1,119 +0,0 @@ -from collections.abc import Mapping -from dataclasses import dataclass -from typing import ( - Optional, - Dict, -) -from typing_extensions import Protocol - -from dbt.dataclass_schema import dbtClassMixin, StrEnum - -from dbt.contracts.util import Replaceable -from dbt.exceptions import CompilationError, DataclassNotDictError -from dbt.utils import deep_merge - - -class RelationType(StrEnum): - Table = "table" - View = "view" - CTE = "cte" - MaterializedView = "materialized_view" - External = "external" - - -class ComponentName(StrEnum): - Database = "database" - Schema = "schema" - Identifier = "identifier" - - -class HasQuoting(Protocol): - quoting: Dict[str, bool] - - -class FakeAPIObject(dbtClassMixin, Replaceable, Mapping): - # override the mapping truthiness, len is always >1 - def __bool__(self): - return True - - def __getitem__(self, key): - try: - return getattr(self, key) - except AttributeError: - raise KeyError(key) from None - - def __iter__(self): - raise DataclassNotDictError(self) - - def __len__(self): - raise DataclassNotDictError(self) - - def incorporate(self, **kwargs): - value = self.to_dict(omit_none=True) - value = deep_merge(value, kwargs) - return self.from_dict(value) - - -@dataclass -class Policy(FakeAPIObject): - database: bool = True - schema: bool = True - identifier: bool = True - - def get_part(self, key: ComponentName) -> bool: - if key == ComponentName.Database: - return self.database - elif key == 
ComponentName.Schema: - return self.schema - elif key == ComponentName.Identifier: - return self.identifier - else: - raise ValueError( - "Got a key of {}, expected one of {}".format(key, list(ComponentName)) - ) - - def replace_dict(self, dct: Dict[ComponentName, bool]): - kwargs: Dict[str, bool] = {} - for k, v in dct.items(): - kwargs[str(k)] = v - return self.replace(**kwargs) - - -@dataclass -class Path(FakeAPIObject): - database: Optional[str] = None - schema: Optional[str] = None - identifier: Optional[str] = None - - def __post_init__(self): - # handle pesky jinja2.Undefined sneaking in here and messing up rende - if not isinstance(self.database, (type(None), str)): - raise CompilationError("Got an invalid path database: {}".format(self.database)) - if not isinstance(self.schema, (type(None), str)): - raise CompilationError("Got an invalid path schema: {}".format(self.schema)) - if not isinstance(self.identifier, (type(None), str)): - raise CompilationError("Got an invalid path identifier: {}".format(self.identifier)) - - def get_lowered_part(self, key: ComponentName) -> Optional[str]: - part = self.get_part(key) - if part is not None: - part = part.lower() - return part - - def get_part(self, key: ComponentName) -> Optional[str]: - if key == ComponentName.Database: - return self.database - elif key == ComponentName.Schema: - return self.schema - elif key == ComponentName.Identifier: - return self.identifier - else: - raise ValueError( - "Got a key of {}, expected one of {}".format(key, list(ComponentName)) - ) - - def replace_dict(self, dct: Dict[ComponentName, str]): - kwargs: Dict[str, str] = {} - for k, v in dct.items(): - kwargs[str(k)] = v - return self.replace(**kwargs) diff --git a/core/dbt/contracts/results.py b/core/dbt/contracts/results.py index aaa036e6a74..79a190087c4 100644 --- a/core/dbt/contracts/results.py +++ b/core/dbt/contracts/results.py @@ -1,501 +1,53 @@ -import threading +# flake8: noqa -from dbt.contracts.graph.unparsed import FreshnessThreshold -from dbt.contracts.graph.nodes import SourceDefinition, ResultNode -from dbt.contracts.util import ( - BaseArtifactMetadata, +# This file is temporary, in order to not break various adapter tests, etc, until +# they are updated to use the new locations. 
+ +from dbt.artifacts.schemas.base import ( ArtifactMixin, + BaseArtifactMetadata, VersionedSchema, - Replaceable, schema_version, ) -from dbt.exceptions import DbtInternalError -from dbt.events.functions import fire_event -from dbt.events.types import TimingInfoCollected -from dbt.events.contextvars import get_node_info -from dbt.events.helpers import datetime_to_json_string -from dbt.logger import TimingProcessor -from dbt.utils import lowercase, cast_to_str, cast_to_int -from dbt.dataclass_schema import dbtClassMixin, StrEnum - -import agate - -from dataclasses import dataclass, field -from datetime import datetime -from typing import ( - Any, - Callable, - Dict, - List, - NamedTuple, - Optional, - Sequence, - Union, +from dbt.artifacts.schemas.catalog import ( + CatalogArtifact, + CatalogKey, + CatalogMetadata, + CatalogResults, + CatalogTable, + ColumnMetadata, + StatsItem, + TableMetadata, ) - -from dbt.clients.system import write_json - - -@dataclass -class TimingInfo(dbtClassMixin): - name: str - started_at: Optional[datetime] = None - completed_at: Optional[datetime] = None - - def begin(self): - self.started_at = datetime.utcnow() - - def end(self): - self.completed_at = datetime.utcnow() - - def to_msg_dict(self): - msg_dict = {"name": self.name} - if self.started_at: - msg_dict["started_at"] = datetime_to_json_string(self.started_at) - if self.completed_at: - msg_dict["completed_at"] = datetime_to_json_string(self.completed_at) - return msg_dict - - -# This is a context manager -class collect_timing_info: - def __init__(self, name: str, callback: Callable[[TimingInfo], None]): - self.timing_info = TimingInfo(name=name) - self.callback = callback - - def __enter__(self): - self.timing_info.begin() - - def __exit__(self, exc_type, exc_value, traceback): - self.timing_info.end() - self.callback(self.timing_info) - # Note: when legacy logger is removed, we can remove the following line - with TimingProcessor(self.timing_info): - fire_event( - TimingInfoCollected( - timing_info=self.timing_info.to_msg_dict(), node_info=get_node_info() - ) - ) - - -class RunningStatus(StrEnum): - Started = "started" - Compiling = "compiling" - Executing = "executing" - - -class NodeStatus(StrEnum): - Success = "success" - Error = "error" - Fail = "fail" - Warn = "warn" - Skipped = "skipped" - Pass = "pass" - RuntimeErr = "runtime error" - - -class RunStatus(StrEnum): - Success = NodeStatus.Success - Error = NodeStatus.Error - Skipped = NodeStatus.Skipped - - -class TestStatus(StrEnum): - __test__ = False - Pass = NodeStatus.Pass - Error = NodeStatus.Error - Fail = NodeStatus.Fail - Warn = NodeStatus.Warn - Skipped = NodeStatus.Skipped - - -class FreshnessStatus(StrEnum): - Pass = NodeStatus.Pass - Warn = NodeStatus.Warn - Error = NodeStatus.Error - RuntimeErr = NodeStatus.RuntimeErr - - -@dataclass -class BaseResult(dbtClassMixin): - status: Union[RunStatus, TestStatus, FreshnessStatus] - timing: List[TimingInfo] - thread_id: str - execution_time: float - adapter_response: Dict[str, Any] - message: Optional[str] - failures: Optional[int] - - @classmethod - def __pre_deserialize__(cls, data): - data = super().__pre_deserialize__(data) - if "message" not in data: - data["message"] = None - if "failures" not in data: - data["failures"] = None - return data - - def to_msg_dict(self): - msg_dict = { - "status": str(self.status), - "message": cast_to_str(self.message), - "thread": self.thread_id, - "execution_time": self.execution_time, - "num_failures": cast_to_int(self.failures), - "timing_info": 
[ti.to_msg_dict() for ti in self.timing], - "adapter_response": self.adapter_response, - } - return msg_dict - - -@dataclass -class NodeResult(BaseResult): - node: ResultNode - - -@dataclass -class RunResult(NodeResult): - agate_table: Optional[agate.Table] = field( - default=None, metadata={"serialize": lambda x: None, "deserialize": lambda x: None} - ) - - @property - def skipped(self): - return self.status == RunStatus.Skipped - - @classmethod - def from_node(cls, node: ResultNode, status: RunStatus, message: Optional[str]): - thread_id = threading.current_thread().name - return RunResult( - status=status, - thread_id=thread_id, - execution_time=0, - timing=[], - message=message, - node=node, - adapter_response={}, - failures=None, - ) - - -@dataclass -class ExecutionResult(dbtClassMixin): - results: Sequence[BaseResult] - elapsed_time: float - - def __len__(self): - return len(self.results) - - def __iter__(self): - return iter(self.results) - - def __getitem__(self, idx): - return self.results[idx] - - -@dataclass -class RunResultsMetadata(BaseArtifactMetadata): - dbt_schema_version: str = field( - default_factory=lambda: str(RunResultsArtifact.dbt_schema_version) - ) - - -@dataclass -class RunResultOutput(BaseResult): - unique_id: str - - -def process_run_result(result: RunResult) -> RunResultOutput: - return RunResultOutput( - unique_id=result.node.unique_id, - status=result.status, - timing=result.timing, - thread_id=result.thread_id, - execution_time=result.execution_time, - message=result.message, - adapter_response=result.adapter_response, - failures=result.failures, - ) - - -@dataclass -class RunExecutionResult( +from dbt.artifacts.schemas.freshness import ( + FreshnessErrorEnum, + FreshnessExecutionResultArtifact, + FreshnessMetadata, + FreshnessNodeOutput, + FreshnessNodeResult, + FreshnessResult, + PartialSourceFreshnessResult, + SourceFreshnessOutput, + SourceFreshnessResult, + SourceFreshnessRuntimeError, + process_freshness_result, +) +from dbt.artifacts.schemas.results import ( + BaseResult, ExecutionResult, -): - results: Sequence[RunResult] - args: Dict[str, Any] = field(default_factory=dict) - generated_at: datetime = field(default_factory=datetime.utcnow) - - def write(self, path: str): - writable = RunResultsArtifact.from_execution_results( - results=self.results, - elapsed_time=self.elapsed_time, - generated_at=self.generated_at, - args=self.args, - ) - writable.write(path) - - -@dataclass -@schema_version("run-results", 4) -class RunResultsArtifact(ExecutionResult, ArtifactMixin): - results: Sequence[RunResultOutput] - args: Dict[str, Any] = field(default_factory=dict) - - @classmethod - def from_execution_results( - cls, - results: Sequence[RunResult], - elapsed_time: float, - generated_at: datetime, - args: Dict, - ): - processed_results = [ - process_run_result(result) for result in results if isinstance(result, RunResult) - ] - meta = RunResultsMetadata( - dbt_schema_version=str(cls.dbt_schema_version), - generated_at=generated_at, - ) - return cls(metadata=meta, results=processed_results, elapsed_time=elapsed_time, args=args) - - def write(self, path: str): - write_json(path, self.to_dict(omit_none=False)) - - -# due to issues with typing.Union collapsing subclasses, this can't subclass -# PartialResult - - -@dataclass -class SourceFreshnessResult(NodeResult): - node: SourceDefinition - status: FreshnessStatus - max_loaded_at: datetime - snapshotted_at: datetime - age: float - - @property - def skipped(self): - return False - - -class 
FreshnessErrorEnum(StrEnum): - runtime_error = "runtime error" - - -@dataclass -class SourceFreshnessRuntimeError(dbtClassMixin): - unique_id: str - error: Optional[Union[str, int]] - status: FreshnessErrorEnum - - -@dataclass -class SourceFreshnessOutput(dbtClassMixin): - unique_id: str - max_loaded_at: datetime - snapshotted_at: datetime - max_loaded_at_time_ago_in_s: float - status: FreshnessStatus - criteria: FreshnessThreshold - adapter_response: Dict[str, Any] - timing: List[TimingInfo] - thread_id: str - execution_time: float - - -@dataclass -class PartialSourceFreshnessResult(NodeResult): - status: FreshnessStatus - - @property - def skipped(self): - return False - - -FreshnessNodeResult = Union[PartialSourceFreshnessResult, SourceFreshnessResult] -FreshnessNodeOutput = Union[SourceFreshnessRuntimeError, SourceFreshnessOutput] - - -def process_freshness_result(result: FreshnessNodeResult) -> FreshnessNodeOutput: - unique_id = result.node.unique_id - if result.status == FreshnessStatus.RuntimeErr: - return SourceFreshnessRuntimeError( - unique_id=unique_id, - error=result.message, - status=FreshnessErrorEnum.runtime_error, - ) - - # we know that this must be a SourceFreshnessResult - if not isinstance(result, SourceFreshnessResult): - raise DbtInternalError( - "Got {} instead of a SourceFreshnessResult for a " - "non-error result in freshness execution!".format(type(result)) - ) - # if we're here, we must have a non-None freshness threshold - criteria = result.node.freshness - if criteria is None: - raise DbtInternalError( - "Somehow evaluated a freshness result for a source that has no freshness criteria!" - ) - return SourceFreshnessOutput( - unique_id=unique_id, - max_loaded_at=result.max_loaded_at, - snapshotted_at=result.snapshotted_at, - max_loaded_at_time_ago_in_s=result.age, - status=result.status, - criteria=criteria, - adapter_response=result.adapter_response, - timing=result.timing, - thread_id=result.thread_id, - execution_time=result.execution_time, - ) - - -@dataclass -class FreshnessMetadata(BaseArtifactMetadata): - dbt_schema_version: str = field( - default_factory=lambda: str(FreshnessExecutionResultArtifact.dbt_schema_version) - ) - - -@dataclass -class FreshnessResult(ExecutionResult): - metadata: FreshnessMetadata - results: Sequence[FreshnessNodeResult] - - @classmethod - def from_node_results( - cls, - results: List[FreshnessNodeResult], - elapsed_time: float, - generated_at: datetime, - ): - meta = FreshnessMetadata(generated_at=generated_at) - return cls(metadata=meta, results=results, elapsed_time=elapsed_time) - - def write(self, path): - FreshnessExecutionResultArtifact.from_result(self).write(path) - - -@dataclass -@schema_version("sources", 3) -class FreshnessExecutionResultArtifact( - ArtifactMixin, - VersionedSchema, -): - metadata: FreshnessMetadata - results: Sequence[FreshnessNodeOutput] - elapsed_time: float - - @classmethod - def from_result(cls, base: FreshnessResult): - processed = [process_freshness_result(r) for r in base.results] - return cls( - metadata=base.metadata, - results=processed, - elapsed_time=base.elapsed_time, - ) - - -Primitive = Union[bool, str, float, None] -PrimitiveDict = Dict[str, Primitive] - -CatalogKey = NamedTuple( - "CatalogKey", [("database", Optional[str]), ("schema", str), ("name", str)] + FreshnessStatus, + NodeResult, + NodeStatus, + RunningStatus, + RunStatus, + TestStatus, + TimingInfo, + collect_timing_info, +) +from dbt.artifacts.schemas.run import ( + RunExecutionResult, + RunResult, + RunResultsArtifact, + 
RunResultsMetadata, + process_run_result, ) - - -@dataclass -class StatsItem(dbtClassMixin): - id: str - label: str - value: Primitive - include: bool - description: Optional[str] = None - - -StatsDict = Dict[str, StatsItem] - - -@dataclass -class ColumnMetadata(dbtClassMixin): - type: str - index: int - name: str - comment: Optional[str] = None - - -ColumnMap = Dict[str, ColumnMetadata] - - -@dataclass -class TableMetadata(dbtClassMixin): - type: str - schema: str - name: str - database: Optional[str] = None - comment: Optional[str] = None - owner: Optional[str] = None - - -@dataclass -class CatalogTable(dbtClassMixin, Replaceable): - metadata: TableMetadata - columns: ColumnMap - stats: StatsDict - # the same table with two unique IDs will just be listed two times - unique_id: Optional[str] = None - - def key(self) -> CatalogKey: - return CatalogKey( - lowercase(self.metadata.database), - self.metadata.schema.lower(), - self.metadata.name.lower(), - ) - - -@dataclass -class CatalogMetadata(BaseArtifactMetadata): - dbt_schema_version: str = field( - default_factory=lambda: str(CatalogArtifact.dbt_schema_version) - ) - - -@dataclass -class CatalogResults(dbtClassMixin): - nodes: Dict[str, CatalogTable] - sources: Dict[str, CatalogTable] - errors: Optional[List[str]] = None - _compile_results: Optional[Any] = None - - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) - if "_compile_results" in dct: - del dct["_compile_results"] - return dct - - -@dataclass -@schema_version("catalog", 1) -class CatalogArtifact(CatalogResults, ArtifactMixin): - metadata: CatalogMetadata - - @classmethod - def from_results( - cls, - generated_at: datetime, - nodes: Dict[str, CatalogTable], - sources: Dict[str, CatalogTable], - compile_results: Optional[Any], - errors: Optional[List[str]], - ) -> "CatalogArtifact": - meta = CatalogMetadata(generated_at=generated_at) - return cls( - metadata=meta, - nodes=nodes, - sources=sources, - errors=errors, - _compile_results=compile_results, - ) diff --git a/core/dbt/contracts/selection.py b/core/dbt/contracts/selection.py index 4901cf045cd..0a4d39bede7 100644 --- a/core/dbt/contracts/selection.py +++ b/core/dbt/contracts/selection.py @@ -1,7 +1,7 @@ from dataclasses import dataclass -from dbt.dataclass_schema import dbtClassMixin +from typing import Any, Dict, List, Union -from typing import List, Dict, Any, Union +from dbt_common.dataclass_schema import dbtClassMixin @dataclass diff --git a/core/dbt/contracts/sql.py b/core/dbt/contracts/sql.py index b80304d2565..d7b9bcbd970 100644 --- a/core/dbt/contracts/sql.py +++ b/core/dbt/contracts/sql.py @@ -1,21 +1,13 @@ import uuid from dataclasses import dataclass, field from datetime import datetime -from typing import Optional, List, Any, Dict, Sequence - -from dbt.dataclass_schema import dbtClassMixin +from typing import Any, Dict, List, Optional, Sequence +from dbt.artifacts.schemas.base import VersionedSchema, schema_version +from dbt.artifacts.schemas.results import ExecutionResult, TimingInfo +from dbt.artifacts.schemas.run import RunExecutionResult, RunResult, RunResultsArtifact from dbt.contracts.graph.nodes import ResultNode -from dbt.contracts.results import ( - RunResult, - RunResultsArtifact, - TimingInfo, - ExecutionResult, - RunExecutionResult, -) -from dbt.contracts.util import VersionedSchema, schema_version -from dbt.logger import LogMessage - +from dbt_common.dataclass_schema import dbtClassMixin TaskTags = Optional[Dict[str, Any]] TaskID = uuid.UUID @@ -24,12 +16,7 @@ @dataclass 
-class RemoteResult(VersionedSchema): - logs: List[LogMessage] - - -@dataclass -class RemoteCompileResultMixin(RemoteResult): +class RemoteCompileResultMixin(VersionedSchema): raw_code: str compiled_code: str node: ResultNode @@ -42,18 +29,19 @@ class RemoteCompileResult(RemoteCompileResultMixin): generated_at: datetime = field(default_factory=datetime.utcnow) @property - def error(self): + def error(self) -> None: + # TODO: Can we delete this? It's never set anywhere else and never accessed return None @dataclass @schema_version("remote-execution-result", 1) -class RemoteExecutionResult(ExecutionResult, RemoteResult): +class RemoteExecutionResult(ExecutionResult): results: Sequence[RunResult] args: Dict[str, Any] = field(default_factory=dict) generated_at: datetime = field(default_factory=datetime.utcnow) - def write(self, path: str): + def write(self, path: str) -> None: writable = RunResultsArtifact.from_execution_results( generated_at=self.generated_at, results=self.results, @@ -66,14 +54,12 @@ def write(self, path: str): def from_local_result( cls, base: RunExecutionResult, - logs: List[LogMessage], ) -> "RemoteExecutionResult": return cls( generated_at=base.generated_at, results=base.results, elapsed_time=base.elapsed_time, args=base.args, - logs=logs, ) diff --git a/core/dbt/contracts/state.py b/core/dbt/contracts/state.py index bd9f389b602..d65fe4d9b4f 100644 --- a/core/dbt/contracts/state.py +++ b/core/dbt/contracts/state.py @@ -1,37 +1,50 @@ from pathlib import Path -from .graph.manifest import WritableManifest -from .results import RunResultsArtifact -from .results import FreshnessExecutionResultArtifact from typing import Optional -from dbt.exceptions import IncompatibleSchemaError + +from dbt.artifacts.exceptions import IncompatibleSchemaError +from dbt.artifacts.schemas.freshness import FreshnessExecutionResultArtifact +from dbt.artifacts.schemas.manifest import WritableManifest +from dbt.artifacts.schemas.run import RunResultsArtifact +from dbt.contracts.graph.manifest import Manifest +from dbt.events.types import WarnStateTargetEqual +from dbt_common.events.functions import fire_event + + +def load_result_state(results_path) -> Optional[RunResultsArtifact]: + if results_path.exists() and results_path.is_file(): + try: + return RunResultsArtifact.read_and_check_versions(str(results_path)) + except IncompatibleSchemaError as exc: + exc.add_filename(str(results_path)) + raise + return None class PreviousState: - def __init__(self, state_path: Path, target_path: Path, project_root: Path): + def __init__(self, state_path: Path, target_path: Path, project_root: Path) -> None: self.state_path: Path = state_path self.target_path: Path = target_path self.project_root: Path = project_root - self.manifest: Optional[WritableManifest] = None + self.manifest: Optional[Manifest] = None self.results: Optional[RunResultsArtifact] = None self.sources: Optional[FreshnessExecutionResultArtifact] = None self.sources_current: Optional[FreshnessExecutionResultArtifact] = None + if self.state_path == self.target_path: + fire_event(WarnStateTargetEqual(state_path=str(self.state_path))) + # Note: if state_path is absolute, project_root will be ignored. 
manifest_path = self.project_root / self.state_path / "manifest.json" if manifest_path.exists() and manifest_path.is_file(): try: - self.manifest = WritableManifest.read_and_check_versions(str(manifest_path)) + writable_manifest = WritableManifest.read_and_check_versions(str(manifest_path)) + self.manifest = Manifest.from_writable_manifest(writable_manifest) except IncompatibleSchemaError as exc: exc.add_filename(str(manifest_path)) raise results_path = self.project_root / self.state_path / "run_results.json" - if results_path.exists() and results_path.is_file(): - try: - self.results = RunResultsArtifact.read_and_check_versions(str(results_path)) - except IncompatibleSchemaError as exc: - exc.add_filename(str(results_path)) - raise + self.results = load_result_state(results_path) sources_path = self.project_root / self.state_path / "sources.json" if sources_path.exists() and sources_path.is_file(): diff --git a/core/dbt/contracts/util.py b/core/dbt/contracts/util.py index 20ee3fd660c..05157fa006f 100644 --- a/core/dbt/contracts/util.py +++ b/core/dbt/contracts/util.py @@ -1,24 +1,8 @@ -import dataclasses -from datetime import datetime -from typing import List, Tuple, ClassVar, Type, TypeVar, Dict, Any, Optional - -from dbt.clients.system import write_json, read_json -from dbt.exceptions import ( - DbtInternalError, - DbtRuntimeError, - IncompatibleSchemaError, -) -from dbt.version import __version__ - -from dbt.events.functions import get_invocation_id, get_metadata_vars -from dbt.dataclass_schema import dbtClassMixin - -from dbt.dataclass_schema import ( - ValidatedStringMixin, - ValidationError, - register_pattern, -) +from typing import Any, List, Tuple +# Leave imports of `Mergeable` to preserve import paths +from dbt_common.contracts.util import Mergeable # noqa:F401 +from dbt_common.dataclass_schema import ValidatedStringMixin, ValidationError SourceKey = Tuple[str, str] @@ -39,225 +23,14 @@ class Foo: return [] -class Replaceable: - def replace(self, **kwargs): - return dataclasses.replace(self, **kwargs) - - -class Mergeable(Replaceable): - def merged(self, *args): - """Perform a shallow merge, where the last non-None write wins. This is - intended to merge dataclasses that are a collection of optional values. - """ - replacements = {} - cls = type(self) - for arg in args: - for field in dataclasses.fields(cls): - value = getattr(arg, field.name) - if value is not None: - replacements[field.name] = value - - return self.replace(**replacements) - - -class Writable: - def write(self, path: str): - write_json(path, self.to_dict(omit_none=False)) # type: ignore - - -class AdditionalPropertiesMixin: - """Make this class an extensible property. +class Identifier(ValidatedStringMixin): + """Our definition of a valid Identifier is the same as what's valid for an unquoted database table name. - The underlying class definition must include a type definition for a field - named '_extra' that is of type `Dict[str, Any]`. + That is: + 1. It can contain a-z, A-Z, 0-9, and _ + 1. 
It cannot start with a number """ - ADDITIONAL_PROPERTIES = True - - # This takes attributes in the dictionary that are - # not in the class definitions and puts them in an - # _extra dict in the class - @classmethod - def __pre_deserialize__(cls, data): - # dir() did not work because fields with - # metadata settings are not found - # The original version of this would create the - # object first and then update extra with the - # extra keys, but that won't work here, so - # we're copying the dict so we don't insert the - # _extra in the original data. This also requires - # that Mashumaro actually build the '_extra' field - cls_keys = cls._get_field_names() - new_dict = {} - for key, value in data.items(): - if key not in cls_keys and key != "_extra": - if "_extra" not in new_dict: - new_dict["_extra"] = {} - new_dict["_extra"][key] = value - else: - new_dict[key] = value - data = new_dict - data = super().__pre_deserialize__(data) - return data - - def __post_serialize__(self, dct): - data = super().__post_serialize__(dct) - data.update(self.extra) - if "_extra" in data: - del data["_extra"] - return data - - def replace(self, **kwargs): - dct = self.to_dict(omit_none=False) - dct.update(kwargs) - return self.from_dict(dct) - - @property - def extra(self): - return self._extra - - -class Readable: - @classmethod - def read(cls, path: str): - try: - data = read_json(path) - except (EnvironmentError, ValueError) as exc: - raise DbtRuntimeError( - f'Could not read {cls.__name__} at "{path}" as JSON: {exc}' - ) from exc - - return cls.from_dict(data) # type: ignore - - -BASE_SCHEMAS_URL = "https://schemas.getdbt.com/" -SCHEMA_PATH = "dbt/{name}/v{version}.json" - - -@dataclasses.dataclass -class SchemaVersion: - name: str - version: int - - @property - def path(self) -> str: - return SCHEMA_PATH.format(name=self.name, version=self.version) - - def __str__(self) -> str: - return BASE_SCHEMAS_URL + self.path - - -# This is used in the ManifestMetadata, RunResultsMetadata, RunOperationResultMetadata, -# FreshnessMetadata, and CatalogMetadata classes -@dataclasses.dataclass -class BaseArtifactMetadata(dbtClassMixin): - dbt_schema_version: str - dbt_version: str = __version__ - generated_at: datetime = dataclasses.field(default_factory=datetime.utcnow) - invocation_id: Optional[str] = dataclasses.field(default_factory=get_invocation_id) - env: Dict[str, str] = dataclasses.field(default_factory=get_metadata_vars) - - def __post_serialize__(self, dct): - dct = super().__post_serialize__(dct) - if dct["generated_at"] and dct["generated_at"].endswith("+00:00"): - dct["generated_at"] = dct["generated_at"].replace("+00:00", "") + "Z" - return dct - - -# This is used as a class decorator to set the schema_version in the -# 'dbt_schema_version' class attribute. (It's copied into the metadata objects.) 
-# Name attributes of SchemaVersion in classes with the 'schema_version' decorator: -# manifest -# run-results -# run-operation-result -# sources -# catalog -# remote-compile-result -# remote-execution-result -# remote-run-result -def schema_version(name: str, version: int): - def inner(cls: Type[VersionedSchema]): - cls.dbt_schema_version = SchemaVersion( - name=name, - version=version, - ) - return cls - - return inner - - -# This is used in the ArtifactMixin and RemoteResult classes -@dataclasses.dataclass -class VersionedSchema(dbtClassMixin): - dbt_schema_version: ClassVar[SchemaVersion] - - @classmethod - def json_schema(cls, embeddable: bool = False) -> Dict[str, Any]: - result = super().json_schema(embeddable=embeddable) - if not embeddable: - result["$id"] = str(cls.dbt_schema_version) - return result - - @classmethod - def is_compatible_version(cls, schema_version): - compatible_versions = [str(cls.dbt_schema_version)] - if hasattr(cls, "compatible_previous_versions"): - for name, version in cls.compatible_previous_versions(): - compatible_versions.append(str(SchemaVersion(name, version))) - return str(schema_version) in compatible_versions - - @classmethod - def read_and_check_versions(cls, path: str): - try: - data = read_json(path) - except (EnvironmentError, ValueError) as exc: - raise DbtRuntimeError( - f'Could not read {cls.__name__} at "{path}" as JSON: {exc}' - ) from exc - - # Check metadata version. There is a class variable 'dbt_schema_version', but - # that doesn't show up in artifacts, where it only exists in the 'metadata' - # dictionary. - if hasattr(cls, "dbt_schema_version"): - if "metadata" in data and "dbt_schema_version" in data["metadata"]: - previous_schema_version = data["metadata"]["dbt_schema_version"] - # cls.dbt_schema_version is a SchemaVersion object - if not cls.is_compatible_version(previous_schema_version): - raise IncompatibleSchemaError( - expected=str(cls.dbt_schema_version), - found=previous_schema_version, - ) - - return cls.upgrade_schema_version(data) - - @classmethod - def upgrade_schema_version(cls, data): - """This will modify the data (dictionary) passed in to match the current - artifact schema code, if necessary. This is the default method, which - just returns the instantiated object via from_dict.""" - return cls.from_dict(data) - - -T = TypeVar("T", bound="ArtifactMixin") - - -# metadata should really be a Generic[T_M] where T_M is a TypeVar bound to -# BaseArtifactMetadata. 
Unfortunately this isn't possible due to a mypy issue: -# https://github.com/python/mypy/issues/7520 -# This is used in the WritableManifest, RunResultsArtifact, RunOperationResultsArtifact, -# and CatalogArtifact -@dataclasses.dataclass(init=False) -class ArtifactMixin(VersionedSchema, Writable, Readable): - metadata: BaseArtifactMetadata - - @classmethod - def validate(cls, data): - super().validate(data) - if cls.dbt_schema_version is None: - raise DbtInternalError("Cannot call from_dict with no schema version!") - - -class Identifier(ValidatedStringMixin): ValidationRegex = r"^[^\d\W]\w*$" @classmethod @@ -271,6 +44,3 @@ def is_valid(cls, value: Any) -> bool: return False return True - - -register_pattern(Identifier, r"^[^\d\W]\w*$") diff --git a/core/dbt/dataclass_schema.py b/core/dbt/dataclass_schema.py deleted file mode 100644 index 82a60ecdf64..00000000000 --- a/core/dbt/dataclass_schema.py +++ /dev/null @@ -1,167 +0,0 @@ -from typing import ( - Type, - ClassVar, - cast, -) -import re -from dataclasses import fields -from enum import Enum -from datetime import datetime -from dateutil.parser import parse - -from hologram import JsonSchemaMixin, FieldEncoder, ValidationError - -# type: ignore -from mashumaro import DataClassDictMixin -from mashumaro.config import TO_DICT_ADD_OMIT_NONE_FLAG, BaseConfig as MashBaseConfig -from mashumaro.types import SerializableType, SerializationStrategy - - -class DateTimeSerialization(SerializationStrategy): - def serialize(self, value): - out = value.isoformat() - # Assume UTC if timezone is missing - if value.tzinfo is None: - out += "Z" - return out - - def deserialize(self, value): - return value if isinstance(value, datetime) else parse(cast(str, value)) - - -# This class pulls in both JsonSchemaMixin from Hologram and -# DataClassDictMixin from our fork of Mashumaro. The 'to_dict' -# and 'from_dict' methods come from Mashumaro. Building -# jsonschemas for every class and the 'validate' method -# come from Hologram. -class dbtClassMixin(DataClassDictMixin, JsonSchemaMixin): - """The Mixin adds methods to generate a JSON schema and - convert to and from JSON encodable dicts with validation - against the schema - """ - - class Config(MashBaseConfig): - code_generation_options = [ - TO_DICT_ADD_OMIT_NONE_FLAG, - ] - serialization_strategy = { - datetime: DateTimeSerialization(), - } - - _hyphenated: ClassVar[bool] = False - ADDITIONAL_PROPERTIES: ClassVar[bool] = False - - # This is called by the mashumaro to_dict in order to handle - # nested classes. - # Munges the dict that's returned. - def __post_serialize__(self, dct): - if self._hyphenated: - new_dict = {} - for key in dct: - if "_" in key: - new_key = key.replace("_", "-") - new_dict[new_key] = dct[key] - else: - new_dict[key] = dct[key] - dct = new_dict - - return dct - - # This is called by the mashumaro _from_dict method, before - # performing the conversion to a dict - @classmethod - def __pre_deserialize__(cls, data): - # `data` might not be a dict, e.g. for `query_comment`, which accepts - # a dict or a string; only snake-case for dict values. - if cls._hyphenated and isinstance(data, dict): - new_dict = {} - for key in data: - if "-" in key: - new_key = key.replace("-", "_") - new_dict[new_key] = data[key] - else: - new_dict[key] = data[key] - data = new_dict - return data - - # This is used in the hologram._encode_field method, which calls - # a 'to_dict' method which does not have the same parameters in - # hologram and in mashumaro. 
- def _local_to_dict(self, **kwargs): - args = {} - if "omit_none" in kwargs: - args["omit_none"] = kwargs["omit_none"] - return self.to_dict(**args) - - -class ValidatedStringMixin(str, SerializableType): - ValidationRegex = "" - - @classmethod - def _deserialize(cls, value: str) -> "ValidatedStringMixin": - cls.validate(value) - return ValidatedStringMixin(value) - - def _serialize(self) -> str: - return str(self) - - @classmethod - def validate(cls, value): - res = re.match(cls.ValidationRegex, value) - - if res is None: - raise ValidationError(f"Invalid value: {value}") # TODO - - -# These classes must be in this order or it doesn't work -class StrEnum(str, SerializableType, Enum): - def __str__(self): - return self.value - - # https://docs.python.org/3.6/library/enum.html#using-automatic-values - def _generate_next_value_(name, *_): - return name - - def _serialize(self) -> str: - return self.value - - @classmethod - def _deserialize(cls, value: str): - return cls(value) - - -class HyphenatedDbtClassMixin(dbtClassMixin): - # used by from_dict/to_dict - _hyphenated: ClassVar[bool] = True - - # used by jsonschema validation, _get_fields - @classmethod - def field_mapping(cls): - result = {} - for field in fields(cls): - skip = field.metadata.get("preserve_underscore") - if skip: - continue - - if "_" in field.name: - result[field.name] = field.name.replace("_", "-") - return result - - -class ExtensibleDbtClassMixin(dbtClassMixin): - ADDITIONAL_PROPERTIES = True - - -# This is used by Hologram in jsonschema validation -def register_pattern(base_type: Type, pattern: str) -> None: - """base_type should be a typing.NewType that should always have the given - regex pattern. That means that its underlying type ('__supertype__') had - better be a str! 
- """ - - class PatternEncoder(FieldEncoder): - @property - def json_schema(self): - return {"type": "string", "pattern": pattern} - - dbtClassMixin.register_field_encoders({base_type: PatternEncoder()}) diff --git a/core/dbt/deprecations.py b/core/dbt/deprecations.py index d69b166043d..b9c4ab87cfc 100644 --- a/core/dbt/deprecations.py +++ b/core/dbt/deprecations.py @@ -1,9 +1,9 @@ import abc -from typing import Optional, Set, List, Dict, ClassVar - -import dbt.exceptions +from typing import Callable, ClassVar, Dict, List, Optional, Set import dbt.tracking +from dbt.events import types as core_types +from dbt_common.events.functions import warn_or_error class DBTDeprecation: @@ -23,7 +23,7 @@ def track_deprecation_warn(self) -> None: @property def event(self) -> abc.ABCMeta: if self._event is not None: - module_path = dbt.events.types + module_path = core_types class_name = self._event try: @@ -36,7 +36,7 @@ def event(self) -> abc.ABCMeta: def show(self, *args, **kwargs) -> None: if self.name not in active_deprecations: event = self.event(**kwargs) - dbt.events.functions.warn_or_error(event) + warn_or_error(event) self.track_deprecation_warn() active_deprecations.add(self.name) @@ -51,6 +51,8 @@ class PackageInstallPathDeprecation(DBTDeprecation): _event = "PackageInstallPathDeprecation" +# deprecations with a pattern of `project-config-*` for the name are not hardcoded +# they are called programatically via the pattern below class ConfigSourcePathDeprecation(DBTDeprecation): _name = "project-config-source-paths" _event = "ConfigSourcePathDeprecation" @@ -61,6 +63,16 @@ class ConfigDataPathDeprecation(DBTDeprecation): _event = "ConfigDataPathDeprecation" +class ConfigLogPathDeprecation(DBTDeprecation): + _name = "project-config-log-path" + _event = "ConfigLogPathDeprecation" + + +class ConfigTargetPathDeprecation(DBTDeprecation): + _name = "project-config-target-path" + _event = "ConfigTargetPathDeprecation" + + def renamed_method(old_name: str, new_name: str): class AdapterDeprecationWarning(DBTDeprecation): _name = "adapter:{}".format(old_name) @@ -81,19 +93,29 @@ class ExposureNameDeprecation(DBTDeprecation): _event = "ExposureNameDeprecation" -class ConfigLogPathDeprecation(DBTDeprecation): - _name = "project-config-log-path" - _event = "ConfigLogPathDeprecation" +class CollectFreshnessReturnSignature(DBTDeprecation): + _name = "collect-freshness-return-signature" + _event = "CollectFreshnessReturnSignature" -class ConfigTargetPathDeprecation(DBTDeprecation): - _name = "project-config-target-path" - _event = "ConfigTargetPathDeprecation" +class ProjectFlagsMovedDeprecation(DBTDeprecation): + _name = "project-flags-moved" + _event = "ProjectFlagsMovedDeprecation" -class CollectFreshnessReturnSignature(DBTDeprecation): - _name = "collect-freshness-return-signature" - _event = "CollectFreshnessReturnSignature" +class PackageMaterializationOverrideDeprecation(DBTDeprecation): + _name = "package-materialization-override" + _event = "PackageMaterializationOverrideDeprecation" + + +class ResourceNamesWithSpacesDeprecation(DBTDeprecation): + _name = "resource-names-with-spaces" + _event = "ResourceNamesWithSpacesDeprecation" + + +class SourceFreshnessProjectHooksNotRun(DBTDeprecation): + _name = "source-freshness-project-hooks" + _event = "SourceFreshnessProjectHooksNotRun" def renamed_env_var(old_name: str, new_name: str): @@ -111,7 +133,7 @@ def cb(): return cb -def warn(name, *args, **kwargs): +def warn(name: str, *args, **kwargs) -> None: if name not in deprecations: # this should 
(hopefully) never happen raise RuntimeError("Error showing deprecation warning: {}".format(name)) @@ -119,6 +141,13 @@ def warn(name, *args, **kwargs): deprecations[name].show(*args, **kwargs) +def buffer(name: str, *args, **kwargs): + def show_callback(): + deprecations[name].show(*args, **kwargs) + + buffered_deprecations.append(show_callback) + + # these are globally available # since modules are only imported once, active_deprecations is a singleton @@ -129,15 +158,25 @@ def warn(name, *args, **kwargs): PackageInstallPathDeprecation(), ConfigSourcePathDeprecation(), ConfigDataPathDeprecation(), - MetricAttributesRenamed(), ExposureNameDeprecation(), ConfigLogPathDeprecation(), ConfigTargetPathDeprecation(), CollectFreshnessReturnSignature(), + ProjectFlagsMovedDeprecation(), + PackageMaterializationOverrideDeprecation(), + ResourceNamesWithSpacesDeprecation(), + SourceFreshnessProjectHooksNotRun(), ] deprecations: Dict[str, DBTDeprecation] = {d.name: d for d in deprecations_list} +buffered_deprecations: List[Callable] = [] + def reset_deprecations(): active_deprecations.clear() + + +def fire_buffered_deprecations(): + [dep_fn() for dep_fn in buffered_deprecations] + buffered_deprecations.clear() diff --git a/core/dbt/deps/README.md b/core/dbt/deps/README.md index 99c7fd6fb80..bf8d878e254 100644 --- a/core/dbt/deps/README.md +++ b/core/dbt/deps/README.md @@ -16,7 +16,7 @@ Defines the base classes of `PinnedPackage` and `UnpinnedPackage`. `downloads_directory` sets the directory packages will be downloaded to. -`_install` has retry logic if the download or untarring process hit exceptions (see `dbt.utils._connection_exception_retry`). +`_install` has retry logic if the download or untarring process hit exceptions (see `dbt_common.utils.connection_exception_retry`). ## `git.py` diff --git a/core/dbt/deps/base.py b/core/dbt/deps/base.py index f72878422aa..0d6dfaf20ed 100644 --- a/core/dbt/deps/base.py +++ b/core/dbt/deps/base.py @@ -1,16 +1,16 @@ import abc -import os import functools +import os import tempfile from contextlib import contextmanager from pathlib import Path -from typing import List, Optional, Generic, TypeVar +from typing import Dict, Generic, List, Optional, TypeVar -from dbt.clients import system from dbt.contracts.project import ProjectPackageMetadata -from dbt.events.functions import fire_event from dbt.events.types import DepsSetDownloadDirectory -from dbt.utils import _connection_exception_retry as connection_exception_retry +from dbt_common.clients import system +from dbt_common.events.functions import fire_event +from dbt_common.utils.connection import connection_exception_retry DOWNLOADS_PATH = None @@ -84,6 +84,10 @@ def install(self, project, renderer): def nice_version_name(self): raise NotImplementedError + @abc.abstractmethod + def to_dict(self) -> Dict[str, str]: + raise NotImplementedError + def fetch_metadata(self, project, renderer): if not self._cached_metadata: self._cached_metadata = self._fetch_metadata(project, renderer) @@ -122,7 +126,7 @@ def download_and_untar(self, download_url, tar_path, deps_path, package_name): download appears successful but the file did not make it through as expected (generally due to a github incident). Either way we want to retry downloading and untarring to see if we can get a success. 
Call this within - `_connection_exception_retry` + `connection_exception_retry` """ system.download(download_url, tar_path) diff --git a/core/dbt/deps/git.py b/core/dbt/deps/git.py index 31d83fa6cd4..c7f76423887 100644 --- a/core/dbt/deps/git.py +++ b/core/dbt/deps/git.py @@ -1,18 +1,22 @@ import os -from typing import List, Optional +from typing import Dict, List, Optional -from dbt.clients import git, system +from dbt.clients import git from dbt.config.project import PartialProject, Project from dbt.config.renderer import PackageRenderer -from dbt.contracts.project import ( - ProjectPackageMetadata, - GitPackage, -) +from dbt.contracts.project import GitPackage, ProjectPackageMetadata from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path -from dbt.exceptions import ExecutableError, MultipleVersionGitDepsError -from dbt.events.functions import fire_event, warn_or_error -from dbt.events.types import EnsureGitInstalled, DepsUnpinned +from dbt.events.types import DepsScrubbedPackageName, DepsUnpinned, EnsureGitInstalled +from dbt.exceptions import MultipleVersionGitDepsError from dbt.utils import md5 +from dbt_common.clients import system +from dbt_common.events.functions import ( + env_secrets, + fire_event, + scrub_secrets, + warn_or_error, +) +from dbt_common.exceptions import ExecutableError def md5sum(s: str): @@ -20,13 +24,20 @@ def md5sum(s: str): class GitPackageMixin: - def __init__(self, git: str) -> None: + def __init__( + self, + git: str, + git_unrendered: str, + subdirectory: Optional[str] = None, + ) -> None: super().__init__() self.git = git + self.git_unrendered = git_unrendered + self.subdirectory = subdirectory @property def name(self): - return self.git + return f"{self.git}/{self.subdirectory}" if self.subdirectory else self.git def source_type(self) -> str: return "git" @@ -36,15 +47,28 @@ class GitPinnedPackage(GitPackageMixin, PinnedPackage): def __init__( self, git: str, + git_unrendered: str, revision: str, warn_unpinned: bool = True, subdirectory: Optional[str] = None, ) -> None: - super().__init__(git) + super().__init__(git, git_unrendered, subdirectory) self.revision = revision self.warn_unpinned = warn_unpinned self.subdirectory = subdirectory - self._checkout_name = md5sum(self.git) + self._checkout_name = md5sum(self.name) + + def to_dict(self) -> Dict[str, str]: + git_scrubbed = scrub_secrets(self.git_unrendered, env_secrets()) + if self.git_unrendered != git_scrubbed: + warn_or_error(DepsScrubbedPackageName(package_name=git_scrubbed)) + ret = { + "git": git_scrubbed, + "revision": self.revision, + } + if self.subdirectory: + ret["subdirectory"] = self.subdirectory + return ret def get_version(self): return self.revision @@ -82,8 +106,13 @@ def _fetch_metadata( ) -> ProjectPackageMetadata: path = self._checkout() + # raise warning (or error) if this package is not pinned if (self.revision == "HEAD" or self.revision in ("main", "master")) and self.warn_unpinned: - warn_or_error(DepsUnpinned(git=self.git)) + warn_or_error(DepsUnpinned(revision=self.revision, git=self.git)) + + # now overwrite 'revision' with actual commit SHA + self.revision = git.get_current_sha(path) + partial = PartialProject.from_project_root(path) return partial.render_package_metadata(renderer) @@ -102,11 +131,12 @@ class GitUnpinnedPackage(GitPackageMixin, UnpinnedPackage[GitPinnedPackage]): def __init__( self, git: str, + git_unrendered: str, revisions: List[str], warn_unpinned: bool = True, subdirectory: Optional[str] = None, ) -> None: - super().__init__(git) + 
super().__init__(git, git_unrendered, subdirectory) self.revisions = revisions self.warn_unpinned = warn_unpinned self.subdirectory = subdirectory @@ -119,6 +149,7 @@ def from_contract(cls, contract: GitPackage) -> "GitUnpinnedPackage": warn_unpinned = contract.warn_unpinned is not False return cls( git=contract.git, + git_unrendered=(contract.unrendered.get("git") or contract.git), revisions=revisions, warn_unpinned=warn_unpinned, subdirectory=contract.subdirectory, @@ -129,13 +160,21 @@ def all_names(self) -> List[str]: other = self.git[:-4] else: other = self.git + ".git" - return [self.git, other] + + if self.subdirectory: + git_name = f"{self.git}/{self.subdirectory}" + other = f"{other}/{self.subdirectory}" + else: + git_name = self.git + + return [git_name, other] def incorporate(self, other: "GitUnpinnedPackage") -> "GitUnpinnedPackage": warn_unpinned = self.warn_unpinned and other.warn_unpinned return GitUnpinnedPackage( git=self.git, + git_unrendered=self.git_unrendered, revisions=self.revisions + other.revisions, warn_unpinned=warn_unpinned, subdirectory=self.subdirectory, @@ -146,10 +185,10 @@ def resolved(self) -> GitPinnedPackage: if len(requested) == 0: requested = {"HEAD"} elif len(requested) > 1: - raise MultipleVersionGitDepsError(self.git, requested) - + raise MultipleVersionGitDepsError(self.name, requested) return GitPinnedPackage( git=self.git, + git_unrendered=self.git_unrendered, revision=requested.pop(), warn_unpinned=self.warn_unpinned, subdirectory=self.subdirectory, diff --git a/core/dbt/deps/local.py b/core/dbt/deps/local.py index 93e2a3cc323..869ac0c3055 100644 --- a/core/dbt/deps/local.py +++ b/core/dbt/deps/local.py @@ -1,15 +1,13 @@ import shutil +from typing import Dict -from dbt.clients import system -from dbt.deps.base import PinnedPackage, UnpinnedPackage -from dbt.contracts.project import ( - ProjectPackageMetadata, - LocalPackage, -) -from dbt.events.functions import fire_event -from dbt.events.types import DepsCreatingLocalSymlink, DepsSymlinkNotAvailable from dbt.config.project import PartialProject, Project from dbt.config.renderer import PackageRenderer +from dbt.contracts.project import LocalPackage, ProjectPackageMetadata +from dbt.deps.base import PinnedPackage, UnpinnedPackage +from dbt.events.types import DepsCreatingLocalSymlink, DepsSymlinkNotAvailable +from dbt_common.clients import system +from dbt_common.events.functions import fire_event class LocalPackageMixin: @@ -29,6 +27,11 @@ class LocalPinnedPackage(LocalPackageMixin, PinnedPackage): def __init__(self, local: str) -> None: super().__init__(local) + def to_dict(self) -> Dict[str, str]: + return { + "local": self.local, + } + def get_version(self): return None @@ -51,19 +54,15 @@ def install(self, project, renderer): src_path = self.resolve_path(project) dest_path = self.get_installation_path(project, renderer) - can_create_symlink = system.supports_symlinks() - if system.path_exists(dest_path): if not system.path_is_symlink(dest_path): system.rmdir(dest_path) else: system.remove_file(dest_path) - - if can_create_symlink: + try: fire_event(DepsCreatingLocalSymlink()) system.make_symlink(src_path, dest_path) - - else: + except OSError: fire_event(DepsSymlinkNotAvailable()) shutil.copytree(src_path, dest_path) diff --git a/core/dbt/deps/registry.py b/core/dbt/deps/registry.py index 351a9985206..8943523e3f5 100644 --- a/core/dbt/deps/registry.py +++ b/core/dbt/deps/registry.py @@ -1,20 +1,17 @@ -from typing import List +from typing import Dict, List -from dbt import semver -from 
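The new `to_dict` methods provide the serialized form of each pinned package (used when dbt writes its package lock file), and the unrendered `git` value is scrubbed before it is written so that secret values never land on disk, firing `DepsScrubbedPackageName` whenever anything was masked. A minimal sketch of what that scrubbing step amounts to, assuming (as a simplification of the helpers in `dbt_common`) that `env_secrets()` collects the values of `DBT_ENV_SECRET_`-prefixed environment variables and `scrub_secrets()` masks them in a string:

```python
import os
from typing import List

SECRET_PREFIX = "DBT_ENV_SECRET_"   # assumed prefix; the real constant lives in dbt_common
SECRET_PLACEHOLDER = "*****"        # assumed placeholder used for masking


def env_secrets() -> List[str]:
    """Collect the values of secret environment variables."""
    return [v for k, v in os.environ.items() if k.startswith(SECRET_PREFIX) and v]


def scrub_secrets(msg: str, secrets: List[str]) -> str:
    """Replace any secret value that appears verbatim in msg with a placeholder."""
    for secret in secrets:
        msg = msg.replace(secret, SECRET_PLACEHOLDER)
    return msg


# e.g. if a token was pasted directly into packages.yml instead of using env_var():
# scrub_secrets("https://user:hunter2@github.com/org/repo.git", ["hunter2"])
# -> "https://user:*****@github.com/org/repo.git"
```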
dbt.flags import get_flags -from dbt.version import get_installed_version from dbt.clients import registry -from dbt.contracts.project import ( - RegistryPackageMetadata, - RegistryPackage, -) +from dbt.contracts.project import RegistryPackage, RegistryPackageMetadata from dbt.deps.base import PinnedPackage, UnpinnedPackage from dbt.exceptions import ( DependencyError, PackageNotFoundError, PackageVersionNotFoundError, - VersionsNotCompatibleError, ) +from dbt.flags import get_flags +from dbt.version import get_installed_version +from dbt_common import semver +from dbt_common.exceptions import VersionsNotCompatibleError class RegistryPackageMixin: @@ -40,6 +37,12 @@ def __init__(self, package: str, version: str, version_latest: str) -> None: def name(self): return self.package + def to_dict(self) -> Dict[str, str]: + return { + "package": self.package, + "version": self.version, + } + def source_type(self): return "hub" diff --git a/core/dbt/deps/resolver.py b/core/dbt/deps/resolver.py index 52758f6bb5c..b4a0c60ef6c 100644 --- a/core/dbt/deps/resolver.py +++ b/core/dbt/deps/resolver.py @@ -1,29 +1,27 @@ from dataclasses import dataclass, field -from typing import Dict, List, NoReturn, Union, Type, Iterator, Set, Any - -from dbt.exceptions import ( - DuplicateDependencyToRootError, - DuplicateProjectDependencyError, - MismatchedDependencyTypeError, - DbtInternalError, -) +from typing import Any, Dict, Iterator, List, NoReturn, Set, Type from dbt.config import Project from dbt.config.renderer import PackageRenderer -from dbt.deps.base import BasePackage, PinnedPackage, UnpinnedPackage -from dbt.deps.local import LocalUnpinnedPackage -from dbt.deps.tarball import TarballUnpinnedPackage -from dbt.deps.git import GitUnpinnedPackage -from dbt.deps.registry import RegistryUnpinnedPackage - from dbt.contracts.project import ( - LocalPackage, - TarballPackage, GitPackage, + LocalPackage, + PackageSpec, + PrivatePackage, RegistryPackage, + TarballPackage, +) +from dbt.deps.base import BasePackage, PinnedPackage, UnpinnedPackage +from dbt.deps.git import GitUnpinnedPackage +from dbt.deps.local import LocalUnpinnedPackage +from dbt.deps.registry import RegistryUnpinnedPackage +from dbt.deps.tarball import TarballUnpinnedPackage +from dbt.exceptions import ( + DependencyError, + DuplicateDependencyToRootError, + DuplicateProjectDependencyError, + MismatchedDependencyTypeError, ) - -PackageContract = Union[LocalPackage, TarballPackage, GitPackage, RegistryPackage] @dataclass @@ -68,7 +66,7 @@ def incorporate(self, package: UnpinnedPackage): else: self.packages[key] = package - def update_from(self, src: List[PackageContract]) -> None: + def update_from(self, src: List[PackageSpec]) -> None: pkg: UnpinnedPackage for contract in src: if isinstance(contract, LocalPackage): @@ -77,16 +75,18 @@ def update_from(self, src: List[PackageContract]) -> None: pkg = TarballUnpinnedPackage.from_contract(contract) elif isinstance(contract, GitPackage): pkg = GitUnpinnedPackage.from_contract(contract) + elif isinstance(contract, PrivatePackage): + raise DependencyError( + f'Cannot resolve private package {contract.private} because git provider integration is missing. Please use a "git" package instead.' 
+ ) elif isinstance(contract, RegistryPackage): pkg = RegistryUnpinnedPackage.from_contract(contract) else: - raise DbtInternalError("Invalid package type {}".format(type(contract))) + raise DependencyError("Invalid package type {}".format(type(contract))) self.incorporate(pkg) @classmethod - def from_contracts( - cls: Type["PackageListing"], src: List[PackageContract] - ) -> "PackageListing": + def from_contracts(cls: Type["PackageListing"], src: List[PackageSpec]) -> "PackageListing": self = cls({}) self.update_from(src) return self @@ -114,7 +114,7 @@ def _check_for_duplicate_project_names( def resolve_packages( - packages: List[PackageContract], + packages: List[PackageSpec], project: Project, cli_vars: Dict[str, Any], ) -> List[PinnedPackage]: @@ -135,3 +135,15 @@ def resolve_packages( resolved = final.resolved() _check_for_duplicate_project_names(resolved, project, renderer) return resolved + + +def resolve_lock_packages(packages: List[PackageSpec]) -> List[PinnedPackage]: + lock_packages = PackageListing.from_contracts(packages) + final = PackageListing() + + for package in lock_packages: + final.incorporate(package) + + resolved = final.resolved() + + return resolved diff --git a/core/dbt/deps/tarball.py b/core/dbt/deps/tarball.py index 16c9cb0a20d..d7874978e2b 100644 --- a/core/dbt/deps/tarball.py +++ b/core/dbt/deps/tarball.py @@ -1,11 +1,23 @@ -from dbt.contracts.project import RegistryPackageMetadata, TarballPackage -from dbt.deps.base import PinnedPackage, UnpinnedPackage +import functools +import os +from pathlib import Path +from typing import Dict + +from dbt.config.project import PartialProject +from dbt.contracts.project import TarballPackage +from dbt.deps.base import PinnedPackage, UnpinnedPackage, get_downloads_path +from dbt.events.types import DepsScrubbedPackageName +from dbt.exceptions import DependencyError, env_secrets, scrub_secrets +from dbt_common.clients import system +from dbt_common.events.functions import warn_or_error +from dbt_common.utils.connection import connection_exception_retry class TarballPackageMixin: - def __init__(self, tarball: str) -> None: + def __init__(self, tarball: str, tarball_unrendered: str) -> None: super().__init__() self.tarball = tarball + self.tarball_unrendered = tarball_unrendered @property def name(self): @@ -16,16 +28,26 @@ def source_type(self) -> str: class TarballPinnedPackage(TarballPackageMixin, PinnedPackage): - def __init__(self, tarball: str, package: str) -> None: - super().__init__(tarball) - # setup to recycle RegistryPinnedPackage fns + def __init__(self, tarball: str, tarball_unrendered: str, package: str) -> None: + super().__init__(tarball, tarball_unrendered) self.package = package self.version = "tarball" + self.tar_path = os.path.join(Path(get_downloads_path()), self.package) + self.untarred_path = f"{self.tar_path}_untarred" @property def name(self): return self.package + def to_dict(self) -> Dict[str, str]: + tarball_scrubbed = scrub_secrets(self.tarball_unrendered, env_secrets()) + if self.tarball_unrendered != tarball_scrubbed: + warn_or_error(DepsScrubbedPackageName(package_name=tarball_scrubbed)) + return { + "tarball": tarball_scrubbed, + "name": self.package, + } + def get_version(self): return self.version @@ -33,42 +55,66 @@ def nice_version_name(self): return f"tarball (url: {self.tarball})" def _fetch_metadata(self, project, renderer): - """ - recycle RegistryPackageMetadata so that we can use the install and - download_and_untar from RegistryPinnedPackage next. 
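`resolve_lock_packages` is the lock-file counterpart of `resolve_packages`: it takes already-recorded `PackageSpec` entries and pins them directly, without re-rendering cli vars or re-checking for duplicate project names. A rough, hypothetical usage sketch; the contract field names are the ones visible in this diff, and the full definitions live in `dbt.contracts.project`:

```python
# Hypothetical sketch only: these entries would normally come from parsing the lock file.
from dbt.contracts.project import GitPackage, RegistryPackage
from dbt.deps.resolver import resolve_lock_packages

locked_specs = [
    RegistryPackage(package="dbt-labs/dbt_utils", version="1.1.1"),
    GitPackage(git="https://github.com/org/private_pkg.git", revision="abc1234"),
]

# Each spec is turned into its Unpinned* wrapper, incorporated, and resolved to a
# PinnedPackage; registry entries may still consult the hub index when resolving.
pinned = resolve_lock_packages(locked_specs)
for pkg in pinned:
    print(pkg.name, pkg.get_version())
```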
- build RegistryPackageMetadata from info passed via packages.yml since no - 'metadata' service exists in this case. - """ - - dct = { - "name": self.package, - "packages": [], # note: required by RegistryPackageMetadata - "downloads": {"tarball": self.tarball}, - } - - return RegistryPackageMetadata.from_dict(dct) + """Download and untar the project and parse metadata from the project folder.""" + download_untar_fn = functools.partial( + self.download_and_untar, self.tarball, self.tar_path, self.untarred_path, self.name + ) + connection_exception_retry(download_untar_fn, 5) + + tar_contents = os.listdir(self.untarred_path) + if len(tar_contents) != 1: + raise DependencyError( + f"Incorrect structure for package extracted from {self.tarball}." + f"The extracted package needs to follow the structure {self.name}/<package_contents>." + ) + child_folder = os.listdir(self.untarred_path)[0] + + self.untarred_path = os.path.join(self.untarred_path, child_folder) + partial = PartialProject.from_project_root(self.untarred_path) + metadata = partial.render_package_metadata(renderer) + metadata.name = self.package if self.package else metadata.name + return metadata def install(self, project, renderer): - self._install(project, renderer) + download_untar_fn = functools.partial( + self.download_and_untar, self.tarball, self.tar_path, self.untarred_path, self.name + ) + connection_exception_retry(download_untar_fn, 5) + dest_path = self.get_installation_path(project, renderer) + if os.path.exists(dest_path): + if system.path_is_symlink(dest_path): + system.remove_file(dest_path) + else: + system.rmdir(dest_path) + system.move(self.untarred_path, dest_path) class TarballUnpinnedPackage(TarballPackageMixin, UnpinnedPackage[TarballPinnedPackage]): def __init__( self, tarball: str, + tarball_unrendered: str, package: str, ) -> None: - super().__init__(tarball) + super().__init__(tarball, tarball_unrendered) # setup to recycle RegistryPinnedPackage fns self.package = package self.version = "tarball" @classmethod def from_contract(cls, contract: TarballPackage) -> "TarballUnpinnedPackage": - return cls(tarball=contract.tarball, package=contract.name) + return cls( + tarball=contract.tarball, + tarball_unrendered=(contract.unrendered.get("tarball") or contract.tarball), + package=contract.name, + ) def incorporate(self, other: "TarballUnpinnedPackage") -> "TarballUnpinnedPackage": - return TarballUnpinnedPackage(tarball=self.tarball, package=self.package) + return TarballUnpinnedPackage( + tarball=self.tarball, tarball_unrendered=self.tarball_unrendered, package=self.package + ) def resolved(self) -> TarballPinnedPackage: - return TarballPinnedPackage(tarball=self.tarball, package=self.package) + return TarballPinnedPackage( + tarball=self.tarball, tarball_unrendered=self.tarball_unrendered, package=self.package + ) diff --git a/core/dbt/docs/source/_ext/dbt_click.py b/core/dbt/docs/source/_ext/dbt_click.py index 7343cc6a110..f51de96b7f5 100644 --- a/core/dbt/docs/source/_ext/dbt_click.py +++ b/core/dbt/docs/source/_ext/dbt_click.py @@ -1,11 +1,12 @@ +import traceback +import typing as t + import click import click.types as click_t -import dbt.cli.option_types as dbt_t from docutils import nodes from docutils.parsers.rst import Directive -import traceback -import typing as t +import dbt.cli.option_types as dbt_t PARAM_TYPE_MAP = { click_t.BoolParamType: lambda _: "boolean", diff --git a/core/dbt/docs/source/conf.py b/core/dbt/docs/source/conf.py index d9962bbfc8b..db6364a4266 100644 --- 
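The tarball package now fetches its metadata the same way local and git packages do: it downloads and extracts the archive, retried via `connection_exception_retry` just like registry downloads, then reads the project out of the single top-level folder the archive is required to contain (`<package_name>/<package_contents>`). The retry idiom, sketched with a stand-in download function rather than the package's own `download_and_untar`:

```python
import functools

from dbt_common.utils.connection import connection_exception_retry


def fetch_archive(url: str, dest: str) -> None:
    """Stand-in for download_and_untar: any callable that may raise a
    transient connection error while fetching the archive."""
    ...


# Bind the arguments up front, then let the helper call the function up to 5 times,
# mirroring how TarballPinnedPackage wraps download_and_untar above.
download_fn = functools.partial(fetch_archive, "https://example.com/pkg.tar.gz", "/tmp/pkg.tar.gz")
connection_exception_retry(download_fn, 5)
```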
a/core/dbt/docs/source/conf.py +++ b/core/dbt/docs/source/conf.py @@ -1,5 +1,5 @@ -import sys import os +import sys import typing as t # Configuration file for the Sphinx documentation builder. diff --git a/core/dbt/events/README.md b/core/dbt/events/README.md index 34a056bbcf3..0f770a12c04 100644 --- a/core/dbt/events/README.md +++ b/core/dbt/events/README.md @@ -2,15 +2,15 @@ The Events module is responsible for communicating internal dbt structures into a consumable interface. Because the "event" classes are based entirely on protobuf definitions, the interface is really clearly defined, whether or not protobufs are used to consume it. We use Betterproto for compiling the protobuf message definitions into Python classes. # Using the Events Module -The event module provides types that represent what is happening in dbt in `events.types`. These types are intended to represent an exhaustive list of all things happening within dbt that will need to be logged, streamed, or printed. To fire an event, `events.functions::fire_event` is the entry point to the module from everywhere in dbt. +The event module provides types that represent what is happening in dbt in `events.types`. These types are intended to represent an exhaustive list of all things happening within dbt that will need to be logged, streamed, or printed. To fire an event, `common.events.functions::fire_event` is the entry point to the module from everywhere in dbt. # Logging When events are processed via `fire_event`, nearly everything is logged. Whether or not the user has enabled the debug flag, all debug messages are still logged to the file. However, some events are particularly time consuming to construct because they return a huge amount of data. Today, the only messages in this category are cache events and are only logged if the `--log-cache-events` flag is on. This is important because these messages should not be created unless they are going to be logged, because they cause a noticable performance degredation. These events use a "fire_event_if" functions. # Adding a New Event * Add a new message in types.proto, and a second message with the same name + "Msg". The "Msg" message should have two fields, an "info" field of EventInfo, and a "data" field referring to the message name without "Msg" -* run the protoc compiler to update types_pb2.py: make proto_types -* Add a wrapping class in core/dbt/event/types.py with a Level superclass plus code and message methods +* run the protoc compiler to update core_types_pb2.py: make core_proto_types +* Add a wrapping class in core/dbt/event/core_types.py with a Level superclass plus code and message methods * Add the class to tests/unit/test_events.py We have switched from using betterproto to using google protobuf, because of a lack of support for Struct fields in betterproto. @@ -35,21 +35,8 @@ class PartialParsingDeletedExposure(DebugLevel): ``` -# Adapter Maintainers -To integrate existing log messages from adapters, you likely have a line of code like this in your adapter already: -```python -from dbt.logger import GLOBAL_LOGGER as logger -``` - -Simply change it to these two lines with your adapter's database name, and all your existing call sites will now use the new system for v1.0: -```python -from dbt.events import AdapterLogger -logger = AdapterLogger("<database name>") -# e.g. 
AdapterLogger("Snowflake") -``` - -## Compiling types.proto +## Compiling core_types.proto After adding a new message in `types.proto`, either: -- In the repository root directory: `make proto_types` +- In the repository root directory: `make core_proto_types` - In the `core/dbt/events` directory: `protoc -I=. --python_out=. types.proto` diff --git a/core/dbt/events/__init__.py b/core/dbt/events/__init__.py index d0fc24d7bb5..123f242cae5 100644 --- a/core/dbt/events/__init__.py +++ b/core/dbt/events/__init__.py @@ -1 +1,15 @@ -from .adapter_endpoint import AdapterLogger # noqa: F401 +from typing import Any, Dict, Set + +import dbt.adapters.events.types as adapter_dbt_event_types +import dbt.events.types as core_dbt_event_types +import dbt_common.events.types as dbt_event_types + +ALL_EVENT_TYPES: Dict[str, Any] = { + **dbt_event_types.__dict__, + **core_dbt_event_types.__dict__, + **adapter_dbt_event_types.__dict__, +} + +ALL_EVENT_NAMES: Set[str] = set( + [name for name, cls in ALL_EVENT_TYPES.items() if isinstance(cls, type)] +) diff --git a/core/dbt/events/adapter_endpoint.py b/core/dbt/events/adapter_endpoint.py deleted file mode 100644 index 7e5cf0cd1d3..00000000000 --- a/core/dbt/events/adapter_endpoint.py +++ /dev/null @@ -1,58 +0,0 @@ -import traceback -from dataclasses import dataclass -from dbt.events.functions import fire_event -from dbt.events.contextvars import get_node_info -from dbt.events.types import ( - AdapterEventDebug, - AdapterEventInfo, - AdapterEventWarning, - AdapterEventError, -) - - -# N.B. No guarantees for what type param msg is. -@dataclass -class AdapterLogger: - name: str - - def debug(self, msg, *args): - event = AdapterEventDebug( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) - - def info(self, msg, *args): - event = AdapterEventInfo( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) - - def warning(self, msg, *args): - event = AdapterEventWarning( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) - - def error(self, msg, *args): - event = AdapterEventError( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) - - # The default exc_info=True is what makes this method different - def exception(self, msg, *args): - exc_info = str(traceback.format_exc()) - event = AdapterEventError( - name=self.name, - base_msg=str(msg), - args=list(args), - node_info=get_node_info(), - exc_info=exc_info, - ) - fire_event(event) - - def critical(self, msg, *args): - event = AdapterEventError( - name=self.name, base_msg=str(msg), args=list(args), node_info=get_node_info() - ) - fire_event(event) diff --git a/core/dbt/events/base_types.py b/core/dbt/events/base_types.py index 711a0bfad72..a3ae0610849 100644 --- a/core/dbt/events/base_types.py +++ b/core/dbt/events/base_types.py @@ -1,184 +1,37 @@ -from enum import Enum -import os -import threading -from dbt.events import types_pb2 -import sys -from google.protobuf.json_format import ParseDict, MessageToDict, MessageToJson -from google.protobuf.message import Message -from dbt.events.helpers import get_json_string_utcnow -from typing import Optional +# Aliasing common Level classes in order to make custom, but not overly-verbose versions that have PROTO_TYPES_MODULE set to the core-specific generated types_pb2 module +from dbt.events import core_types_pb2 +from dbt_common.events.base_types import BaseEvent +from 
dbt_common.events.base_types import DebugLevel as CommonDebugLevel +from dbt_common.events.base_types import DynamicLevel as CommonDyanicLevel +from dbt_common.events.base_types import ErrorLevel as CommonErrorLevel +from dbt_common.events.base_types import InfoLevel as CommonInfoLevel +from dbt_common.events.base_types import TestLevel as CommonTestLevel +from dbt_common.events.base_types import WarnLevel as CommonWarnLevel -if sys.version_info >= (3, 8): - from typing import Protocol -else: - from typing_extensions import Protocol +class CoreBaseEvent(BaseEvent): + PROTO_TYPES_MODULE = core_types_pb2 -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# These base types define the _required structure_ for the concrete event # -# types defined in types.py # -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # - -def get_global_metadata_vars() -> dict: - from dbt.events.functions import get_metadata_vars - - return get_metadata_vars() - - -def get_invocation_id() -> str: - from dbt.events.functions import get_invocation_id - - return get_invocation_id() - - -# exactly one pid per concrete event -def get_pid() -> int: - return os.getpid() - - -# in theory threads can change so we don't cache them. -def get_thread_name() -> str: - return threading.current_thread().name - - -# EventLevel is an Enum, but mixing in the 'str' type is suggested in the Python -# documentation, and provides support for json conversion, which fails otherwise. -class EventLevel(str, Enum): - DEBUG = "debug" - TEST = "test" - INFO = "info" - WARN = "warn" - ERROR = "error" - - -class BaseEvent: - """BaseEvent for proto message generated python events""" - - def __init__(self, *args, **kwargs): - class_name = type(self).__name__ - msg_cls = getattr(types_pb2, class_name) - if class_name == "Formatting" and len(args) > 0: - kwargs["msg"] = args[0] - args = () - assert ( - len(args) == 0 - ), f"[{class_name}] Don't use positional arguments when constructing logging events" - if "base_msg" in kwargs: - kwargs["base_msg"] = str(kwargs["base_msg"]) - if "msg" in kwargs: - kwargs["msg"] = str(kwargs["msg"]) - try: - self.pb_msg = ParseDict(kwargs, msg_cls()) - except Exception: - # Imports need to be here to avoid circular imports - from dbt.events.types import Note - from dbt.events.functions import fire_event - - error_msg = f"[{class_name}]: Unable to parse dict {kwargs}" - # If we're testing throw an error so that we notice failures - if "pytest" in sys.modules: - raise Exception(error_msg) - else: - fire_event(Note(msg=error_msg), level=EventLevel.WARN) - self.pb_msg = msg_cls() - - def __setattr__(self, key, value): - if key == "pb_msg": - super().__setattr__(key, value) - else: - super().__getattribute__("pb_msg").__setattr__(key, value) - - def __getattr__(self, key): - if key == "pb_msg": - return super().__getattribute__(key) - else: - return super().__getattribute__("pb_msg").__getattribute__(key) - - def to_dict(self): - return MessageToDict( - self.pb_msg, preserving_proto_field_name=True, including_default_value_fields=True - ) - - def to_json(self): - return MessageToJson( - self.pb_msg, preserving_proto_field_name=True, including_default_valud_fields=True - ) - - def level_tag(self) -> EventLevel: - return EventLevel.DEBUG - - def message(self) -> str: - raise Exception("message() not implemented for event") - - def code(self) -> str: - raise Exception("code() not implemented for event") - - -class EventInfo(Protocol): - level: str - name: str - ts: str - 
code: str - - -class EventMsg(Protocol): - info: EventInfo - data: Message - - -def msg_from_base_event(event: BaseEvent, level: Optional[EventLevel] = None): - - msg_class_name = f"{type(event).__name__}Msg" - msg_cls = getattr(types_pb2, msg_class_name) - - # level in EventInfo must be a string, not an EventLevel - msg_level: str = level.value if level else event.level_tag().value - assert msg_level is not None - event_info = { - "level": msg_level, - "msg": event.message(), - "invocation_id": get_invocation_id(), - "extra": get_global_metadata_vars(), - "ts": get_json_string_utcnow(), - "pid": get_pid(), - "thread": get_thread_name(), - "code": event.code(), - "name": type(event).__name__, - } - new_event = ParseDict({"info": event_info}, msg_cls()) - new_event.data.CopyFrom(event.pb_msg) - return new_event - - -# DynamicLevel requires that the level be supplied on the -# event construction call using the "info" function from functions.py -class DynamicLevel(BaseEvent): +class DynamicLevel(CommonDyanicLevel, CoreBaseEvent): pass -class TestLevel(BaseEvent): - __test__ = False - - def level_tag(self) -> EventLevel: - return EventLevel.TEST +class TestLevel(CommonTestLevel, CoreBaseEvent): + pass -class DebugLevel(BaseEvent): - def level_tag(self) -> EventLevel: - return EventLevel.DEBUG +class DebugLevel(CommonDebugLevel, CoreBaseEvent): + pass -class InfoLevel(BaseEvent): - def level_tag(self) -> EventLevel: - return EventLevel.INFO +class InfoLevel(CommonInfoLevel, CoreBaseEvent): + pass -class WarnLevel(BaseEvent): - def level_tag(self) -> EventLevel: - return EventLevel.WARN +class WarnLevel(CommonWarnLevel, CoreBaseEvent): + pass -class ErrorLevel(BaseEvent): - def level_tag(self) -> EventLevel: - return EventLevel.ERROR +class ErrorLevel(CommonErrorLevel, CoreBaseEvent): + pass diff --git a/core/dbt/events/contextvars.py b/core/dbt/events/contextvars.py deleted file mode 100644 index 5bdb78fe4e2..00000000000 --- a/core/dbt/events/contextvars.py +++ /dev/null @@ -1,114 +0,0 @@ -import contextlib -import contextvars - -from typing import Any, Generator, Mapping, Dict - - -LOG_PREFIX = "log_" -TASK_PREFIX = "task_" - -_context_vars: Dict[str, contextvars.ContextVar] = {} - - -def get_contextvars(prefix: str) -> Dict[str, Any]: - rv = {} - ctx = contextvars.copy_context() - - prefix_len = len(prefix) - for k in ctx: - if k.name.startswith(prefix) and ctx[k] is not Ellipsis: - rv[k.name[prefix_len:]] = ctx[k] - - return rv - - -def get_node_info(): - cvars = get_contextvars(LOG_PREFIX) - if "node_info" in cvars: - return cvars["node_info"] - else: - return {} - - -def get_project_root(): - cvars = get_contextvars(TASK_PREFIX) - if "project_root" in cvars: - return cvars["project_root"] - else: - return None - - -def clear_contextvars(prefix: str) -> None: - ctx = contextvars.copy_context() - for k in ctx: - if k.name.startswith(prefix): - k.set(Ellipsis) - - -def set_log_contextvars(**kwargs: Any) -> Mapping[str, contextvars.Token]: - return set_contextvars(LOG_PREFIX, **kwargs) - - -def set_task_contextvars(**kwargs: Any) -> Mapping[str, contextvars.Token]: - return set_contextvars(TASK_PREFIX, **kwargs) - - -# put keys and values into context. 
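These core level classes are what the README's "add a wrapping class" step builds on: a new event subclasses one of them so that it serializes through the core-specific `core_types_pb2` module while inheriting the common level behaviour. A hypothetical example, where the proto message, event code, and field are all made up for illustration:

```python
from dbt.events.base_types import InfoLevel


class DepsExampleNote(InfoLevel):
    """Hypothetical event; its fields (here `package_name`) would come from a
    matching DepsExampleNote message added to core_types.proto."""

    def code(self) -> str:
        return "M999"  # illustrative, unused event code

    def message(self) -> str:
        return f"Example note for package {self.package_name}"


# Note: instantiating the class requires the matching generated protobuf message to exist.
```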
Returns the contextvar.Token mapping -# Save and pass to reset_contextvars -def set_contextvars(prefix: str, **kwargs: Any) -> Mapping[str, contextvars.Token]: - cvar_tokens = {} - for k, v in kwargs.items(): - log_key = f"{prefix}{k}" - try: - var = _context_vars[log_key] - except KeyError: - var = contextvars.ContextVar(log_key, default=Ellipsis) - _context_vars[log_key] = var - - cvar_tokens[k] = var.set(v) - - return cvar_tokens - - -# reset by Tokens -def reset_contextvars(prefix: str, **kwargs: contextvars.Token) -> None: - for k, v in kwargs.items(): - log_key = f"{prefix}{k}" - var = _context_vars[log_key] - var.reset(v) - - -# remove from contextvars -def unset_contextvars(prefix: str, *keys: str) -> None: - for k in keys: - if k in _context_vars: - log_key = f"{prefix}{k}" - _context_vars[log_key].set(Ellipsis) - - -# Context manager or decorator to set and unset the context vars -@contextlib.contextmanager -def log_contextvars(**kwargs: Any) -> Generator[None, None, None]: - context = get_contextvars(LOG_PREFIX) - saved = {k: context[k] for k in context.keys() & kwargs.keys()} - - set_contextvars(LOG_PREFIX, **kwargs) - try: - yield - finally: - unset_contextvars(LOG_PREFIX, *kwargs.keys()) - set_contextvars(LOG_PREFIX, **saved) - - -# Context manager for earlier in task.run -@contextlib.contextmanager -def task_contextvars(**kwargs: Any) -> Generator[None, None, None]: - context = get_contextvars(TASK_PREFIX) - saved = {k: context[k] for k in context.keys() & kwargs.keys()} - - set_contextvars(TASK_PREFIX, **kwargs) - try: - yield - finally: - unset_contextvars(TASK_PREFIX, *kwargs.keys()) - set_contextvars(TASK_PREFIX, **saved) diff --git a/core/dbt/events/types.proto b/core/dbt/events/core_types.proto similarity index 67% rename from core/dbt/events/types.proto rename to core/dbt/events/core_types.proto index 79f3be7cc81..ed748dc1e44 100644 --- a/core/dbt/events/types.proto +++ b/core/dbt/events/core_types.proto @@ -6,7 +6,7 @@ import "google/protobuf/timestamp.proto"; import "google/protobuf/struct.proto"; // Common event info -message EventInfo { +message CoreEventInfo { string name = 1; string code = 2; string msg = 3; @@ -19,13 +19,6 @@ message EventInfo { string category = 10; } -// TimingInfo -message TimingInfoMsg { - string name = 1; - google.protobuf.Timestamp started_at = 2; - google.protobuf.Timestamp completed_at = 3; -} - // NodeRelation message NodeRelation { string database = 10; @@ -48,6 +41,13 @@ message NodeInfo { NodeRelation node_relation = 10; } +// TimingInfoMsg +message TimingInfoMsg { + string name = 1; + google.protobuf.Timestamp started_at = 2; + google.protobuf.Timestamp completed_at = 3; +} + // RunResult message RunResultMsg { string status = 1; @@ -59,16 +59,25 @@ message RunResultMsg { int32 num_failures = 7; } -// ReferenceKey -message ReferenceKeyMsg { - string database = 1; - string schema = 2; - string identifier = 3; +//ColumnType +message ColumnType { + string column_name = 1; + string previous_column_type = 2; + string current_column_type = 3; +} + +// ColumnConstraint +message ColumnConstraint { + string column_name = 1; + string constraint_name = 2; + string constraint_type = 3; } -// GenericMessage, used for deserializing only -message GenericMessage { - EventInfo info = 1; +// ModelConstraint +message ModelConstraint { + string constraint_name = 1; + string constraint_type = 2; + repeated string columns = 3; } // A - Pre-project loading @@ -80,7 +89,7 @@ message MainReportVersion { } message MainReportVersionMsg { - EventInfo info = 
1; + CoreEventInfo info = 1; MainReportVersion data = 2; } @@ -90,7 +99,7 @@ message MainReportArgs { } message MainReportArgsMsg { - EventInfo info = 1; + CoreEventInfo info = 1; MainReportArgs data = 2; } @@ -100,7 +109,7 @@ message MainTrackingUserState { } message MainTrackingUserStateMsg { - EventInfo info = 1; + CoreEventInfo info = 1; MainTrackingUserState data = 2; } @@ -111,7 +120,7 @@ message MergedFromState { } message MergedFromStateMsg { - EventInfo info = 1; + CoreEventInfo info = 1; MergedFromState data = 2; } @@ -122,7 +131,7 @@ message MissingProfileTarget { } message MissingProfileTargetMsg { - EventInfo info = 1; + CoreEventInfo info = 1; MissingProfileTarget data = 2; } @@ -134,7 +143,7 @@ message InvalidOptionYAML { } message InvalidOptionYAMLMsg { - EventInfo info = 1; + CoreEventInfo info = 1; InvalidOptionYAML data = 2; } @@ -144,7 +153,7 @@ message LogDbtProjectError { } message LogDbtProjectErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogDbtProjectError data = 2; } @@ -157,7 +166,7 @@ message LogDbtProfileError { } message LogDbtProfileErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogDbtProfileError data = 2; } @@ -169,7 +178,7 @@ message StarterProjectPath { } message StarterProjectPathMsg { - EventInfo info = 1; + CoreEventInfo info = 1; StarterProjectPath data = 2; } @@ -179,7 +188,7 @@ message ConfigFolderDirectory { } message ConfigFolderDirectoryMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ConfigFolderDirectory data = 2; } @@ -189,7 +198,7 @@ message NoSampleProfileFound { } message NoSampleProfileFoundMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NoSampleProfileFound data = 2; } @@ -200,7 +209,7 @@ message ProfileWrittenWithSample { } message ProfileWrittenWithSampleMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ProfileWrittenWithSample data = 2; } @@ -211,7 +220,7 @@ message ProfileWrittenWithTargetTemplateYAML { } message ProfileWrittenWithTargetTemplateYAMLMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ProfileWrittenWithTargetTemplateYAML data = 2; } @@ -222,7 +231,7 @@ message ProfileWrittenWithProjectTemplateYAML { } message ProfileWrittenWithProjectTemplateYAMLMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ProfileWrittenWithProjectTemplateYAML data = 2; } @@ -231,7 +240,7 @@ message SettingUpProfile { } message SettingUpProfileMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SettingUpProfile data = 2; } @@ -240,7 +249,7 @@ message InvalidProfileTemplateYAML { } message InvalidProfileTemplateYAMLMsg { - EventInfo info = 1; + CoreEventInfo info = 1; InvalidProfileTemplateYAML data = 2; } @@ -250,7 +259,7 @@ message ProjectNameAlreadyExists { } message ProjectNameAlreadyExistsMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ProjectNameAlreadyExists data = 2; } @@ -262,7 +271,7 @@ message ProjectCreated { } message ProjectCreatedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ProjectCreated data = 2; } @@ -275,7 +284,7 @@ message PackageRedirectDeprecation { } message PackageRedirectDeprecationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; PackageRedirectDeprecation data = 2; } @@ -284,7 +293,7 @@ message PackageInstallPathDeprecation { } message PackageInstallPathDeprecationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; PackageInstallPathDeprecation data = 2; } @@ -295,7 +304,7 @@ message ConfigSourcePathDeprecation { } message ConfigSourcePathDeprecationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ConfigSourcePathDeprecation data = 2; } @@ -306,28 
+315,17 @@ message ConfigDataPathDeprecation { } message ConfigDataPathDeprecationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ConfigDataPathDeprecation data = 2; } -// D005 -message AdapterDeprecationWarning { - string old_name = 1; - string new_name = 2; -} - -message AdapterDeprecationWarningMsg { - EventInfo info = 1; - AdapterDeprecationWarning data = 2; -} - // D006 message MetricAttributesRenamed { string metric_name = 1; } message MetricAttributesRenamedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; MetricAttributesRenamed data = 2; } @@ -337,7 +335,7 @@ message ExposureNameDeprecation { } message ExposureNameDeprecationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ExposureNameDeprecation data = 2; } @@ -350,7 +348,7 @@ message InternalDeprecation { } message InternalDeprecationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; InternalDeprecation data = 2; } @@ -361,7 +359,7 @@ message EnvironmentVariableRenamed { } message EnvironmentVariableRenamedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; EnvironmentVariableRenamed data = 2; } @@ -371,7 +369,7 @@ message ConfigLogPathDeprecation { } message ConfigLogPathDeprecationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ConfigLogPathDeprecation data = 2; } @@ -381,464 +379,84 @@ message ConfigTargetPathDeprecation { } message ConfigTargetPathDeprecationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ConfigTargetPathDeprecation data = 2; } // D012 -message CollectFreshnessReturnSignature { -} - -message CollectFreshnessReturnSignatureMsg { - EventInfo info = 1; - CollectFreshnessReturnSignature data = 2; -} - -// E - DB Adapter - -// E001 -message AdapterEventDebug { - NodeInfo node_info = 1; - string name = 2; - string base_msg = 3; - google.protobuf.ListValue args = 4; -} - -message AdapterEventDebugMsg { - EventInfo info = 1; - AdapterEventDebug data = 2; -} - -// E002 -message AdapterEventInfo { - NodeInfo node_info = 1; - string name = 2; - string base_msg = 3; - google.protobuf.ListValue args = 4; -} - -message AdapterEventInfoMsg { - EventInfo info = 1; - AdapterEventInfo data = 2; -} - -// E003 -message AdapterEventWarning { - NodeInfo node_info = 1; - string name = 2; - string base_msg = 3; - google.protobuf.ListValue args = 4; -} - -message AdapterEventWarningMsg { - EventInfo info = 1; - AdapterEventWarning data = 2; -} - -// E004 -message AdapterEventError { - NodeInfo node_info = 1; - string name = 2; - string base_msg = 3; - google.protobuf.ListValue args = 4; - string exc_info = 5; -} - -message AdapterEventErrorMsg { - EventInfo info = 1; - AdapterEventError data = 2; -} - -// E005 -message NewConnection { - NodeInfo node_info = 1; - string conn_type = 2; - string conn_name = 3; -} - -message NewConnectionMsg { - EventInfo info = 1; - NewConnection data = 2; -} - -// E006 -message ConnectionReused { - string conn_name = 1; - string orig_conn_name = 2; -} - -message ConnectionReusedMsg { - EventInfo info = 1; - ConnectionReused data = 2; -} - -// E007 -message ConnectionLeftOpenInCleanup { - string conn_name = 1; -} - -message ConnectionLeftOpenInCleanupMsg { - EventInfo info = 1; - ConnectionLeftOpenInCleanup data = 2; -} - -// E008 -message ConnectionClosedInCleanup { - string conn_name = 1; -} - -message ConnectionClosedInCleanupMsg { - EventInfo info = 1; - ConnectionClosedInCleanup data = 2; -} - -// E009 -message RollbackFailed { - NodeInfo node_info = 1; - string conn_name = 2; - string exc_info = 3; -} - -message RollbackFailedMsg { - EventInfo info = 1; - 
RollbackFailed data = 2; -} - -// E010 -message ConnectionClosed { - NodeInfo node_info = 1; - string conn_name = 2; -} - -message ConnectionClosedMsg { - EventInfo info = 1; - ConnectionClosed data = 2; -} - -// E011 -message ConnectionLeftOpen { - NodeInfo node_info = 1; - string conn_name = 2; -} - -message ConnectionLeftOpenMsg { - EventInfo info = 1; - ConnectionLeftOpen data = 2; -} - -// E012 -message Rollback { - NodeInfo node_info = 1; - string conn_name = 2; -} - -message RollbackMsg { - EventInfo info = 1; - Rollback data = 2; -} - -// E013 -message CacheMiss { - string conn_name = 1; - string database = 2; - string schema = 3; -} - -message CacheMissMsg { - EventInfo info = 1; - CacheMiss data = 2; -} - -// E014 -message ListRelations { - string database = 1; - string schema = 2; - repeated ReferenceKeyMsg relations = 3; -} - -message ListRelationsMsg { - EventInfo info = 1; - ListRelations data = 2; -} - -// E015 -message ConnectionUsed { - NodeInfo node_info = 1; - string conn_type = 2; - string conn_name = 3; -} - -message ConnectionUsedMsg { - EventInfo info = 1; - ConnectionUsed data = 2; -} - -// E016 -message SQLQuery { - NodeInfo node_info = 1; - string conn_name = 2; - string sql = 3; -} - -message SQLQueryMsg { - EventInfo info = 1; - SQLQuery data = 2; -} - -// E017 -message SQLQueryStatus { - NodeInfo node_info = 1; - string status = 2; - float elapsed = 3; -} - -message SQLQueryStatusMsg { - EventInfo info = 1; - SQLQueryStatus data = 2; -} - -// E018 -message SQLCommit { - NodeInfo node_info = 1; - string conn_name = 2; -} - -message SQLCommitMsg { - EventInfo info = 1; - SQLCommit data = 2; -} - -// E019 -message ColTypeChange { - string orig_type = 1; - string new_type = 2; - ReferenceKeyMsg table = 3; -} - -message ColTypeChangeMsg { - EventInfo info = 1; - ColTypeChange data = 2; -} - -// E020 -message SchemaCreation { - ReferenceKeyMsg relation = 1; -} - -message SchemaCreationMsg { - EventInfo info = 1; - SchemaCreation data = 2; -} - -// E021 -message SchemaDrop { - ReferenceKeyMsg relation = 1; -} - -message SchemaDropMsg { - EventInfo info = 1; - SchemaDrop data = 2; -} - -// E022 -message CacheAction { - string action = 1; - ReferenceKeyMsg ref_key = 2; - ReferenceKeyMsg ref_key_2 = 3; - ReferenceKeyMsg ref_key_3 = 4; - repeated ReferenceKeyMsg ref_list = 5; -} - -message CacheActionMsg { - EventInfo info = 1; - CacheAction data = 2; -} - -// Skipping E023, E024, E025, E026, E027, E028, E029, E0230 - -// E031 -message CacheDumpGraph { - map<string, string> dump = 1; - string before_after = 2; - string action = 3; -} - -message CacheDumpGraphMsg { - EventInfo info = 1; - CacheDumpGraph data = 2; -} - - -// Skipping E032, E033, E034 - - - -// E034 -message AdapterRegistered { - string adapter_name = 1; - string adapter_version = 2; -} - -message AdapterRegisteredMsg { - EventInfo info = 1; - AdapterRegistered data = 2; -} - -// E035 -message AdapterImportError { - string exc = 1; -} - -message AdapterImportErrorMsg { - EventInfo info = 1; - AdapterImportError data = 2; -} - -// E036 -message PluginLoadError { - string exc_info = 1; -} - -message PluginLoadErrorMsg { - EventInfo info = 1; - PluginLoadError data = 2; -} - -// E037 -message NewConnectionOpening { - NodeInfo node_info = 1; - string connection_state = 2; -} - -message NewConnectionOpeningMsg { - EventInfo info = 1; - NewConnectionOpening data = 2; -} - -// E038 -message CodeExecution { - string conn_name = 1; - string code_content = 2; -} - -message CodeExecutionMsg { - EventInfo info = 1; - 
CodeExecution data = 2; -} - -// E039 -message CodeExecutionStatus { - string status = 1; - float elapsed = 2; -} - -message CodeExecutionStatusMsg { - EventInfo info = 1; - CodeExecutionStatus data = 2; -} - -// E040 -message CatalogGenerationError { - string exc = 1; -} - -message CatalogGenerationErrorMsg { - EventInfo info = 1; - CatalogGenerationError data = 2; -} - -// E041 -message WriteCatalogFailure { - int32 num_exceptions = 1; -} - -message WriteCatalogFailureMsg { - EventInfo info = 1; - WriteCatalogFailure data = 2; -} - -// E042 -message CatalogWritten { - string path = 1; -} - -message CatalogWrittenMsg { - EventInfo info = 1; - CatalogWritten data = 2; +message TestsConfigDeprecation { + string deprecated_path = 1; + string exp_path = 2; } -// E043 -message CannotGenerateDocs { +message TestsConfigDeprecationMsg { + CoreEventInfo info = 1; + TestsConfigDeprecation data = 2; } -message CannotGenerateDocsMsg { - EventInfo info = 1; - CannotGenerateDocs data = 2; +// D013 +message ProjectFlagsMovedDeprecation { } -// E044 -message BuildingCatalog { +message ProjectFlagsMovedDeprecationMsg { + CoreEventInfo info = 1; + ProjectFlagsMovedDeprecation data = 2; } -message BuildingCatalogMsg { - EventInfo info = 1; - BuildingCatalog data = 2; +// D014 +message SpacesInResourceNameDeprecation { + string unique_id = 1; + string level = 2; } -// E045 -message DatabaseErrorRunningHook { - string hook_type = 1; +message SpacesInResourceNameDeprecationMsg { + CoreEventInfo info = 1; + SpacesInResourceNameDeprecation data = 2; } -message DatabaseErrorRunningHookMsg { - EventInfo info = 1; - DatabaseErrorRunningHook data = 2; +// D015 +message ResourceNamesWithSpacesDeprecation { + int32 count_invalid_names = 1; + bool show_debug_hint = 2; + string level = 3; } -// E046 -message HooksRunning { - int32 num_hooks = 1; - string hook_type = 2; +message ResourceNamesWithSpacesDeprecationMsg { + CoreEventInfo info = 1; + ResourceNamesWithSpacesDeprecation data = 2; } -message HooksRunningMsg { - EventInfo info = 1; - HooksRunning data = 2; +// D016 +message PackageMaterializationOverrideDeprecation { + string package_name = 1; + string materialization_name = 2; } -// E047 -message FinishedRunningStats { - string stat_line = 1; - string execution = 2; - float execution_time = 3; +message PackageMaterializationOverrideDeprecationMsg { + CoreEventInfo info = 1; + PackageMaterializationOverrideDeprecation data = 2; } -message FinishedRunningStatsMsg { - EventInfo info = 1; - FinishedRunningStats data = 2; -} +// D017 +message SourceFreshnessProjectHooksNotRun {} -// E048 -message ConstraintNotEnforced { - string constraint = 1; - string adapter = 2; +message SourceFreshnessProjectHooksNotRunMsg { + CoreEventInfo info = 1; + SourceFreshnessProjectHooksNotRun data = 2; } -message ConstraintNotEnforcedMsg { - EventInfo info = 1; - ConstraintNotEnforced data = 2; +// I065 +message DeprecatedModel { + string model_name = 1; + string model_version = 2; + string deprecation_date = 3; } -// E049 -message ConstraintNotSupported { - string constraint = 1; - string adapter = 2; +message DeprecatedModelMsg { + CoreEventInfo info = 1; + DeprecatedModel data = 2; } -message ConstraintNotSupportedMsg { - EventInfo info = 1; - ConstraintNotSupported data = 2; -} // I - Project parsing @@ -849,7 +467,7 @@ message InputFileDiffError { } message InputFileDiffErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; InputFileDiffError data = 2; } @@ -862,7 +480,7 @@ message InvalidValueForField { } message 
InvalidValueForFieldMsg { - EventInfo info = 1; + CoreEventInfo info = 1; InvalidValueForField data = 2; } @@ -874,7 +492,7 @@ message ValidationWarning { } message ValidationWarningMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ValidationWarning data = 2; } @@ -884,7 +502,7 @@ message ParsePerfInfoPath { } message ParsePerfInfoPathMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ParsePerfInfoPath data = 2; } @@ -896,7 +514,7 @@ message PartialParsingErrorProcessingFile { } message PartialParsingErrorProcessingFileMsg { - EventInfo info = 1; + CoreEventInfo info = 1; PartialParsingErrorProcessingFile data = 2; } @@ -906,7 +524,7 @@ message PartialParsingError { } message PartialParsingErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; PartialParsingError data = 2; } @@ -915,7 +533,7 @@ message PartialParsingSkipParsing { } message PartialParsingSkipParsingMsg { - EventInfo info = 1; + CoreEventInfo info = 1; PartialParsingSkipParsing data = 2; } @@ -927,7 +545,7 @@ message UnableToPartialParse { } message UnableToPartialParseMsg { - EventInfo info = 1; + CoreEventInfo info = 1; UnableToPartialParse data = 2; } @@ -941,7 +559,7 @@ message StateCheckVarsHash { } message StateCheckVarsHashMsg { - EventInfo info = 1; + CoreEventInfo info = 1; StateCheckVarsHash data = 2; } @@ -953,7 +571,7 @@ message PartialParsingNotEnabled { } message PartialParsingNotEnabledMsg { - EventInfo info = 1; + CoreEventInfo info = 1; PartialParsingNotEnabled data = 2; } @@ -965,7 +583,7 @@ message ParsedFileLoadFailed { } message ParsedFileLoadFailedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ParsedFileLoadFailed data = 2; } @@ -979,7 +597,7 @@ message PartialParsingEnabled { } message PartialParsingEnabledMsg { - EventInfo info = 1; + CoreEventInfo info = 1; PartialParsingEnabled data = 2; } @@ -990,7 +608,7 @@ message PartialParsingFile { } message PartialParsingFileMsg { - EventInfo info = 1; + CoreEventInfo info = 1; PartialParsingFile data = 2; } @@ -1007,7 +625,7 @@ message InvalidDisabledTargetInTestNode { } message InvalidDisabledTargetInTestNodeMsg { - EventInfo info = 1; + CoreEventInfo info = 1; InvalidDisabledTargetInTestNode data = 2; } @@ -1017,7 +635,7 @@ message UnusedResourceConfigPath { } message UnusedResourceConfigPathMsg { - EventInfo info = 1; + CoreEventInfo info = 1; UnusedResourceConfigPath data = 2; } @@ -1028,7 +646,7 @@ message SeedIncreased { } message SeedIncreasedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SeedIncreased data = 2; } @@ -1039,7 +657,7 @@ message SeedExceedsLimitSamePath { } message SeedExceedsLimitSamePathMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SeedExceedsLimitSamePath data = 2; } @@ -1050,7 +668,7 @@ message SeedExceedsLimitAndPathChanged { } message SeedExceedsLimitAndPathChangedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SeedExceedsLimitAndPathChanged data = 2; } @@ -1062,7 +680,7 @@ message SeedExceedsLimitChecksumChanged { } message SeedExceedsLimitChecksumChangedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SeedExceedsLimitChecksumChanged data = 2; } @@ -1072,7 +690,7 @@ message UnusedTables { } message UnusedTablesMsg { - EventInfo info = 1; + CoreEventInfo info = 1; UnusedTables data = 2; } @@ -1086,7 +704,7 @@ message WrongResourceSchemaFile { } message WrongResourceSchemaFileMsg { - EventInfo info = 1; + CoreEventInfo info = 1; WrongResourceSchemaFile data = 2; } @@ -1098,7 +716,7 @@ message NoNodeForYamlKey { } message NoNodeForYamlKeyMsg { - EventInfo info = 1; + CoreEventInfo info = 1; 
NoNodeForYamlKey data = 2; } @@ -1108,7 +726,7 @@ message MacroNotFoundForPatch { } message MacroNotFoundForPatchMsg { - EventInfo info = 1; + CoreEventInfo info = 1; MacroNotFoundForPatch data = 2; } @@ -1124,7 +742,7 @@ message NodeNotFoundOrDisabled { } message NodeNotFoundOrDisabledMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NodeNotFoundOrDisabled data = 2; } @@ -1135,7 +753,7 @@ message JinjaLogWarning { } message JinjaLogWarningMsg { - EventInfo info = 1; + CoreEventInfo info = 1; JinjaLogWarning data = 2; } @@ -1146,7 +764,7 @@ message JinjaLogInfo { } message JinjaLogInfoMsg { - EventInfo info = 1; + CoreEventInfo info = 1; JinjaLogInfo data = 2; } @@ -1157,7 +775,7 @@ message JinjaLogDebug { } message JinjaLogDebugMsg { - EventInfo info = 1; + CoreEventInfo info = 1; JinjaLogDebug data = 2; } @@ -1171,22 +789,10 @@ message UnpinnedRefNewVersionAvailable { } message UnpinnedRefNewVersionAvailableMsg { - EventInfo info = 1; + CoreEventInfo info = 1; UnpinnedRefNewVersionAvailable data = 2; } -// I065 -message DeprecatedModel { - string model_name = 1; - string model_version = 2; - string deprecation_date = 3; -} - -message DeprecatedModelMsg { - EventInfo info = 1; - DeprecatedModel data = 2; -} - // I066 message UpcomingReferenceDeprecation { string model_name = 1; @@ -1198,7 +804,7 @@ message UpcomingReferenceDeprecation { } message UpcomingReferenceDeprecationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; UpcomingReferenceDeprecation data = 2; } @@ -1213,7 +819,7 @@ message DeprecatedReference { } message DeprecatedReferenceMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DeprecatedReference data = 2; } @@ -1223,7 +829,7 @@ message UnsupportedConstraintMaterialization { } message UnsupportedConstraintMaterializationMsg { - EventInfo info = 1; + CoreEventInfo info = 1; UnsupportedConstraintMaterialization data = 2; } @@ -1234,7 +840,7 @@ message ParseInlineNodeError{ } message ParseInlineNodeErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ParseInlineNodeError data = 2; } @@ -1244,10 +850,52 @@ message SemanticValidationFailure { } message SemanticValidationFailureMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SemanticValidationFailure data = 2; } +// I071 +message UnversionedBreakingChange { + repeated string breaking_changes = 1; + string model_name = 2; + string model_file_path = 3; + bool contract_enforced_disabled = 4; + repeated string columns_removed = 5; + repeated ColumnType column_type_changes = 6; + repeated ColumnConstraint enforced_column_constraint_removed = 7; + repeated ModelConstraint enforced_model_constraint_removed = 8; + repeated string materialization_changed = 9; +} + +message UnversionedBreakingChangeMsg { + CoreEventInfo info = 1; + UnversionedBreakingChange data = 2; +} + +// I072 +message WarnStateTargetEqual { + string state_path = 1; +} + +message WarnStateTargetEqualMsg { + CoreEventInfo info = 1; + WarnStateTargetEqual data = 2; +} + +// I073 +message FreshnessConfigProblem { + string msg = 1; +} + +message FreshnessConfigProblemMsg { + CoreEventInfo info = 1; + FreshnessConfigProblem data = 2; +} + + +// M - Deps generation + + // M - Deps generation @@ -1257,7 +905,7 @@ message GitSparseCheckoutSubdirectory { } message GitSparseCheckoutSubdirectoryMsg { - EventInfo info = 1; + CoreEventInfo info = 1; GitSparseCheckoutSubdirectory data = 2; } @@ -1267,7 +915,7 @@ message GitProgressCheckoutRevision { } message GitProgressCheckoutRevisionMsg { - EventInfo info = 1; + CoreEventInfo info = 1; 
GitProgressCheckoutRevision data = 2; } @@ -1277,7 +925,7 @@ message GitProgressUpdatingExistingDependency { } message GitProgressUpdatingExistingDependencyMsg { - EventInfo info = 1; + CoreEventInfo info = 1; GitProgressUpdatingExistingDependency data = 2; } @@ -1287,7 +935,7 @@ message GitProgressPullingNewDependency { } message GitProgressPullingNewDependencyMsg { - EventInfo info = 1; + CoreEventInfo info = 1; GitProgressPullingNewDependency data = 2; } @@ -1297,7 +945,7 @@ message GitNothingToDo { } message GitNothingToDoMsg { - EventInfo info = 1; + CoreEventInfo info = 1; GitNothingToDo data = 2; } @@ -1308,7 +956,7 @@ message GitProgressUpdatedCheckoutRange { } message GitProgressUpdatedCheckoutRangeMsg { - EventInfo info = 1; + CoreEventInfo info = 1; GitProgressUpdatedCheckoutRange data = 2; } @@ -1318,7 +966,7 @@ message GitProgressCheckedOutAt { } message GitProgressCheckedOutAtMsg { - EventInfo info = 1; + CoreEventInfo info = 1; GitProgressCheckedOutAt data = 2; } @@ -1328,7 +976,7 @@ message RegistryProgressGETRequest { } message RegistryProgressGETRequestMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RegistryProgressGETRequest data = 2; } @@ -1339,7 +987,7 @@ message RegistryProgressGETResponse { } message RegistryProgressGETResponseMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RegistryProgressGETResponse data = 2; } @@ -1351,7 +999,7 @@ message SelectorReportInvalidSelector { } message SelectorReportInvalidSelectorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SelectorReportInvalidSelector data = 2; } @@ -1362,7 +1010,7 @@ message DepsNoPackagesFound { } message DepsNoPackagesFoundMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsNoPackagesFound data = 2; } @@ -1372,7 +1020,7 @@ message DepsStartPackageInstall { } message DepsStartPackageInstallMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsStartPackageInstall data = 2; } @@ -1382,7 +1030,7 @@ message DepsInstallInfo { } message DepsInstallInfoMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsInstallInfo data = 2; } @@ -1392,7 +1040,7 @@ message DepsUpdateAvailable { } message DepsUpdateAvailableMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsUpdateAvailable data = 2; } @@ -1401,7 +1049,7 @@ message DepsUpToDate { } message DepsUpToDateMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsUpToDate data = 2; } @@ -1411,7 +1059,7 @@ message DepsListSubdirectory { } message DepsListSubdirectoryMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsListSubdirectory data = 2; } @@ -1421,38 +1069,17 @@ message DepsNotifyUpdatesAvailable { } message DepsNotifyUpdatesAvailableMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsNotifyUpdatesAvailable data = 2; } -// M020 -message RetryExternalCall { - int32 attempt = 1; - int32 max = 2; -} - -message RetryExternalCallMsg { - EventInfo info = 1; - RetryExternalCall data = 2; -} - -// M021 -message RecordRetryException { - string exc = 1; -} - -message RecordRetryExceptionMsg { - EventInfo info = 1; - RecordRetryException data = 2; -} - // M022 message RegistryIndexProgressGETRequest { string url = 1; } message RegistryIndexProgressGETRequestMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RegistryIndexProgressGETRequest data = 2; } @@ -1463,7 +1090,7 @@ message RegistryIndexProgressGETResponse { } message RegistryIndexProgressGETResponseMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RegistryIndexProgressGETResponse data = 2; } @@ -1473,7 +1100,7 @@ message RegistryResponseUnexpectedType { } message 
RegistryResponseUnexpectedTypeMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RegistryResponseUnexpectedType data = 2; } @@ -1483,7 +1110,7 @@ message RegistryResponseMissingTopKeys { } message RegistryResponseMissingTopKeysMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RegistryResponseMissingTopKeys data = 2; } @@ -1493,17 +1120,17 @@ message RegistryResponseMissingNestedKeys { } message RegistryResponseMissingNestedKeysMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RegistryResponseMissingNestedKeys data = 2; } -// m027 +// M027 message RegistryResponseExtraNestedKeys { string response = 1; } message RegistryResponseExtraNestedKeysMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RegistryResponseExtraNestedKeys data = 2; } @@ -1513,7 +1140,7 @@ message DepsSetDownloadDirectory { } message DepsSetDownloadDirectoryMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsSetDownloadDirectory data = 2; } @@ -1524,7 +1151,7 @@ message DepsUnpinned { } message DepsUnpinnedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsUnpinned data = 2; } @@ -1534,10 +1161,62 @@ message NoNodesForSelectionCriteria { } message NoNodesForSelectionCriteriaMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NoNodesForSelectionCriteria data = 2; } +// M031 +message DepsLockUpdating{ + string lock_filepath = 1; +} + +message DepsLockUpdatingMsg{ + CoreEventInfo info = 1; + DepsLockUpdating data = 2; +} + +// M032 +message DepsAddPackage{ + string package_name = 1; + string version = 2; + string packages_filepath = 3; +} + +message DepsAddPackageMsg{ + CoreEventInfo info = 1; + DepsAddPackage data = 2; +} + +//M033 +message DepsFoundDuplicatePackage{ + map<string, string> removed_package = 1; +} + +message DepsFoundDuplicatePackageMsg{ + CoreEventInfo info = 1; + DepsFoundDuplicatePackage data = 2; +} + +//M034 +message DepsVersionMissing{ + string source = 1; +} + +message DepsVersionMissingMsg{ + CoreEventInfo info = 1; + DepsVersionMissing data = 2; +} + +//M035 +message DepsScrubbedPackageName{ + string package_name = 1; +} + +message DepsScrubbedPackageNameMsg{ + CoreEventInfo info = 1; + DepsScrubbedPackageName data = 2; +} + // Q - Node execution // Q001 @@ -1546,7 +1225,7 @@ message RunningOperationCaughtError { } message RunningOperationCaughtErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RunningOperationCaughtError data = 2; } @@ -1555,7 +1234,7 @@ message CompileComplete { } message CompileCompleteMsg { - EventInfo info = 1; + CoreEventInfo info = 1; CompileComplete data = 2; } @@ -1564,7 +1243,7 @@ message FreshnessCheckComplete { } message FreshnessCheckCompleteMsg { - EventInfo info = 1; + CoreEventInfo info = 1; FreshnessCheckComplete data = 2; } @@ -1574,7 +1253,7 @@ message SeedHeader { } message SeedHeaderMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SeedHeader data = 2; } @@ -1584,10 +1263,11 @@ message SeedHeaderMsg { message SQLRunnerException { string exc = 1; string exc_info = 2; + NodeInfo node_info = 3; } message SQLRunnerExceptionMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SQLRunnerException data = 2; } @@ -1603,7 +1283,7 @@ message LogTestResult { } message LogTestResultMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogTestResult data = 2; } @@ -1620,7 +1300,7 @@ message LogStartLine { } message LogStartLineMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogStartLine data = 2; } @@ -1635,7 +1315,7 @@ message LogModelResult { } message LogModelResultMsg { - EventInfo info = 1; + CoreEventInfo info = 1; 
LogModelResult data = 2; } @@ -1650,10 +1330,11 @@ message LogSnapshotResult { int32 total = 5; float execution_time = 6; map<string, string> cfg = 7; + string result_message = 8; } message LogSnapshotResultMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogSnapshotResult data = 2; } @@ -1670,7 +1351,7 @@ message LogSeedResult { } message LogSeedResultMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogSeedResult data = 2; } @@ -1688,12 +1369,27 @@ message LogFreshnessResult { } message LogFreshnessResultMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogFreshnessResult data = 2; } +// Q018 +message LogNodeNoOpResult { + NodeInfo node_info = 1; + string description = 2; + string status = 3; + int32 index = 4; + int32 total = 5; + float execution_time = 6; +} + +message LogNodeNoOpResultMsg { + CoreEventInfo info = 1; + LogNodeNoOpResult data = 2; +} -// Skipped Q019, Q020, Q021 + +// Skipped Q020, Q021 // Q022 @@ -1702,7 +1398,7 @@ message LogCancelLine { } message LogCancelLineMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogCancelLine data = 2; } @@ -1712,7 +1408,7 @@ message DefaultSelector { } message DefaultSelectorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DefaultSelector data = 2; } @@ -1722,7 +1418,7 @@ message NodeStart { } message NodeStartMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NodeStart data = 2; } @@ -1733,7 +1429,7 @@ message NodeFinished { } message NodeFinishedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NodeFinished data = 2; } @@ -1743,7 +1439,7 @@ message QueryCancelationUnsupported { } message QueryCancelationUnsupportedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; QueryCancelationUnsupported data = 2; } @@ -1755,7 +1451,7 @@ message ConcurrencyLine { } message ConcurrencyLineMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ConcurrencyLine data = 2; } @@ -1765,7 +1461,7 @@ message WritingInjectedSQLForNode { } message WritingInjectedSQLForNodeMsg { - EventInfo info = 1; + CoreEventInfo info = 1; WritingInjectedSQLForNode data = 2; } @@ -1775,7 +1471,7 @@ message NodeCompiling { } message NodeCompilingMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NodeCompiling data = 2; } @@ -1785,7 +1481,7 @@ message NodeExecuting { } message NodeExecutingMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NodeExecuting data = 2; } @@ -1798,7 +1494,7 @@ message LogHookStartLine { } message LogHookStartLineMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogHookStartLine data = 2; } @@ -1813,7 +1509,7 @@ message LogHookEndLine { } message LogHookEndLineMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogHookEndLine data = 2; } @@ -1828,7 +1524,7 @@ message SkippingDetails { } message SkippingDetailsMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SkippingDetails data = 2; } @@ -1837,7 +1533,7 @@ message NothingToDo { } message NothingToDoMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NothingToDo data = 2; } @@ -1847,7 +1543,7 @@ message RunningOperationUncaughtError { } message RunningOperationUncaughtErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RunningOperationUncaughtError data = 2; } @@ -1860,7 +1556,7 @@ message EndRunResult { } message EndRunResultMsg { - EventInfo info = 1; + CoreEventInfo info = 1; EndRunResult data = 2; } @@ -1869,7 +1565,7 @@ message NoNodesSelected { } message NoNodesSelectedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NoNodesSelected data = 2; } @@ -1882,7 +1578,7 @@ message CommandCompleted { } message CommandCompletedMsg { - 
EventInfo info = 1; + CoreEventInfo info = 1; CommandCompleted data = 2; } @@ -1896,7 +1592,7 @@ message ShowNode { } message ShowNodeMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ShowNode data = 2; } @@ -1910,10 +1606,21 @@ message CompiledNode { } message CompiledNodeMsg { - EventInfo info = 1; + CoreEventInfo info = 1; CompiledNode data = 2; } +// Q043 +message SnapshotTimestampWarning { + string snapshot_time_data_type = 1; + string updated_at_data_type = 2; +} + +message SnapshotTimestampWarningMsg { + CoreEventInfo info = 1; + SnapshotTimestampWarning data = 2; +} + // W - Node testing // Skipped W001 @@ -1926,7 +1633,7 @@ message CatchableExceptionOnRun { } message CatchableExceptionOnRunMsg { - EventInfo info = 1; + CoreEventInfo info = 1; CatchableExceptionOnRun data = 2; } @@ -1934,10 +1641,11 @@ message CatchableExceptionOnRunMsg { message InternalErrorOnRun { string build_path = 1; string exc = 2; + NodeInfo node_info = 3; } message InternalErrorOnRunMsg { - EventInfo info = 1; + CoreEventInfo info = 1; InternalErrorOnRun data = 2; } @@ -1946,10 +1654,11 @@ message GenericExceptionOnRun { string build_path = 1; string unique_id = 2; string exc = 3; + NodeInfo node_info = 4; } message GenericExceptionOnRunMsg { - EventInfo info = 1; + CoreEventInfo info = 1; GenericExceptionOnRun data = 2; } @@ -1961,7 +1670,7 @@ message NodeConnectionReleaseError { } message NodeConnectionReleaseErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; NodeConnectionReleaseError data = 2; } @@ -1971,7 +1680,7 @@ message FoundStats { } message FoundStatsMsg { - EventInfo info = 1; + CoreEventInfo info = 1; FoundStats data = 2; } @@ -1982,7 +1691,7 @@ message MainKeyboardInterrupt { } message MainKeyboardInterruptMsg { - EventInfo info = 1; + CoreEventInfo info = 1; MainKeyboardInterrupt data = 2; } @@ -1992,7 +1701,7 @@ message MainEncounteredError { } message MainEncounteredErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; MainEncounteredError data = 2; } @@ -2002,63 +1711,12 @@ message MainStackTrace { } message MainStackTraceMsg { - EventInfo info = 1; + CoreEventInfo info = 1; MainStackTrace data = 2; } // skipping Z004 -// Z005 -message SystemCouldNotWrite { - string path = 1; - string reason = 2; - string exc = 3; -} - -message SystemCouldNotWriteMsg { - EventInfo info = 1; - SystemCouldNotWrite data = 2; -} - -// Z006 -message SystemExecutingCmd { - repeated string cmd = 1; -} - -message SystemExecutingCmdMsg { - EventInfo info = 1; - SystemExecutingCmd data = 2; -} - -// Z007 -message SystemStdOut{ - string bmsg = 1; -} - -message SystemStdOutMsg { - EventInfo info = 1; - SystemStdOut data = 2; -} - -// Z008 -message SystemStdErr { - string bmsg = 1; -} - -message SystemStdErrMsg { - EventInfo info = 1; - SystemStdErr data = 2; -} - -// Z009 -message SystemReportReturnCode { - int32 returncode = 1; -} - -message SystemReportReturnCodeMsg { - EventInfo info = 1; - SystemReportReturnCode data = 2; -} // Z010 message TimingInfoCollected { @@ -2067,7 +1725,7 @@ message TimingInfoCollected { } message TimingInfoCollectedMsg { - EventInfo info = 1; + CoreEventInfo info = 1; TimingInfoCollected data = 2; } @@ -2077,7 +1735,7 @@ message LogDebugStackTrace { } message LogDebugStackTraceMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogDebugStackTrace data = 2; } @@ -2087,7 +1745,7 @@ message CheckCleanPath { } message CheckCleanPathMsg { - EventInfo info = 1; + CoreEventInfo info = 1; CheckCleanPath data = 2; } @@ -2097,7 +1755,7 @@ message ConfirmCleanPath { } message 
ConfirmCleanPathMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ConfirmCleanPath data = 2; } @@ -2107,7 +1765,7 @@ message ProtectedCleanPath { } message ProtectedCleanPathMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ProtectedCleanPath data = 2; } @@ -2116,7 +1774,7 @@ message FinishedCleanPaths { } message FinishedCleanPathsMsg { - EventInfo info = 1; + CoreEventInfo info = 1; FinishedCleanPaths data = 2; } @@ -2127,19 +1785,10 @@ message OpenCommand { } message OpenCommandMsg { - EventInfo info = 1; + CoreEventInfo info = 1; OpenCommand data = 2; } -// Z017 -message Formatting { - string msg = 1; -} - -message FormattingMsg { - EventInfo info = 1; - Formatting data = 2; -} // Z018 message ServingDocsPort { @@ -2148,7 +1797,7 @@ message ServingDocsPort { } message ServingDocsPortMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ServingDocsPort data = 2; } @@ -2158,7 +1807,7 @@ message ServingDocsAccessInfo { } message ServingDocsAccessInfoMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ServingDocsAccessInfo data = 2; } @@ -2167,19 +1816,27 @@ message ServingDocsExitInfo { } message ServingDocsExitInfoMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ServingDocsExitInfo data = 2; } +message Group { + string name = 1; + string package_name = 3; + map<string, string> owner = 7; +} + // Z021 message RunResultWarning { string resource_type = 1; string node_name = 2; string path = 3; + NodeInfo node_info = 4; + Group group = 5; } message RunResultWarningMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RunResultWarning data = 2; } @@ -2188,10 +1845,12 @@ message RunResultFailure { string resource_type = 1; string node_name = 2; string path = 3; + NodeInfo node_info = 4; + Group group = 5; } message RunResultFailureMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RunResultFailure data = 2; } @@ -2201,69 +1860,56 @@ message StatsLine { } message StatsLineMsg { - EventInfo info = 1; + CoreEventInfo info = 1; StatsLine data = 2; } // Z024 message RunResultError { string msg = 1; + NodeInfo node_info = 2; + Group group = 3; } message RunResultErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RunResultError data = 2; } // Z025 message RunResultErrorNoMessage { string status = 1; + NodeInfo node_info = 2; } message RunResultErrorNoMessageMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RunResultErrorNoMessage data = 2; } // Z026 message SQLCompiledPath { string path = 1; + NodeInfo node_info = 2; } message SQLCompiledPathMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SQLCompiledPath data = 2; } // Z027 message CheckNodeTestFailure { string relation_name = 1; + NodeInfo node_info = 2; } message CheckNodeTestFailureMsg { - EventInfo info = 1; + CoreEventInfo info = 1; CheckNodeTestFailure data = 2; } -// Z028 -message FirstRunResultError { - string msg = 1; -} - -message FirstRunResultErrorMsg { - EventInfo info = 1; - FirstRunResultError data = 2; -} - -// Z029 -message AfterFirstRunResultError { - string msg = 1; -} - -message AfterFirstRunResultErrorMsg { - EventInfo info = 1; - AfterFirstRunResultError data = 2; -} +// Skipped Z028, Z029 // Z030 message EndOfRunSummary { @@ -2273,11 +1919,23 @@ message EndOfRunSummary { } message EndOfRunSummaryMsg { - EventInfo info = 1; + CoreEventInfo info = 1; EndOfRunSummary data = 2; } -// Skipped Z031, Z032, Z033 +// Skipped Z031, Z032 + +// Z033 +message MarkSkippedChildren { + string unique_id = 1; + string status = 2; + RunResultMsg run_result = 3; +} + +message MarkSkippedChildrenMsg { + 
CoreEventInfo info = 1; + MarkSkippedChildren data = 2; +} // Z034 message LogSkipBecauseError { @@ -2285,10 +1943,11 @@ message LogSkipBecauseError { string relation = 2; int32 index = 3; int32 total = 4; + string status = 5; } message LogSkipBecauseErrorMsg { - EventInfo info = 1; + CoreEventInfo info = 1; LogSkipBecauseError data = 2; } @@ -2297,7 +1956,7 @@ message EnsureGitInstalled { } message EnsureGitInstalledMsg { - EventInfo info = 1; + CoreEventInfo info = 1; EnsureGitInstalled data = 2; } @@ -2306,7 +1965,7 @@ message DepsCreatingLocalSymlink { } message DepsCreatingLocalSymlinkMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsCreatingLocalSymlink data = 2; } @@ -2315,7 +1974,7 @@ message DepsSymlinkNotAvailable { } message DepsSymlinkNotAvailableMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DepsSymlinkNotAvailable data = 2; } @@ -2324,7 +1983,7 @@ message DisableTracking { } message DisableTrackingMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DisableTracking data = 2; } @@ -2334,7 +1993,7 @@ message SendingEvent { } message SendingEventMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SendingEvent data = 2; } @@ -2343,7 +2002,7 @@ message SendEventFailure { } message SendEventFailureMsg { - EventInfo info = 1; + CoreEventInfo info = 1; SendEventFailure data = 2; } @@ -2352,7 +2011,7 @@ message FlushEvents { } message FlushEventsMsg { - EventInfo info = 1; + CoreEventInfo info = 1; FlushEvents data = 2; } @@ -2361,7 +2020,7 @@ message FlushEventsFailure { } message FlushEventsFailureMsg { - EventInfo info = 1; + CoreEventInfo info = 1; FlushEventsFailure data = 2; } @@ -2371,7 +2030,7 @@ message TrackingInitializeFailure { } message TrackingInitializeFailureMsg { - EventInfo info = 1; + CoreEventInfo info = 1; TrackingInitializeFailure data = 2; } @@ -2380,10 +2039,11 @@ message TrackingInitializeFailureMsg { // Z046 message RunResultWarningMessage { string msg = 1; + NodeInfo node_info = 2; } message RunResultWarningMessageMsg { - EventInfo info = 1; + CoreEventInfo info = 1; RunResultWarningMessage data = 2; } @@ -2393,7 +2053,7 @@ message DebugCmdOut { } message DebugCmdOutMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DebugCmdOut data = 2; } @@ -2403,7 +2063,7 @@ message DebugCmdResult { } message DebugCmdResultMsg { - EventInfo info = 1; + CoreEventInfo info = 1; DebugCmdResult data = 2; } @@ -2413,16 +2073,28 @@ message ListCmdOut { } message ListCmdOutMsg { - EventInfo info = 1; + CoreEventInfo info = 1; ListCmdOut data = 2; } -// Z050 -message Note { - string msg = 1; + +// Z051 +message ResourceReport { + string command_name = 2; + bool command_success = 3; + float command_wall_clock_time = 4; + // The process_* metrics reflect the resource consumption of the process as + // a whole when the command completes. When dbt is being used as a library, + // these will reflect the resource consumption of the host process as a whole, + // rather than the resources used exclusively by the command. 
+ float process_user_time = 5; + float process_kernel_time = 6; + int64 process_mem_max_rss = 7; + int64 process_in_blocks = 8; + int64 process_out_blocks = 9; } -message NoteMsg { - EventInfo info = 1; - Note data = 2; +message ResourceReportMsg { + CoreEventInfo info = 1; + ResourceReport data = 2; } diff --git a/core/dbt/events/core_types_pb2.py b/core/dbt/events/core_types_pb2.py new file mode 100644 index 00000000000..74c3befb20f --- /dev/null +++ b/core/dbt/events/core_types_pb2.py @@ -0,0 +1,776 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: core_types.proto +# Protobuf Python Version: 5.26.1 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x10\x63ore_types.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x99\x02\n\rCoreEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\x05\x65xtra\x18\t \x03(\x0b\x32%.proto_types.CoreEventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"V\n\x0cNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x91\x02\n\x08NodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x30\n\rnode_relation\x18\n \x01(\x0b\x32\x19.proto_types.NodeRelation\"\x7f\n\rTimingInfoMsg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\nstarted_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x0c\x63ompleted_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"\xd1\x01\n\x0cRunResultMsg\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\x12/\n\x0btiming_info\x18\x03 \x03(\x0b\x32\x1a.proto_types.TimingInfoMsg\x12\x0e\n\x06thread\x18\x04 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x05 \x01(\x02\x12\x31\n\x10\x61\x64\x61pter_response\x18\x06 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x14\n\x0cnum_failures\x18\x07 \x01(\x05\"\\\n\nColumnType\x12\x13\n\x0b\x63olumn_name\x18\x01 \x01(\t\x12\x1c\n\x14previous_column_type\x18\x02 \x01(\t\x12\x1b\n\x13\x63urrent_column_type\x18\x03 \x01(\t\"Y\n\x10\x43olumnConstraint\x12\x13\n\x0b\x63olumn_name\x18\x01 \x01(\t\x12\x17\n\x0f\x63onstraint_name\x18\x02 \x01(\t\x12\x17\n\x0f\x63onstraint_type\x18\x03 
\x01(\t\"T\n\x0fModelConstraint\x12\x17\n\x0f\x63onstraint_name\x18\x01 \x01(\t\x12\x17\n\x0f\x63onstraint_type\x18\x02 \x01(\t\x12\x0f\n\x07\x63olumns\x18\x03 \x03(\t\"9\n\x11MainReportVersion\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x13\n\x0blog_version\x18\x02 \x01(\x05\"n\n\x14MainReportVersionMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.MainReportVersion\"r\n\x0eMainReportArgs\x12\x33\n\x04\x61rgs\x18\x01 \x03(\x0b\x32%.proto_types.MainReportArgs.ArgsEntry\x1a+\n\tArgsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"h\n\x11MainReportArgsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.MainReportArgs\"+\n\x15MainTrackingUserState\x12\x12\n\nuser_state\x18\x01 \x01(\t\"v\n\x18MainTrackingUserStateMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MainTrackingUserState\"5\n\x0fMergedFromState\x12\x12\n\nnum_merged\x18\x01 \x01(\x05\x12\x0e\n\x06sample\x18\x02 \x03(\t\"j\n\x12MergedFromStateMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.MergedFromState\"A\n\x14MissingProfileTarget\x12\x14\n\x0cprofile_name\x18\x01 \x01(\t\x12\x13\n\x0btarget_name\x18\x02 \x01(\t\"t\n\x17MissingProfileTargetMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.MissingProfileTarget\"(\n\x11InvalidOptionYAML\x12\x13\n\x0boption_name\x18\x01 \x01(\t\"n\n\x14InvalidOptionYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.InvalidOptionYAML\"!\n\x12LogDbtProjectError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"p\n\x15LogDbtProjectErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDbtProjectError\"3\n\x12LogDbtProfileError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\x12\x10\n\x08profiles\x18\x02 \x03(\t\"p\n\x15LogDbtProfileErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDbtProfileError\"!\n\x12StarterProjectPath\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"p\n\x15StarterProjectPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.StarterProjectPath\"$\n\x15\x43onfigFolderDirectory\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"v\n\x18\x43onfigFolderDirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConfigFolderDirectory\"\'\n\x14NoSampleProfileFound\x12\x0f\n\x07\x61\x64\x61pter\x18\x01 \x01(\t\"t\n\x17NoSampleProfileFoundMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NoSampleProfileFound\"6\n\x18ProfileWrittenWithSample\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"|\n\x1bProfileWrittenWithSampleMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ProfileWrittenWithSample\"B\n$ProfileWrittenWithTargetTemplateYAML\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 
\x01(\t\"\x94\x01\n\'ProfileWrittenWithTargetTemplateYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12?\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x31.proto_types.ProfileWrittenWithTargetTemplateYAML\"C\n%ProfileWrittenWithProjectTemplateYAML\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"\x96\x01\n(ProfileWrittenWithProjectTemplateYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12@\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x32.proto_types.ProfileWrittenWithProjectTemplateYAML\"\x12\n\x10SettingUpProfile\"l\n\x13SettingUpProfileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.SettingUpProfile\"\x1c\n\x1aInvalidProfileTemplateYAML\"\x80\x01\n\x1dInvalidProfileTemplateYAMLMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.InvalidProfileTemplateYAML\"(\n\x18ProjectNameAlreadyExists\x12\x0c\n\x04name\x18\x01 \x01(\t\"|\n\x1bProjectNameAlreadyExistsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ProjectNameAlreadyExists\"K\n\x0eProjectCreated\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x10\n\x08\x64ocs_url\x18\x02 \x01(\t\x12\x11\n\tslack_url\x18\x03 \x01(\t\"h\n\x11ProjectCreatedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ProjectCreated\"@\n\x1aPackageRedirectDeprecation\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x80\x01\n\x1dPackageRedirectDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.PackageRedirectDeprecation\"\x1f\n\x1dPackageInstallPathDeprecation\"\x86\x01\n PackageInstallPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.PackageInstallPathDeprecation\"H\n\x1b\x43onfigSourcePathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"\x82\x01\n\x1e\x43onfigSourcePathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConfigSourcePathDeprecation\"F\n\x19\x43onfigDataPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"~\n\x1c\x43onfigDataPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConfigDataPathDeprecation\".\n\x17MetricAttributesRenamed\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\"z\n\x1aMetricAttributesRenamedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.MetricAttributesRenamed\"+\n\x17\x45xposureNameDeprecation\x12\x10\n\x08\x65xposure\x18\x01 \x01(\t\"z\n\x1a\x45xposureNameDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.ExposureNameDeprecation\"^\n\x13InternalDeprecation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06reason\x18\x02 \x01(\t\x12\x18\n\x10suggested_action\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"r\n\x16InternalDeprecationMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.InternalDeprecation\"@\n\x1a\x45nvironmentVariableRenamed\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"\x80\x01\n\x1d\x45nvironmentVariableRenamedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.EnvironmentVariableRenamed\"3\n\x18\x43onfigLogPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\"|\n\x1b\x43onfigLogPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ConfigLogPathDeprecation\"6\n\x1b\x43onfigTargetPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\"\x82\x01\n\x1e\x43onfigTargetPathDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConfigTargetPathDeprecation\"C\n\x16TestsConfigDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"x\n\x19TestsConfigDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.TestsConfigDeprecation\"\x1e\n\x1cProjectFlagsMovedDeprecation\"\x84\x01\n\x1fProjectFlagsMovedDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x37\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32).proto_types.ProjectFlagsMovedDeprecation\"C\n\x1fSpacesInResourceNameDeprecation\x12\x11\n\tunique_id\x18\x01 \x01(\t\x12\r\n\x05level\x18\x02 \x01(\t\"\x8a\x01\n\"SpacesInResourceNameDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.SpacesInResourceNameDeprecation\"i\n\"ResourceNamesWithSpacesDeprecation\x12\x1b\n\x13\x63ount_invalid_names\x18\x01 \x01(\x05\x12\x17\n\x0fshow_debug_hint\x18\x02 \x01(\x08\x12\r\n\x05level\x18\x03 \x01(\t\"\x90\x01\n%ResourceNamesWithSpacesDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12=\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32/.proto_types.ResourceNamesWithSpacesDeprecation\"_\n)PackageMaterializationOverrideDeprecation\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x1c\n\x14materialization_name\x18\x02 \x01(\t\"\x9e\x01\n,PackageMaterializationOverrideDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x44\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x36.proto_types.PackageMaterializationOverrideDeprecation\"#\n!SourceFreshnessProjectHooksNotRun\"\x8e\x01\n$SourceFreshnessProjectHooksNotRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.SourceFreshnessProjectHooksNotRun\"V\n\x0f\x44\x65precatedModel\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x15\n\rmodel_version\x18\x02 \x01(\t\x12\x18\n\x10\x64\x65precation_date\x18\x03 \x01(\t\"j\n\x12\x44\x65precatedModelMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DeprecatedModel\"7\n\x12InputFileDiffError\x12\x10\n\x08\x63\x61tegory\x18\x01 \x01(\t\x12\x0f\n\x07\x66ile_id\x18\x02 \x01(\t\"p\n\x15InputFileDiffErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.InputFileDiffError\"?\n\x14InvalidValueForField\x12\x12\n\nfield_name\x18\x01 
\x01(\t\x12\x13\n\x0b\x66ield_value\x18\x02 \x01(\t\"t\n\x17InvalidValueForFieldMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.InvalidValueForField\"Q\n\x11ValidationWarning\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x12\n\nfield_name\x18\x02 \x01(\t\x12\x11\n\tnode_name\x18\x03 \x01(\t\"n\n\x14ValidationWarningMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.ValidationWarning\"!\n\x11ParsePerfInfoPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"n\n\x14ParsePerfInfoPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.ParsePerfInfoPath\"1\n!PartialParsingErrorProcessingFile\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\"\x8e\x01\n$PartialParsingErrorProcessingFileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.PartialParsingErrorProcessingFile\"\x86\x01\n\x13PartialParsingError\x12?\n\x08\x65xc_info\x18\x01 \x03(\x0b\x32-.proto_types.PartialParsingError.ExcInfoEntry\x1a.\n\x0c\x45xcInfoEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"r\n\x16PartialParsingErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.PartialParsingError\"\x1b\n\x19PartialParsingSkipParsing\"~\n\x1cPartialParsingSkipParsingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.PartialParsingSkipParsing\"&\n\x14UnableToPartialParse\x12\x0e\n\x06reason\x18\x01 \x01(\t\"t\n\x17UnableToPartialParseMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.UnableToPartialParse\"f\n\x12StateCheckVarsHash\x12\x10\n\x08\x63hecksum\x18\x01 \x01(\t\x12\x0c\n\x04vars\x18\x02 \x01(\t\x12\x0f\n\x07profile\x18\x03 \x01(\t\x12\x0e\n\x06target\x18\x04 \x01(\t\x12\x0f\n\x07version\x18\x05 \x01(\t\"p\n\x15StateCheckVarsHashMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.StateCheckVarsHash\"\x1a\n\x18PartialParsingNotEnabled\"|\n\x1bPartialParsingNotEnabledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.PartialParsingNotEnabled\"C\n\x14ParsedFileLoadFailed\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"t\n\x17ParsedFileLoadFailedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.ParsedFileLoadFailed\"H\n\x15PartialParsingEnabled\x12\x0f\n\x07\x64\x65leted\x18\x01 \x01(\x05\x12\r\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x05\x12\x0f\n\x07\x63hanged\x18\x03 \x01(\x05\"v\n\x18PartialParsingEnabledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.PartialParsingEnabled\"8\n\x12PartialParsingFile\x12\x0f\n\x07\x66ile_id\x18\x01 \x01(\t\x12\x11\n\toperation\x18\x02 \x01(\t\"p\n\x15PartialParsingFileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1f.proto_types.PartialParsingFile\"\xaf\x01\n\x1fInvalidDisabledTargetInTestNode\x12\x1b\n\x13resource_type_title\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x1a\n\x12original_file_path\x18\x03 \x01(\t\x12\x13\n\x0btarget_kind\x18\x04 \x01(\t\x12\x13\n\x0btarget_name\x18\x05 \x01(\t\x12\x16\n\x0etarget_package\x18\x06 \x01(\t\"\x8a\x01\n\"InvalidDisabledTargetInTestNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.InvalidDisabledTargetInTestNode\"7\n\x18UnusedResourceConfigPath\x12\x1b\n\x13unused_config_paths\x18\x01 \x03(\t\"|\n\x1bUnusedResourceConfigPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.UnusedResourceConfigPath\"3\n\rSeedIncreased\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"f\n\x10SeedIncreasedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.SeedIncreased\">\n\x18SeedExceedsLimitSamePath\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"|\n\x1bSeedExceedsLimitSamePathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.SeedExceedsLimitSamePath\"D\n\x1eSeedExceedsLimitAndPathChanged\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x88\x01\n!SeedExceedsLimitAndPathChangedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.SeedExceedsLimitAndPathChanged\"\\\n\x1fSeedExceedsLimitChecksumChanged\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x15\n\rchecksum_name\x18\x03 \x01(\t\"\x8a\x01\n\"SeedExceedsLimitChecksumChangedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.SeedExceedsLimitChecksumChanged\"%\n\x0cUnusedTables\x12\x15\n\runused_tables\x18\x01 \x03(\t\"d\n\x0fUnusedTablesMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.UnusedTables\"\x87\x01\n\x17WrongResourceSchemaFile\x12\x12\n\npatch_name\x18\x01 \x01(\t\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x1c\n\x14plural_resource_type\x18\x03 \x01(\t\x12\x10\n\x08yaml_key\x18\x04 \x01(\t\x12\x11\n\tfile_path\x18\x05 \x01(\t\"z\n\x1aWrongResourceSchemaFileMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.WrongResourceSchemaFile\"K\n\x10NoNodeForYamlKey\x12\x12\n\npatch_name\x18\x01 \x01(\t\x12\x10\n\x08yaml_key\x18\x02 \x01(\t\x12\x11\n\tfile_path\x18\x03 \x01(\t\"l\n\x13NoNodeForYamlKeyMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.NoNodeForYamlKey\"+\n\x15MacroNotFoundForPatch\x12\x12\n\npatch_name\x18\x01 \x01(\t\"v\n\x18MacroNotFoundForPatchMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MacroNotFoundForPatch\"\xb8\x01\n\x16NodeNotFoundOrDisabled\x12\x1a\n\x12original_file_path\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x1b\n\x13resource_type_title\x18\x03 \x01(\t\x12\x13\n\x0btarget_name\x18\x04 \x01(\t\x12\x13\n\x0btarget_kind\x18\x05 
\x01(\t\x12\x16\n\x0etarget_package\x18\x06 \x01(\t\x12\x10\n\x08\x64isabled\x18\x07 \x01(\t\"x\n\x19NodeNotFoundOrDisabledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.NodeNotFoundOrDisabled\"H\n\x0fJinjaLogWarning\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"j\n\x12JinjaLogWarningMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.JinjaLogWarning\"E\n\x0cJinjaLogInfo\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"d\n\x0fJinjaLogInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.JinjaLogInfo\"F\n\rJinjaLogDebug\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"f\n\x10JinjaLogDebugMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.JinjaLogDebug\"\xae\x01\n\x1eUnpinnedRefNewVersionAvailable\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x15\n\rref_node_name\x18\x02 \x01(\t\x12\x18\n\x10ref_node_package\x18\x03 \x01(\t\x12\x18\n\x10ref_node_version\x18\x04 \x01(\t\x12\x17\n\x0fref_max_version\x18\x05 \x01(\t\"\x88\x01\n!UnpinnedRefNewVersionAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.UnpinnedRefNewVersionAvailable\"\xc6\x01\n\x1cUpcomingReferenceDeprecation\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x19\n\x11ref_model_package\x18\x02 \x01(\t\x12\x16\n\x0eref_model_name\x18\x03 \x01(\t\x12\x19\n\x11ref_model_version\x18\x04 \x01(\t\x12 \n\x18ref_model_latest_version\x18\x05 \x01(\t\x12\"\n\x1aref_model_deprecation_date\x18\x06 \x01(\t\"\x84\x01\n\x1fUpcomingReferenceDeprecationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x37\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32).proto_types.UpcomingReferenceDeprecation\"\xbd\x01\n\x13\x44\x65precatedReference\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x19\n\x11ref_model_package\x18\x02 \x01(\t\x12\x16\n\x0eref_model_name\x18\x03 \x01(\t\x12\x19\n\x11ref_model_version\x18\x04 \x01(\t\x12 \n\x18ref_model_latest_version\x18\x05 \x01(\t\x12\"\n\x1aref_model_deprecation_date\x18\x06 \x01(\t\"r\n\x16\x44\x65precatedReferenceMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DeprecatedReference\"<\n$UnsupportedConstraintMaterialization\x12\x14\n\x0cmaterialized\x18\x01 \x01(\t\"\x94\x01\n\'UnsupportedConstraintMaterializationMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12?\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x31.proto_types.UnsupportedConstraintMaterialization\"M\n\x14ParseInlineNodeError\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\"t\n\x17ParseInlineNodeErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.ParseInlineNodeError\"(\n\x19SemanticValidationFailure\x12\x0b\n\x03msg\x18\x02 \x01(\t\"~\n\x1cSemanticValidationFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32&.proto_types.SemanticValidationFailure\"\x8a\x03\n\x19UnversionedBreakingChange\x12\x18\n\x10\x62reaking_changes\x18\x01 \x03(\t\x12\x12\n\nmodel_name\x18\x02 \x01(\t\x12\x17\n\x0fmodel_file_path\x18\x03 \x01(\t\x12\"\n\x1a\x63ontract_enforced_disabled\x18\x04 \x01(\x08\x12\x17\n\x0f\x63olumns_removed\x18\x05 \x03(\t\x12\x34\n\x13\x63olumn_type_changes\x18\x06 \x03(\x0b\x32\x17.proto_types.ColumnType\x12I\n\"enforced_column_constraint_removed\x18\x07 \x03(\x0b\x32\x1d.proto_types.ColumnConstraint\x12G\n!enforced_model_constraint_removed\x18\x08 \x03(\x0b\x32\x1c.proto_types.ModelConstraint\x12\x1f\n\x17materialization_changed\x18\t \x03(\t\"~\n\x1cUnversionedBreakingChangeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.UnversionedBreakingChange\"*\n\x14WarnStateTargetEqual\x12\x12\n\nstate_path\x18\x01 \x01(\t\"t\n\x17WarnStateTargetEqualMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.WarnStateTargetEqual\"%\n\x16\x46reshnessConfigProblem\x12\x0b\n\x03msg\x18\x01 \x01(\t\"x\n\x19\x46reshnessConfigProblemMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.FreshnessConfigProblem\"/\n\x1dGitSparseCheckoutSubdirectory\x12\x0e\n\x06subdir\x18\x01 \x01(\t\"\x86\x01\n GitSparseCheckoutSubdirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.GitSparseCheckoutSubdirectory\"/\n\x1bGitProgressCheckoutRevision\x12\x10\n\x08revision\x18\x01 \x01(\t\"\x82\x01\n\x1eGitProgressCheckoutRevisionMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.GitProgressCheckoutRevision\"4\n%GitProgressUpdatingExistingDependency\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"\x96\x01\n(GitProgressUpdatingExistingDependencyMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12@\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x32.proto_types.GitProgressUpdatingExistingDependency\".\n\x1fGitProgressPullingNewDependency\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"\x8a\x01\n\"GitProgressPullingNewDependencyMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.GitProgressPullingNewDependency\"\x1d\n\x0eGitNothingToDo\x12\x0b\n\x03sha\x18\x01 \x01(\t\"h\n\x11GitNothingToDoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.GitNothingToDo\"E\n\x1fGitProgressUpdatedCheckoutRange\x12\x11\n\tstart_sha\x18\x01 \x01(\t\x12\x0f\n\x07\x65nd_sha\x18\x02 \x01(\t\"\x8a\x01\n\"GitProgressUpdatedCheckoutRangeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.GitProgressUpdatedCheckoutRange\"*\n\x17GitProgressCheckedOutAt\x12\x0f\n\x07\x65nd_sha\x18\x01 \x01(\t\"z\n\x1aGitProgressCheckedOutAtMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.GitProgressCheckedOutAt\")\n\x1aRegistryProgressGETRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\"\x80\x01\n\x1dRegistryProgressGETRequestMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\'.proto_types.RegistryProgressGETRequest\"=\n\x1bRegistryProgressGETResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x11\n\tresp_code\x18\x02 \x01(\x05\"\x82\x01\n\x1eRegistryProgressGETResponseMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.RegistryProgressGETResponse\"_\n\x1dSelectorReportInvalidSelector\x12\x17\n\x0fvalid_selectors\x18\x01 \x01(\t\x12\x13\n\x0bspec_method\x18\x02 \x01(\t\x12\x10\n\x08raw_spec\x18\x03 \x01(\t\"\x86\x01\n SelectorReportInvalidSelectorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.SelectorReportInvalidSelector\"\x15\n\x13\x44\x65psNoPackagesFound\"r\n\x16\x44\x65psNoPackagesFoundMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DepsNoPackagesFound\"/\n\x17\x44\x65psStartPackageInstall\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\"z\n\x1a\x44\x65psStartPackageInstallMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsStartPackageInstall\"\'\n\x0f\x44\x65psInstallInfo\x12\x14\n\x0cversion_name\x18\x01 \x01(\t\"j\n\x12\x44\x65psInstallInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DepsInstallInfo\"-\n\x13\x44\x65psUpdateAvailable\x12\x16\n\x0eversion_latest\x18\x01 \x01(\t\"r\n\x16\x44\x65psUpdateAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DepsUpdateAvailable\"\x0e\n\x0c\x44\x65psUpToDate\"d\n\x0f\x44\x65psUpToDateMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.DepsUpToDate\",\n\x14\x44\x65psListSubdirectory\x12\x14\n\x0csubdirectory\x18\x01 \x01(\t\"t\n\x17\x44\x65psListSubdirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.DepsListSubdirectory\".\n\x1a\x44\x65psNotifyUpdatesAvailable\x12\x10\n\x08packages\x18\x01 \x03(\t\"\x80\x01\n\x1d\x44\x65psNotifyUpdatesAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.DepsNotifyUpdatesAvailable\".\n\x1fRegistryIndexProgressGETRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\"\x8a\x01\n\"RegistryIndexProgressGETRequestMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.RegistryIndexProgressGETRequest\"B\n RegistryIndexProgressGETResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x11\n\tresp_code\x18\x02 \x01(\x05\"\x8c\x01\n#RegistryIndexProgressGETResponseMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12;\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32-.proto_types.RegistryIndexProgressGETResponse\"2\n\x1eRegistryResponseUnexpectedType\x12\x10\n\x08response\x18\x01 \x01(\t\"\x88\x01\n!RegistryResponseUnexpectedTypeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.RegistryResponseUnexpectedType\"2\n\x1eRegistryResponseMissingTopKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x88\x01\n!RegistryResponseMissingTopKeysMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32+.proto_types.RegistryResponseMissingTopKeys\"5\n!RegistryResponseMissingNestedKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x8e\x01\n$RegistryResponseMissingNestedKeysMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.RegistryResponseMissingNestedKeys\"3\n\x1fRegistryResponseExtraNestedKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x8a\x01\n\"RegistryResponseExtraNestedKeysMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.RegistryResponseExtraNestedKeys\"(\n\x18\x44\x65psSetDownloadDirectory\x12\x0c\n\x04path\x18\x01 \x01(\t\"|\n\x1b\x44\x65psSetDownloadDirectoryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DepsSetDownloadDirectory\"-\n\x0c\x44\x65psUnpinned\x12\x10\n\x08revision\x18\x01 \x01(\t\x12\x0b\n\x03git\x18\x02 \x01(\t\"d\n\x0f\x44\x65psUnpinnedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.DepsUnpinned\"/\n\x1bNoNodesForSelectionCriteria\x12\x10\n\x08spec_raw\x18\x01 \x01(\t\"\x82\x01\n\x1eNoNodesForSelectionCriteriaMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.NoNodesForSelectionCriteria\")\n\x10\x44\x65psLockUpdating\x12\x15\n\rlock_filepath\x18\x01 \x01(\t\"l\n\x13\x44\x65psLockUpdatingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.DepsLockUpdating\"R\n\x0e\x44\x65psAddPackage\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12\x19\n\x11packages_filepath\x18\x03 \x01(\t\"h\n\x11\x44\x65psAddPackageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.DepsAddPackage\"\xa7\x01\n\x19\x44\x65psFoundDuplicatePackage\x12S\n\x0fremoved_package\x18\x01 \x03(\x0b\x32:.proto_types.DepsFoundDuplicatePackage.RemovedPackageEntry\x1a\x35\n\x13RemovedPackageEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"~\n\x1c\x44\x65psFoundDuplicatePackageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.DepsFoundDuplicatePackage\"$\n\x12\x44\x65psVersionMissing\x12\x0e\n\x06source\x18\x01 \x01(\t\"p\n\x15\x44\x65psVersionMissingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.DepsVersionMissing\"/\n\x17\x44\x65psScrubbedPackageName\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\"z\n\x1a\x44\x65psScrubbedPackageNameMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsScrubbedPackageName\"*\n\x1bRunningOperationCaughtError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x82\x01\n\x1eRunningOperationCaughtErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.RunningOperationCaughtError\"\x11\n\x0f\x43ompileComplete\"j\n\x12\x43ompileCompleteMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1c.proto_types.CompileComplete\"\x18\n\x16\x46reshnessCheckComplete\"x\n\x19\x46reshnessCheckCompleteMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.FreshnessCheckComplete\"\x1c\n\nSeedHeader\x12\x0e\n\x06header\x18\x01 \x01(\t\"`\n\rSeedHeaderMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SeedHeader\"]\n\x12SQLRunnerException\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x02 \x01(\t\x12(\n\tnode_info\x18\x03 \x01(\x0b\x32\x15.proto_types.NodeInfo\"p\n\x15SQLRunnerExceptionMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.SQLRunnerException\"\xa8\x01\n\rLogTestResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\x12\n\nnum_models\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x14\n\x0cnum_failures\x18\x07 \x01(\x05\"f\n\x10LogTestResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogTestResult\"k\n\x0cLogStartLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"d\n\x0fLogStartLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.LogStartLine\"\x95\x01\n\x0eLogModelResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"h\n\x11LogModelResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.LogModelResult\"\x92\x02\n\x11LogSnapshotResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x34\n\x03\x63\x66g\x18\x07 \x03(\x0b\x32\'.proto_types.LogSnapshotResult.CfgEntry\x12\x16\n\x0eresult_message\x18\x08 \x01(\t\x1a*\n\x08\x43\x66gEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"n\n\x14LogSnapshotResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.LogSnapshotResult\"\xb9\x01\n\rLogSeedResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x16\n\x0eresult_message\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x0e\n\x06schema\x18\x07 \x01(\t\x12\x10\n\x08relation\x18\x08 \x01(\t\"f\n\x10LogSeedResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogSeedResult\"\xad\x01\n\x12LogFreshnessResult\x12\x0e\n\x06status\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\r\n\x05index\x18\x03 
\x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x05 \x01(\x02\x12\x13\n\x0bsource_name\x18\x06 \x01(\t\x12\x12\n\ntable_name\x18\x07 \x01(\t\"p\n\x15LogFreshnessResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogFreshnessResult\"\x98\x01\n\x11LogNodeNoOpResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"n\n\x14LogNodeNoOpResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.LogNodeNoOpResult\"\"\n\rLogCancelLine\x12\x11\n\tconn_name\x18\x01 \x01(\t\"f\n\x10LogCancelLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogCancelLine\"\x1f\n\x0f\x44\x65\x66\x61ultSelector\x12\x0c\n\x04name\x18\x01 \x01(\t\"j\n\x12\x44\x65\x66\x61ultSelectorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DefaultSelector\"5\n\tNodeStart\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"^\n\x0cNodeStartMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.NodeStart\"g\n\x0cNodeFinished\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12-\n\nrun_result\x18\x02 \x01(\x0b\x32\x19.proto_types.RunResultMsg\"d\n\x0fNodeFinishedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.NodeFinished\"+\n\x1bQueryCancelationUnsupported\x12\x0c\n\x04type\x18\x01 \x01(\t\"\x82\x01\n\x1eQueryCancelationUnsupportedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.QueryCancelationUnsupported\"O\n\x0f\x43oncurrencyLine\x12\x13\n\x0bnum_threads\x18\x01 \x01(\x05\x12\x13\n\x0btarget_name\x18\x02 \x01(\t\x12\x12\n\nnode_count\x18\x03 \x01(\x05\"j\n\x12\x43oncurrencyLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.ConcurrencyLine\"E\n\x19WritingInjectedSQLForNode\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"~\n\x1cWritingInjectedSQLForNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.WritingInjectedSQLForNode\"9\n\rNodeCompiling\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"f\n\x10NodeCompilingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NodeCompiling\"9\n\rNodeExecuting\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"f\n\x10NodeExecutingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NodeExecuting\"m\n\x10LogHookStartLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tstatement\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"l\n\x13LogHookStartLineMsg\x12(\n\x04info\x18\x01 
\x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.LogHookStartLine\"\x93\x01\n\x0eLogHookEndLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tstatement\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"h\n\x11LogHookEndLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.LogHookEndLine\"\x93\x01\n\x0fSkippingDetails\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\x12\x11\n\tnode_name\x18\x04 \x01(\t\x12\r\n\x05index\x18\x05 \x01(\x05\x12\r\n\x05total\x18\x06 \x01(\x05\"j\n\x12SkippingDetailsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.SkippingDetails\"\r\n\x0bNothingToDo\"b\n\x0eNothingToDoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.NothingToDo\",\n\x1dRunningOperationUncaughtError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x86\x01\n RunningOperationUncaughtErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.RunningOperationUncaughtError\"\x93\x01\n\x0c\x45ndRunResult\x12*\n\x07results\x18\x01 \x03(\x0b\x32\x19.proto_types.RunResultMsg\x12\x14\n\x0c\x65lapsed_time\x18\x02 \x01(\x02\x12\x30\n\x0cgenerated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07success\x18\x04 \x01(\x08\"d\n\x0f\x45ndRunResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.EndRunResult\"\x11\n\x0fNoNodesSelected\"j\n\x12NoNodesSelectedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.NoNodesSelected\"w\n\x10\x43ommandCompleted\x12\x0f\n\x07\x63ommand\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\x12\x30\n\x0c\x63ompleted_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x65lapsed\x18\x04 \x01(\x02\"l\n\x13\x43ommandCompletedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.CommandCompleted\"k\n\x08ShowNode\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x0f\n\x07preview\x18\x02 \x01(\t\x12\x11\n\tis_inline\x18\x03 \x01(\x08\x12\x15\n\routput_format\x18\x04 \x01(\t\x12\x11\n\tunique_id\x18\x05 \x01(\t\"\\\n\x0bShowNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.ShowNode\"p\n\x0c\x43ompiledNode\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x10\n\x08\x63ompiled\x18\x02 \x01(\t\x12\x11\n\tis_inline\x18\x03 \x01(\x08\x12\x15\n\routput_format\x18\x04 \x01(\t\x12\x11\n\tunique_id\x18\x05 \x01(\t\"d\n\x0f\x43ompiledNodeMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.CompiledNode\"Y\n\x18SnapshotTimestampWarning\x12\x1f\n\x17snapshot_time_data_type\x18\x01 \x01(\t\x12\x1c\n\x14updated_at_data_type\x18\x02 \x01(\t\"|\n\x1bSnapshotTimestampWarningMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32%.proto_types.SnapshotTimestampWarning\"b\n\x17\x43\x61tchableExceptionOnRun\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"z\n\x1a\x43\x61tchableExceptionOnRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.CatchableExceptionOnRun\"_\n\x12InternalErrorOnRun\x12\x12\n\nbuild_path\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12(\n\tnode_info\x18\x03 \x01(\x0b\x32\x15.proto_types.NodeInfo\"p\n\x15InternalErrorOnRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.InternalErrorOnRun\"u\n\x15GenericExceptionOnRun\x12\x12\n\nbuild_path\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x0b\n\x03\x65xc\x18\x03 \x01(\t\x12(\n\tnode_info\x18\x04 \x01(\x0b\x32\x15.proto_types.NodeInfo\"v\n\x18GenericExceptionOnRunMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.GenericExceptionOnRun\"N\n\x1aNodeConnectionReleaseError\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"\x80\x01\n\x1dNodeConnectionReleaseErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.NodeConnectionReleaseError\"\x1f\n\nFoundStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\"`\n\rFoundStatsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.FoundStats\"\x17\n\x15MainKeyboardInterrupt\"v\n\x18MainKeyboardInterruptMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MainKeyboardInterrupt\"#\n\x14MainEncounteredError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"t\n\x17MainEncounteredErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.MainEncounteredError\"%\n\x0eMainStackTrace\x12\x13\n\x0bstack_trace\x18\x01 \x01(\t\"h\n\x11MainStackTraceMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.MainStackTrace\"p\n\x13TimingInfoCollected\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12/\n\x0btiming_info\x18\x02 \x01(\x0b\x32\x1a.proto_types.TimingInfoMsg\"r\n\x16TimingInfoCollectedMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.TimingInfoCollected\"&\n\x12LogDebugStackTrace\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"p\n\x15LogDebugStackTraceMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDebugStackTrace\"\x1e\n\x0e\x43heckCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"h\n\x11\x43heckCleanPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CheckCleanPath\" \n\x10\x43onfirmCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"l\n\x13\x43onfirmCleanPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConfirmCleanPath\"\"\n\x12ProtectedCleanPath\x12\x0c\n\x04path\x18\x01 
\x01(\t\"p\n\x15ProtectedCleanPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ProtectedCleanPath\"\x14\n\x12\x46inishedCleanPaths\"p\n\x15\x46inishedCleanPathsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.FinishedCleanPaths\"5\n\x0bOpenCommand\x12\x10\n\x08open_cmd\x18\x01 \x01(\t\x12\x14\n\x0cprofiles_dir\x18\x02 \x01(\t\"b\n\x0eOpenCommandMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.OpenCommand\"0\n\x0fServingDocsPort\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\"j\n\x12ServingDocsPortMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.ServingDocsPort\"%\n\x15ServingDocsAccessInfo\x12\x0c\n\x04port\x18\x01 \x01(\t\"v\n\x18ServingDocsAccessInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ServingDocsAccessInfo\"\x15\n\x13ServingDocsExitInfo\"r\n\x16ServingDocsExitInfoMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.ServingDocsExitInfo\"\x87\x01\n\x05Group\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0cpackage_name\x18\x03 \x01(\t\x12,\n\x05owner\x18\x07 \x03(\x0b\x32\x1d.proto_types.Group.OwnerEntry\x1a,\n\nOwnerEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x97\x01\n\x10RunResultWarning\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\x12(\n\tnode_info\x18\x04 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12!\n\x05group\x18\x05 \x01(\x0b\x32\x12.proto_types.Group\"l\n\x13RunResultWarningMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.RunResultWarning\"\x97\x01\n\x10RunResultFailure\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\x12(\n\tnode_info\x18\x04 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12!\n\x05group\x18\x05 \x01(\x0b\x32\x12.proto_types.Group\"l\n\x13RunResultFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.RunResultFailure\"k\n\tStatsLine\x12\x30\n\x05stats\x18\x01 \x03(\x0b\x32!.proto_types.StatsLine.StatsEntry\x1a,\n\nStatsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"^\n\x0cStatsLineMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.StatsLine\"j\n\x0eRunResultError\x12\x0b\n\x03msg\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12!\n\x05group\x18\x03 \x01(\x0b\x32\x12.proto_types.Group\"h\n\x11RunResultErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RunResultError\"S\n\x17RunResultErrorNoMessage\x12\x0e\n\x06status\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"z\n\x1aRunResultErrorNoMessageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32$.proto_types.RunResultErrorNoMessage\"I\n\x0fSQLCompiledPath\x12\x0c\n\x04path\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"j\n\x12SQLCompiledPathMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.SQLCompiledPath\"W\n\x14\x43heckNodeTestFailure\x12\x15\n\rrelation_name\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"t\n\x17\x43heckNodeTestFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.CheckNodeTestFailure\"W\n\x0f\x45ndOfRunSummary\x12\x12\n\nnum_errors\x18\x01 \x01(\x05\x12\x14\n\x0cnum_warnings\x18\x02 \x01(\x05\x12\x1a\n\x12keyboard_interrupt\x18\x03 \x01(\x08\"j\n\x12\x45ndOfRunSummaryMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.EndOfRunSummary\"g\n\x13MarkSkippedChildren\x12\x11\n\tunique_id\x18\x01 \x01(\t\x12\x0e\n\x06status\x18\x02 \x01(\t\x12-\n\nrun_result\x18\x03 \x01(\x0b\x32\x19.proto_types.RunResultMsg\"r\n\x16MarkSkippedChildrenMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.MarkSkippedChildren\"e\n\x13LogSkipBecauseError\x12\x0e\n\x06schema\x18\x01 \x01(\t\x12\x10\n\x08relation\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\x12\x0e\n\x06status\x18\x05 \x01(\t\"r\n\x16LogSkipBecauseErrorMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.LogSkipBecauseError\"\x14\n\x12\x45nsureGitInstalled\"p\n\x15\x45nsureGitInstalledMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.EnsureGitInstalled\"\x1a\n\x18\x44\x65psCreatingLocalSymlink\"|\n\x1b\x44\x65psCreatingLocalSymlinkMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DepsCreatingLocalSymlink\"\x19\n\x17\x44\x65psSymlinkNotAvailable\"z\n\x1a\x44\x65psSymlinkNotAvailableMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsSymlinkNotAvailable\"\x11\n\x0f\x44isableTracking\"j\n\x12\x44isableTrackingMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DisableTracking\"\x1e\n\x0cSendingEvent\x12\x0e\n\x06kwargs\x18\x01 \x01(\t\"d\n\x0fSendingEventMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.SendingEvent\"\x12\n\x10SendEventFailure\"l\n\x13SendEventFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.SendEventFailure\"\r\n\x0b\x46lushEvents\"b\n\x0e\x46lushEventsMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.FlushEvents\"\x14\n\x12\x46lushEventsFailure\"p\n\x15\x46lushEventsFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.FlushEventsFailure\"-\n\x19TrackingInitializeFailure\x12\x10\n\x08\x65xc_info\x18\x01 
\x01(\t\"~\n\x1cTrackingInitializeFailureMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.TrackingInitializeFailure\"P\n\x17RunResultWarningMessage\x12\x0b\n\x03msg\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\"z\n\x1aRunResultWarningMessageMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.RunResultWarningMessage\"\x1a\n\x0b\x44\x65\x62ugCmdOut\x12\x0b\n\x03msg\x18\x01 \x01(\t\"b\n\x0e\x44\x65\x62ugCmdOutMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.DebugCmdOut\"\x1d\n\x0e\x44\x65\x62ugCmdResult\x12\x0b\n\x03msg\x18\x01 \x01(\t\"h\n\x11\x44\x65\x62ugCmdResultMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.DebugCmdResult\"\x19\n\nListCmdOut\x12\x0b\n\x03msg\x18\x01 \x01(\t\"`\n\rListCmdOutMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.ListCmdOut\"\xec\x01\n\x0eResourceReport\x12\x14\n\x0c\x63ommand_name\x18\x02 \x01(\t\x12\x17\n\x0f\x63ommand_success\x18\x03 \x01(\x08\x12\x1f\n\x17\x63ommand_wall_clock_time\x18\x04 \x01(\x02\x12\x19\n\x11process_user_time\x18\x05 \x01(\x02\x12\x1b\n\x13process_kernel_time\x18\x06 \x01(\x02\x12\x1b\n\x13process_mem_max_rss\x18\x07 \x01(\x03\x12\x19\n\x11process_in_blocks\x18\x08 \x01(\x03\x12\x1a\n\x12process_out_blocks\x18\t \x01(\x03\"h\n\x11ResourceReportMsg\x12(\n\x04info\x18\x01 \x01(\x0b\x32\x1a.proto_types.CoreEventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ResourceReportb\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'core_types_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + DESCRIPTOR._loaded_options = None + _globals['_COREEVENTINFO_EXTRAENTRY']._loaded_options = None + _globals['_COREEVENTINFO_EXTRAENTRY']._serialized_options = b'8\001' + _globals['_MAINREPORTARGS_ARGSENTRY']._loaded_options = None + _globals['_MAINREPORTARGS_ARGSENTRY']._serialized_options = b'8\001' + _globals['_PARTIALPARSINGERROR_EXCINFOENTRY']._loaded_options = None + _globals['_PARTIALPARSINGERROR_EXCINFOENTRY']._serialized_options = b'8\001' + _globals['_DEPSFOUNDDUPLICATEPACKAGE_REMOVEDPACKAGEENTRY']._loaded_options = None + _globals['_DEPSFOUNDDUPLICATEPACKAGE_REMOVEDPACKAGEENTRY']._serialized_options = b'8\001' + _globals['_LOGSNAPSHOTRESULT_CFGENTRY']._loaded_options = None + _globals['_LOGSNAPSHOTRESULT_CFGENTRY']._serialized_options = b'8\001' + _globals['_GROUP_OWNERENTRY']._loaded_options = None + _globals['_GROUP_OWNERENTRY']._serialized_options = b'8\001' + _globals['_STATSLINE_STATSENTRY']._loaded_options = None + _globals['_STATSLINE_STATSENTRY']._serialized_options = b'8\001' + _globals['_COREEVENTINFO']._serialized_start=97 + _globals['_COREEVENTINFO']._serialized_end=378 + _globals['_COREEVENTINFO_EXTRAENTRY']._serialized_start=334 + _globals['_COREEVENTINFO_EXTRAENTRY']._serialized_end=378 + _globals['_NODERELATION']._serialized_start=380 + _globals['_NODERELATION']._serialized_end=466 + _globals['_NODEINFO']._serialized_start=469 + _globals['_NODEINFO']._serialized_end=742 + _globals['_TIMINGINFOMSG']._serialized_start=744 + _globals['_TIMINGINFOMSG']._serialized_end=871 + 
_globals['_RUNRESULTMSG']._serialized_start=874 + _globals['_RUNRESULTMSG']._serialized_end=1083 + _globals['_COLUMNTYPE']._serialized_start=1085 + _globals['_COLUMNTYPE']._serialized_end=1177 + _globals['_COLUMNCONSTRAINT']._serialized_start=1179 + _globals['_COLUMNCONSTRAINT']._serialized_end=1268 + _globals['_MODELCONSTRAINT']._serialized_start=1270 + _globals['_MODELCONSTRAINT']._serialized_end=1354 + _globals['_MAINREPORTVERSION']._serialized_start=1356 + _globals['_MAINREPORTVERSION']._serialized_end=1413 + _globals['_MAINREPORTVERSIONMSG']._serialized_start=1415 + _globals['_MAINREPORTVERSIONMSG']._serialized_end=1525 + _globals['_MAINREPORTARGS']._serialized_start=1527 + _globals['_MAINREPORTARGS']._serialized_end=1641 + _globals['_MAINREPORTARGS_ARGSENTRY']._serialized_start=1598 + _globals['_MAINREPORTARGS_ARGSENTRY']._serialized_end=1641 + _globals['_MAINREPORTARGSMSG']._serialized_start=1643 + _globals['_MAINREPORTARGSMSG']._serialized_end=1747 + _globals['_MAINTRACKINGUSERSTATE']._serialized_start=1749 + _globals['_MAINTRACKINGUSERSTATE']._serialized_end=1792 + _globals['_MAINTRACKINGUSERSTATEMSG']._serialized_start=1794 + _globals['_MAINTRACKINGUSERSTATEMSG']._serialized_end=1912 + _globals['_MERGEDFROMSTATE']._serialized_start=1914 + _globals['_MERGEDFROMSTATE']._serialized_end=1967 + _globals['_MERGEDFROMSTATEMSG']._serialized_start=1969 + _globals['_MERGEDFROMSTATEMSG']._serialized_end=2075 + _globals['_MISSINGPROFILETARGET']._serialized_start=2077 + _globals['_MISSINGPROFILETARGET']._serialized_end=2142 + _globals['_MISSINGPROFILETARGETMSG']._serialized_start=2144 + _globals['_MISSINGPROFILETARGETMSG']._serialized_end=2260 + _globals['_INVALIDOPTIONYAML']._serialized_start=2262 + _globals['_INVALIDOPTIONYAML']._serialized_end=2302 + _globals['_INVALIDOPTIONYAMLMSG']._serialized_start=2304 + _globals['_INVALIDOPTIONYAMLMSG']._serialized_end=2414 + _globals['_LOGDBTPROJECTERROR']._serialized_start=2416 + _globals['_LOGDBTPROJECTERROR']._serialized_end=2449 + _globals['_LOGDBTPROJECTERRORMSG']._serialized_start=2451 + _globals['_LOGDBTPROJECTERRORMSG']._serialized_end=2563 + _globals['_LOGDBTPROFILEERROR']._serialized_start=2565 + _globals['_LOGDBTPROFILEERROR']._serialized_end=2616 + _globals['_LOGDBTPROFILEERRORMSG']._serialized_start=2618 + _globals['_LOGDBTPROFILEERRORMSG']._serialized_end=2730 + _globals['_STARTERPROJECTPATH']._serialized_start=2732 + _globals['_STARTERPROJECTPATH']._serialized_end=2765 + _globals['_STARTERPROJECTPATHMSG']._serialized_start=2767 + _globals['_STARTERPROJECTPATHMSG']._serialized_end=2879 + _globals['_CONFIGFOLDERDIRECTORY']._serialized_start=2881 + _globals['_CONFIGFOLDERDIRECTORY']._serialized_end=2917 + _globals['_CONFIGFOLDERDIRECTORYMSG']._serialized_start=2919 + _globals['_CONFIGFOLDERDIRECTORYMSG']._serialized_end=3037 + _globals['_NOSAMPLEPROFILEFOUND']._serialized_start=3039 + _globals['_NOSAMPLEPROFILEFOUND']._serialized_end=3078 + _globals['_NOSAMPLEPROFILEFOUNDMSG']._serialized_start=3080 + _globals['_NOSAMPLEPROFILEFOUNDMSG']._serialized_end=3196 + _globals['_PROFILEWRITTENWITHSAMPLE']._serialized_start=3198 + _globals['_PROFILEWRITTENWITHSAMPLE']._serialized_end=3252 + _globals['_PROFILEWRITTENWITHSAMPLEMSG']._serialized_start=3254 + _globals['_PROFILEWRITTENWITHSAMPLEMSG']._serialized_end=3378 + _globals['_PROFILEWRITTENWITHTARGETTEMPLATEYAML']._serialized_start=3380 + _globals['_PROFILEWRITTENWITHTARGETTEMPLATEYAML']._serialized_end=3446 + _globals['_PROFILEWRITTENWITHTARGETTEMPLATEYAMLMSG']._serialized_start=3449 + 
_globals['_PROFILEWRITTENWITHTARGETTEMPLATEYAMLMSG']._serialized_end=3597 + _globals['_PROFILEWRITTENWITHPROJECTTEMPLATEYAML']._serialized_start=3599 + _globals['_PROFILEWRITTENWITHPROJECTTEMPLATEYAML']._serialized_end=3666 + _globals['_PROFILEWRITTENWITHPROJECTTEMPLATEYAMLMSG']._serialized_start=3669 + _globals['_PROFILEWRITTENWITHPROJECTTEMPLATEYAMLMSG']._serialized_end=3819 + _globals['_SETTINGUPPROFILE']._serialized_start=3821 + _globals['_SETTINGUPPROFILE']._serialized_end=3839 + _globals['_SETTINGUPPROFILEMSG']._serialized_start=3841 + _globals['_SETTINGUPPROFILEMSG']._serialized_end=3949 + _globals['_INVALIDPROFILETEMPLATEYAML']._serialized_start=3951 + _globals['_INVALIDPROFILETEMPLATEYAML']._serialized_end=3979 + _globals['_INVALIDPROFILETEMPLATEYAMLMSG']._serialized_start=3982 + _globals['_INVALIDPROFILETEMPLATEYAMLMSG']._serialized_end=4110 + _globals['_PROJECTNAMEALREADYEXISTS']._serialized_start=4112 + _globals['_PROJECTNAMEALREADYEXISTS']._serialized_end=4152 + _globals['_PROJECTNAMEALREADYEXISTSMSG']._serialized_start=4154 + _globals['_PROJECTNAMEALREADYEXISTSMSG']._serialized_end=4278 + _globals['_PROJECTCREATED']._serialized_start=4280 + _globals['_PROJECTCREATED']._serialized_end=4355 + _globals['_PROJECTCREATEDMSG']._serialized_start=4357 + _globals['_PROJECTCREATEDMSG']._serialized_end=4461 + _globals['_PACKAGEREDIRECTDEPRECATION']._serialized_start=4463 + _globals['_PACKAGEREDIRECTDEPRECATION']._serialized_end=4527 + _globals['_PACKAGEREDIRECTDEPRECATIONMSG']._serialized_start=4530 + _globals['_PACKAGEREDIRECTDEPRECATIONMSG']._serialized_end=4658 + _globals['_PACKAGEINSTALLPATHDEPRECATION']._serialized_start=4660 + _globals['_PACKAGEINSTALLPATHDEPRECATION']._serialized_end=4691 + _globals['_PACKAGEINSTALLPATHDEPRECATIONMSG']._serialized_start=4694 + _globals['_PACKAGEINSTALLPATHDEPRECATIONMSG']._serialized_end=4828 + _globals['_CONFIGSOURCEPATHDEPRECATION']._serialized_start=4830 + _globals['_CONFIGSOURCEPATHDEPRECATION']._serialized_end=4902 + _globals['_CONFIGSOURCEPATHDEPRECATIONMSG']._serialized_start=4905 + _globals['_CONFIGSOURCEPATHDEPRECATIONMSG']._serialized_end=5035 + _globals['_CONFIGDATAPATHDEPRECATION']._serialized_start=5037 + _globals['_CONFIGDATAPATHDEPRECATION']._serialized_end=5107 + _globals['_CONFIGDATAPATHDEPRECATIONMSG']._serialized_start=5109 + _globals['_CONFIGDATAPATHDEPRECATIONMSG']._serialized_end=5235 + _globals['_METRICATTRIBUTESRENAMED']._serialized_start=5237 + _globals['_METRICATTRIBUTESRENAMED']._serialized_end=5283 + _globals['_METRICATTRIBUTESRENAMEDMSG']._serialized_start=5285 + _globals['_METRICATTRIBUTESRENAMEDMSG']._serialized_end=5407 + _globals['_EXPOSURENAMEDEPRECATION']._serialized_start=5409 + _globals['_EXPOSURENAMEDEPRECATION']._serialized_end=5452 + _globals['_EXPOSURENAMEDEPRECATIONMSG']._serialized_start=5454 + _globals['_EXPOSURENAMEDEPRECATIONMSG']._serialized_end=5576 + _globals['_INTERNALDEPRECATION']._serialized_start=5578 + _globals['_INTERNALDEPRECATION']._serialized_end=5672 + _globals['_INTERNALDEPRECATIONMSG']._serialized_start=5674 + _globals['_INTERNALDEPRECATIONMSG']._serialized_end=5788 + _globals['_ENVIRONMENTVARIABLERENAMED']._serialized_start=5790 + _globals['_ENVIRONMENTVARIABLERENAMED']._serialized_end=5854 + _globals['_ENVIRONMENTVARIABLERENAMEDMSG']._serialized_start=5857 + _globals['_ENVIRONMENTVARIABLERENAMEDMSG']._serialized_end=5985 + _globals['_CONFIGLOGPATHDEPRECATION']._serialized_start=5987 + _globals['_CONFIGLOGPATHDEPRECATION']._serialized_end=6038 + 
_globals['_CONFIGLOGPATHDEPRECATIONMSG']._serialized_start=6040 + _globals['_CONFIGLOGPATHDEPRECATIONMSG']._serialized_end=6164 + _globals['_CONFIGTARGETPATHDEPRECATION']._serialized_start=6166 + _globals['_CONFIGTARGETPATHDEPRECATION']._serialized_end=6220 + _globals['_CONFIGTARGETPATHDEPRECATIONMSG']._serialized_start=6223 + _globals['_CONFIGTARGETPATHDEPRECATIONMSG']._serialized_end=6353 + _globals['_TESTSCONFIGDEPRECATION']._serialized_start=6355 + _globals['_TESTSCONFIGDEPRECATION']._serialized_end=6422 + _globals['_TESTSCONFIGDEPRECATIONMSG']._serialized_start=6424 + _globals['_TESTSCONFIGDEPRECATIONMSG']._serialized_end=6544 + _globals['_PROJECTFLAGSMOVEDDEPRECATION']._serialized_start=6546 + _globals['_PROJECTFLAGSMOVEDDEPRECATION']._serialized_end=6576 + _globals['_PROJECTFLAGSMOVEDDEPRECATIONMSG']._serialized_start=6579 + _globals['_PROJECTFLAGSMOVEDDEPRECATIONMSG']._serialized_end=6711 + _globals['_SPACESINRESOURCENAMEDEPRECATION']._serialized_start=6713 + _globals['_SPACESINRESOURCENAMEDEPRECATION']._serialized_end=6780 + _globals['_SPACESINRESOURCENAMEDEPRECATIONMSG']._serialized_start=6783 + _globals['_SPACESINRESOURCENAMEDEPRECATIONMSG']._serialized_end=6921 + _globals['_RESOURCENAMESWITHSPACESDEPRECATION']._serialized_start=6923 + _globals['_RESOURCENAMESWITHSPACESDEPRECATION']._serialized_end=7028 + _globals['_RESOURCENAMESWITHSPACESDEPRECATIONMSG']._serialized_start=7031 + _globals['_RESOURCENAMESWITHSPACESDEPRECATIONMSG']._serialized_end=7175 + _globals['_PACKAGEMATERIALIZATIONOVERRIDEDEPRECATION']._serialized_start=7177 + _globals['_PACKAGEMATERIALIZATIONOVERRIDEDEPRECATION']._serialized_end=7272 + _globals['_PACKAGEMATERIALIZATIONOVERRIDEDEPRECATIONMSG']._serialized_start=7275 + _globals['_PACKAGEMATERIALIZATIONOVERRIDEDEPRECATIONMSG']._serialized_end=7433 + _globals['_SOURCEFRESHNESSPROJECTHOOKSNOTRUN']._serialized_start=7435 + _globals['_SOURCEFRESHNESSPROJECTHOOKSNOTRUN']._serialized_end=7470 + _globals['_SOURCEFRESHNESSPROJECTHOOKSNOTRUNMSG']._serialized_start=7473 + _globals['_SOURCEFRESHNESSPROJECTHOOKSNOTRUNMSG']._serialized_end=7615 + _globals['_DEPRECATEDMODEL']._serialized_start=7617 + _globals['_DEPRECATEDMODEL']._serialized_end=7703 + _globals['_DEPRECATEDMODELMSG']._serialized_start=7705 + _globals['_DEPRECATEDMODELMSG']._serialized_end=7811 + _globals['_INPUTFILEDIFFERROR']._serialized_start=7813 + _globals['_INPUTFILEDIFFERROR']._serialized_end=7868 + _globals['_INPUTFILEDIFFERRORMSG']._serialized_start=7870 + _globals['_INPUTFILEDIFFERRORMSG']._serialized_end=7982 + _globals['_INVALIDVALUEFORFIELD']._serialized_start=7984 + _globals['_INVALIDVALUEFORFIELD']._serialized_end=8047 + _globals['_INVALIDVALUEFORFIELDMSG']._serialized_start=8049 + _globals['_INVALIDVALUEFORFIELDMSG']._serialized_end=8165 + _globals['_VALIDATIONWARNING']._serialized_start=8167 + _globals['_VALIDATIONWARNING']._serialized_end=8248 + _globals['_VALIDATIONWARNINGMSG']._serialized_start=8250 + _globals['_VALIDATIONWARNINGMSG']._serialized_end=8360 + _globals['_PARSEPERFINFOPATH']._serialized_start=8362 + _globals['_PARSEPERFINFOPATH']._serialized_end=8395 + _globals['_PARSEPERFINFOPATHMSG']._serialized_start=8397 + _globals['_PARSEPERFINFOPATHMSG']._serialized_end=8507 + _globals['_PARTIALPARSINGERRORPROCESSINGFILE']._serialized_start=8509 + _globals['_PARTIALPARSINGERRORPROCESSINGFILE']._serialized_end=8558 + _globals['_PARTIALPARSINGERRORPROCESSINGFILEMSG']._serialized_start=8561 + _globals['_PARTIALPARSINGERRORPROCESSINGFILEMSG']._serialized_end=8703 + 
_globals['_PARTIALPARSINGERROR']._serialized_start=8706 + _globals['_PARTIALPARSINGERROR']._serialized_end=8840 + _globals['_PARTIALPARSINGERROR_EXCINFOENTRY']._serialized_start=8794 + _globals['_PARTIALPARSINGERROR_EXCINFOENTRY']._serialized_end=8840 + _globals['_PARTIALPARSINGERRORMSG']._serialized_start=8842 + _globals['_PARTIALPARSINGERRORMSG']._serialized_end=8956 + _globals['_PARTIALPARSINGSKIPPARSING']._serialized_start=8958 + _globals['_PARTIALPARSINGSKIPPARSING']._serialized_end=8985 + _globals['_PARTIALPARSINGSKIPPARSINGMSG']._serialized_start=8987 + _globals['_PARTIALPARSINGSKIPPARSINGMSG']._serialized_end=9113 + _globals['_UNABLETOPARTIALPARSE']._serialized_start=9115 + _globals['_UNABLETOPARTIALPARSE']._serialized_end=9153 + _globals['_UNABLETOPARTIALPARSEMSG']._serialized_start=9155 + _globals['_UNABLETOPARTIALPARSEMSG']._serialized_end=9271 + _globals['_STATECHECKVARSHASH']._serialized_start=9273 + _globals['_STATECHECKVARSHASH']._serialized_end=9375 + _globals['_STATECHECKVARSHASHMSG']._serialized_start=9377 + _globals['_STATECHECKVARSHASHMSG']._serialized_end=9489 + _globals['_PARTIALPARSINGNOTENABLED']._serialized_start=9491 + _globals['_PARTIALPARSINGNOTENABLED']._serialized_end=9517 + _globals['_PARTIALPARSINGNOTENABLEDMSG']._serialized_start=9519 + _globals['_PARTIALPARSINGNOTENABLEDMSG']._serialized_end=9643 + _globals['_PARSEDFILELOADFAILED']._serialized_start=9645 + _globals['_PARSEDFILELOADFAILED']._serialized_end=9712 + _globals['_PARSEDFILELOADFAILEDMSG']._serialized_start=9714 + _globals['_PARSEDFILELOADFAILEDMSG']._serialized_end=9830 + _globals['_PARTIALPARSINGENABLED']._serialized_start=9832 + _globals['_PARTIALPARSINGENABLED']._serialized_end=9904 + _globals['_PARTIALPARSINGENABLEDMSG']._serialized_start=9906 + _globals['_PARTIALPARSINGENABLEDMSG']._serialized_end=10024 + _globals['_PARTIALPARSINGFILE']._serialized_start=10026 + _globals['_PARTIALPARSINGFILE']._serialized_end=10082 + _globals['_PARTIALPARSINGFILEMSG']._serialized_start=10084 + _globals['_PARTIALPARSINGFILEMSG']._serialized_end=10196 + _globals['_INVALIDDISABLEDTARGETINTESTNODE']._serialized_start=10199 + _globals['_INVALIDDISABLEDTARGETINTESTNODE']._serialized_end=10374 + _globals['_INVALIDDISABLEDTARGETINTESTNODEMSG']._serialized_start=10377 + _globals['_INVALIDDISABLEDTARGETINTESTNODEMSG']._serialized_end=10515 + _globals['_UNUSEDRESOURCECONFIGPATH']._serialized_start=10517 + _globals['_UNUSEDRESOURCECONFIGPATH']._serialized_end=10572 + _globals['_UNUSEDRESOURCECONFIGPATHMSG']._serialized_start=10574 + _globals['_UNUSEDRESOURCECONFIGPATHMSG']._serialized_end=10698 + _globals['_SEEDINCREASED']._serialized_start=10700 + _globals['_SEEDINCREASED']._serialized_end=10751 + _globals['_SEEDINCREASEDMSG']._serialized_start=10753 + _globals['_SEEDINCREASEDMSG']._serialized_end=10855 + _globals['_SEEDEXCEEDSLIMITSAMEPATH']._serialized_start=10857 + _globals['_SEEDEXCEEDSLIMITSAMEPATH']._serialized_end=10919 + _globals['_SEEDEXCEEDSLIMITSAMEPATHMSG']._serialized_start=10921 + _globals['_SEEDEXCEEDSLIMITSAMEPATHMSG']._serialized_end=11045 + _globals['_SEEDEXCEEDSLIMITANDPATHCHANGED']._serialized_start=11047 + _globals['_SEEDEXCEEDSLIMITANDPATHCHANGED']._serialized_end=11115 + _globals['_SEEDEXCEEDSLIMITANDPATHCHANGEDMSG']._serialized_start=11118 + _globals['_SEEDEXCEEDSLIMITANDPATHCHANGEDMSG']._serialized_end=11254 + _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGED']._serialized_start=11256 + _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGED']._serialized_end=11348 + 
_globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGEDMSG']._serialized_start=11351 + _globals['_SEEDEXCEEDSLIMITCHECKSUMCHANGEDMSG']._serialized_end=11489 + _globals['_UNUSEDTABLES']._serialized_start=11491 + _globals['_UNUSEDTABLES']._serialized_end=11528 + _globals['_UNUSEDTABLESMSG']._serialized_start=11530 + _globals['_UNUSEDTABLESMSG']._serialized_end=11630 + _globals['_WRONGRESOURCESCHEMAFILE']._serialized_start=11633 + _globals['_WRONGRESOURCESCHEMAFILE']._serialized_end=11768 + _globals['_WRONGRESOURCESCHEMAFILEMSG']._serialized_start=11770 + _globals['_WRONGRESOURCESCHEMAFILEMSG']._serialized_end=11892 + _globals['_NONODEFORYAMLKEY']._serialized_start=11894 + _globals['_NONODEFORYAMLKEY']._serialized_end=11969 + _globals['_NONODEFORYAMLKEYMSG']._serialized_start=11971 + _globals['_NONODEFORYAMLKEYMSG']._serialized_end=12079 + _globals['_MACRONOTFOUNDFORPATCH']._serialized_start=12081 + _globals['_MACRONOTFOUNDFORPATCH']._serialized_end=12124 + _globals['_MACRONOTFOUNDFORPATCHMSG']._serialized_start=12126 + _globals['_MACRONOTFOUNDFORPATCHMSG']._serialized_end=12244 + _globals['_NODENOTFOUNDORDISABLED']._serialized_start=12247 + _globals['_NODENOTFOUNDORDISABLED']._serialized_end=12431 + _globals['_NODENOTFOUNDORDISABLEDMSG']._serialized_start=12433 + _globals['_NODENOTFOUNDORDISABLEDMSG']._serialized_end=12553 + _globals['_JINJALOGWARNING']._serialized_start=12555 + _globals['_JINJALOGWARNING']._serialized_end=12627 + _globals['_JINJALOGWARNINGMSG']._serialized_start=12629 + _globals['_JINJALOGWARNINGMSG']._serialized_end=12735 + _globals['_JINJALOGINFO']._serialized_start=12737 + _globals['_JINJALOGINFO']._serialized_end=12806 + _globals['_JINJALOGINFOMSG']._serialized_start=12808 + _globals['_JINJALOGINFOMSG']._serialized_end=12908 + _globals['_JINJALOGDEBUG']._serialized_start=12910 + _globals['_JINJALOGDEBUG']._serialized_end=12980 + _globals['_JINJALOGDEBUGMSG']._serialized_start=12982 + _globals['_JINJALOGDEBUGMSG']._serialized_end=13084 + _globals['_UNPINNEDREFNEWVERSIONAVAILABLE']._serialized_start=13087 + _globals['_UNPINNEDREFNEWVERSIONAVAILABLE']._serialized_end=13261 + _globals['_UNPINNEDREFNEWVERSIONAVAILABLEMSG']._serialized_start=13264 + _globals['_UNPINNEDREFNEWVERSIONAVAILABLEMSG']._serialized_end=13400 + _globals['_UPCOMINGREFERENCEDEPRECATION']._serialized_start=13403 + _globals['_UPCOMINGREFERENCEDEPRECATION']._serialized_end=13601 + _globals['_UPCOMINGREFERENCEDEPRECATIONMSG']._serialized_start=13604 + _globals['_UPCOMINGREFERENCEDEPRECATIONMSG']._serialized_end=13736 + _globals['_DEPRECATEDREFERENCE']._serialized_start=13739 + _globals['_DEPRECATEDREFERENCE']._serialized_end=13928 + _globals['_DEPRECATEDREFERENCEMSG']._serialized_start=13930 + _globals['_DEPRECATEDREFERENCEMSG']._serialized_end=14044 + _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATION']._serialized_start=14046 + _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATION']._serialized_end=14106 + _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATIONMSG']._serialized_start=14109 + _globals['_UNSUPPORTEDCONSTRAINTMATERIALIZATIONMSG']._serialized_end=14257 + _globals['_PARSEINLINENODEERROR']._serialized_start=14259 + _globals['_PARSEINLINENODEERROR']._serialized_end=14336 + _globals['_PARSEINLINENODEERRORMSG']._serialized_start=14338 + _globals['_PARSEINLINENODEERRORMSG']._serialized_end=14454 + _globals['_SEMANTICVALIDATIONFAILURE']._serialized_start=14456 + _globals['_SEMANTICVALIDATIONFAILURE']._serialized_end=14496 + _globals['_SEMANTICVALIDATIONFAILUREMSG']._serialized_start=14498 + 
_globals['_SEMANTICVALIDATIONFAILUREMSG']._serialized_end=14624 + _globals['_UNVERSIONEDBREAKINGCHANGE']._serialized_start=14627 + _globals['_UNVERSIONEDBREAKINGCHANGE']._serialized_end=15021 + _globals['_UNVERSIONEDBREAKINGCHANGEMSG']._serialized_start=15023 + _globals['_UNVERSIONEDBREAKINGCHANGEMSG']._serialized_end=15149 + _globals['_WARNSTATETARGETEQUAL']._serialized_start=15151 + _globals['_WARNSTATETARGETEQUAL']._serialized_end=15193 + _globals['_WARNSTATETARGETEQUALMSG']._serialized_start=15195 + _globals['_WARNSTATETARGETEQUALMSG']._serialized_end=15311 + _globals['_FRESHNESSCONFIGPROBLEM']._serialized_start=15313 + _globals['_FRESHNESSCONFIGPROBLEM']._serialized_end=15350 + _globals['_FRESHNESSCONFIGPROBLEMMSG']._serialized_start=15352 + _globals['_FRESHNESSCONFIGPROBLEMMSG']._serialized_end=15472 + _globals['_GITSPARSECHECKOUTSUBDIRECTORY']._serialized_start=15474 + _globals['_GITSPARSECHECKOUTSUBDIRECTORY']._serialized_end=15521 + _globals['_GITSPARSECHECKOUTSUBDIRECTORYMSG']._serialized_start=15524 + _globals['_GITSPARSECHECKOUTSUBDIRECTORYMSG']._serialized_end=15658 + _globals['_GITPROGRESSCHECKOUTREVISION']._serialized_start=15660 + _globals['_GITPROGRESSCHECKOUTREVISION']._serialized_end=15707 + _globals['_GITPROGRESSCHECKOUTREVISIONMSG']._serialized_start=15710 + _globals['_GITPROGRESSCHECKOUTREVISIONMSG']._serialized_end=15840 + _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCY']._serialized_start=15842 + _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCY']._serialized_end=15894 + _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCYMSG']._serialized_start=15897 + _globals['_GITPROGRESSUPDATINGEXISTINGDEPENDENCYMSG']._serialized_end=16047 + _globals['_GITPROGRESSPULLINGNEWDEPENDENCY']._serialized_start=16049 + _globals['_GITPROGRESSPULLINGNEWDEPENDENCY']._serialized_end=16095 + _globals['_GITPROGRESSPULLINGNEWDEPENDENCYMSG']._serialized_start=16098 + _globals['_GITPROGRESSPULLINGNEWDEPENDENCYMSG']._serialized_end=16236 + _globals['_GITNOTHINGTODO']._serialized_start=16238 + _globals['_GITNOTHINGTODO']._serialized_end=16267 + _globals['_GITNOTHINGTODOMSG']._serialized_start=16269 + _globals['_GITNOTHINGTODOMSG']._serialized_end=16373 + _globals['_GITPROGRESSUPDATEDCHECKOUTRANGE']._serialized_start=16375 + _globals['_GITPROGRESSUPDATEDCHECKOUTRANGE']._serialized_end=16444 + _globals['_GITPROGRESSUPDATEDCHECKOUTRANGEMSG']._serialized_start=16447 + _globals['_GITPROGRESSUPDATEDCHECKOUTRANGEMSG']._serialized_end=16585 + _globals['_GITPROGRESSCHECKEDOUTAT']._serialized_start=16587 + _globals['_GITPROGRESSCHECKEDOUTAT']._serialized_end=16629 + _globals['_GITPROGRESSCHECKEDOUTATMSG']._serialized_start=16631 + _globals['_GITPROGRESSCHECKEDOUTATMSG']._serialized_end=16753 + _globals['_REGISTRYPROGRESSGETREQUEST']._serialized_start=16755 + _globals['_REGISTRYPROGRESSGETREQUEST']._serialized_end=16796 + _globals['_REGISTRYPROGRESSGETREQUESTMSG']._serialized_start=16799 + _globals['_REGISTRYPROGRESSGETREQUESTMSG']._serialized_end=16927 + _globals['_REGISTRYPROGRESSGETRESPONSE']._serialized_start=16929 + _globals['_REGISTRYPROGRESSGETRESPONSE']._serialized_end=16990 + _globals['_REGISTRYPROGRESSGETRESPONSEMSG']._serialized_start=16993 + _globals['_REGISTRYPROGRESSGETRESPONSEMSG']._serialized_end=17123 + _globals['_SELECTORREPORTINVALIDSELECTOR']._serialized_start=17125 + _globals['_SELECTORREPORTINVALIDSELECTOR']._serialized_end=17220 + _globals['_SELECTORREPORTINVALIDSELECTORMSG']._serialized_start=17223 + _globals['_SELECTORREPORTINVALIDSELECTORMSG']._serialized_end=17357 + 
_globals['_DEPSNOPACKAGESFOUND']._serialized_start=17359 + _globals['_DEPSNOPACKAGESFOUND']._serialized_end=17380 + _globals['_DEPSNOPACKAGESFOUNDMSG']._serialized_start=17382 + _globals['_DEPSNOPACKAGESFOUNDMSG']._serialized_end=17496 + _globals['_DEPSSTARTPACKAGEINSTALL']._serialized_start=17498 + _globals['_DEPSSTARTPACKAGEINSTALL']._serialized_end=17545 + _globals['_DEPSSTARTPACKAGEINSTALLMSG']._serialized_start=17547 + _globals['_DEPSSTARTPACKAGEINSTALLMSG']._serialized_end=17669 + _globals['_DEPSINSTALLINFO']._serialized_start=17671 + _globals['_DEPSINSTALLINFO']._serialized_end=17710 + _globals['_DEPSINSTALLINFOMSG']._serialized_start=17712 + _globals['_DEPSINSTALLINFOMSG']._serialized_end=17818 + _globals['_DEPSUPDATEAVAILABLE']._serialized_start=17820 + _globals['_DEPSUPDATEAVAILABLE']._serialized_end=17865 + _globals['_DEPSUPDATEAVAILABLEMSG']._serialized_start=17867 + _globals['_DEPSUPDATEAVAILABLEMSG']._serialized_end=17981 + _globals['_DEPSUPTODATE']._serialized_start=17983 + _globals['_DEPSUPTODATE']._serialized_end=17997 + _globals['_DEPSUPTODATEMSG']._serialized_start=17999 + _globals['_DEPSUPTODATEMSG']._serialized_end=18099 + _globals['_DEPSLISTSUBDIRECTORY']._serialized_start=18101 + _globals['_DEPSLISTSUBDIRECTORY']._serialized_end=18145 + _globals['_DEPSLISTSUBDIRECTORYMSG']._serialized_start=18147 + _globals['_DEPSLISTSUBDIRECTORYMSG']._serialized_end=18263 + _globals['_DEPSNOTIFYUPDATESAVAILABLE']._serialized_start=18265 + _globals['_DEPSNOTIFYUPDATESAVAILABLE']._serialized_end=18311 + _globals['_DEPSNOTIFYUPDATESAVAILABLEMSG']._serialized_start=18314 + _globals['_DEPSNOTIFYUPDATESAVAILABLEMSG']._serialized_end=18442 + _globals['_REGISTRYINDEXPROGRESSGETREQUEST']._serialized_start=18444 + _globals['_REGISTRYINDEXPROGRESSGETREQUEST']._serialized_end=18490 + _globals['_REGISTRYINDEXPROGRESSGETREQUESTMSG']._serialized_start=18493 + _globals['_REGISTRYINDEXPROGRESSGETREQUESTMSG']._serialized_end=18631 + _globals['_REGISTRYINDEXPROGRESSGETRESPONSE']._serialized_start=18633 + _globals['_REGISTRYINDEXPROGRESSGETRESPONSE']._serialized_end=18699 + _globals['_REGISTRYINDEXPROGRESSGETRESPONSEMSG']._serialized_start=18702 + _globals['_REGISTRYINDEXPROGRESSGETRESPONSEMSG']._serialized_end=18842 + _globals['_REGISTRYRESPONSEUNEXPECTEDTYPE']._serialized_start=18844 + _globals['_REGISTRYRESPONSEUNEXPECTEDTYPE']._serialized_end=18894 + _globals['_REGISTRYRESPONSEUNEXPECTEDTYPEMSG']._serialized_start=18897 + _globals['_REGISTRYRESPONSEUNEXPECTEDTYPEMSG']._serialized_end=19033 + _globals['_REGISTRYRESPONSEMISSINGTOPKEYS']._serialized_start=19035 + _globals['_REGISTRYRESPONSEMISSINGTOPKEYS']._serialized_end=19085 + _globals['_REGISTRYRESPONSEMISSINGTOPKEYSMSG']._serialized_start=19088 + _globals['_REGISTRYRESPONSEMISSINGTOPKEYSMSG']._serialized_end=19224 + _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYS']._serialized_start=19226 + _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYS']._serialized_end=19279 + _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYSMSG']._serialized_start=19282 + _globals['_REGISTRYRESPONSEMISSINGNESTEDKEYSMSG']._serialized_end=19424 + _globals['_REGISTRYRESPONSEEXTRANESTEDKEYS']._serialized_start=19426 + _globals['_REGISTRYRESPONSEEXTRANESTEDKEYS']._serialized_end=19477 + _globals['_REGISTRYRESPONSEEXTRANESTEDKEYSMSG']._serialized_start=19480 + _globals['_REGISTRYRESPONSEEXTRANESTEDKEYSMSG']._serialized_end=19618 + _globals['_DEPSSETDOWNLOADDIRECTORY']._serialized_start=19620 + _globals['_DEPSSETDOWNLOADDIRECTORY']._serialized_end=19660 + 
_globals['_DEPSSETDOWNLOADDIRECTORYMSG']._serialized_start=19662 + _globals['_DEPSSETDOWNLOADDIRECTORYMSG']._serialized_end=19786 + _globals['_DEPSUNPINNED']._serialized_start=19788 + _globals['_DEPSUNPINNED']._serialized_end=19833 + _globals['_DEPSUNPINNEDMSG']._serialized_start=19835 + _globals['_DEPSUNPINNEDMSG']._serialized_end=19935 + _globals['_NONODESFORSELECTIONCRITERIA']._serialized_start=19937 + _globals['_NONODESFORSELECTIONCRITERIA']._serialized_end=19984 + _globals['_NONODESFORSELECTIONCRITERIAMSG']._serialized_start=19987 + _globals['_NONODESFORSELECTIONCRITERIAMSG']._serialized_end=20117 + _globals['_DEPSLOCKUPDATING']._serialized_start=20119 + _globals['_DEPSLOCKUPDATING']._serialized_end=20160 + _globals['_DEPSLOCKUPDATINGMSG']._serialized_start=20162 + _globals['_DEPSLOCKUPDATINGMSG']._serialized_end=20270 + _globals['_DEPSADDPACKAGE']._serialized_start=20272 + _globals['_DEPSADDPACKAGE']._serialized_end=20354 + _globals['_DEPSADDPACKAGEMSG']._serialized_start=20356 + _globals['_DEPSADDPACKAGEMSG']._serialized_end=20460 + _globals['_DEPSFOUNDDUPLICATEPACKAGE']._serialized_start=20463 + _globals['_DEPSFOUNDDUPLICATEPACKAGE']._serialized_end=20630 + _globals['_DEPSFOUNDDUPLICATEPACKAGE_REMOVEDPACKAGEENTRY']._serialized_start=20577 + _globals['_DEPSFOUNDDUPLICATEPACKAGE_REMOVEDPACKAGEENTRY']._serialized_end=20630 + _globals['_DEPSFOUNDDUPLICATEPACKAGEMSG']._serialized_start=20632 + _globals['_DEPSFOUNDDUPLICATEPACKAGEMSG']._serialized_end=20758 + _globals['_DEPSVERSIONMISSING']._serialized_start=20760 + _globals['_DEPSVERSIONMISSING']._serialized_end=20796 + _globals['_DEPSVERSIONMISSINGMSG']._serialized_start=20798 + _globals['_DEPSVERSIONMISSINGMSG']._serialized_end=20910 + _globals['_DEPSSCRUBBEDPACKAGENAME']._serialized_start=20912 + _globals['_DEPSSCRUBBEDPACKAGENAME']._serialized_end=20959 + _globals['_DEPSSCRUBBEDPACKAGENAMEMSG']._serialized_start=20961 + _globals['_DEPSSCRUBBEDPACKAGENAMEMSG']._serialized_end=21083 + _globals['_RUNNINGOPERATIONCAUGHTERROR']._serialized_start=21085 + _globals['_RUNNINGOPERATIONCAUGHTERROR']._serialized_end=21127 + _globals['_RUNNINGOPERATIONCAUGHTERRORMSG']._serialized_start=21130 + _globals['_RUNNINGOPERATIONCAUGHTERRORMSG']._serialized_end=21260 + _globals['_COMPILECOMPLETE']._serialized_start=21262 + _globals['_COMPILECOMPLETE']._serialized_end=21279 + _globals['_COMPILECOMPLETEMSG']._serialized_start=21281 + _globals['_COMPILECOMPLETEMSG']._serialized_end=21387 + _globals['_FRESHNESSCHECKCOMPLETE']._serialized_start=21389 + _globals['_FRESHNESSCHECKCOMPLETE']._serialized_end=21413 + _globals['_FRESHNESSCHECKCOMPLETEMSG']._serialized_start=21415 + _globals['_FRESHNESSCHECKCOMPLETEMSG']._serialized_end=21535 + _globals['_SEEDHEADER']._serialized_start=21537 + _globals['_SEEDHEADER']._serialized_end=21565 + _globals['_SEEDHEADERMSG']._serialized_start=21567 + _globals['_SEEDHEADERMSG']._serialized_end=21663 + _globals['_SQLRUNNEREXCEPTION']._serialized_start=21665 + _globals['_SQLRUNNEREXCEPTION']._serialized_end=21758 + _globals['_SQLRUNNEREXCEPTIONMSG']._serialized_start=21760 + _globals['_SQLRUNNEREXCEPTIONMSG']._serialized_end=21872 + _globals['_LOGTESTRESULT']._serialized_start=21875 + _globals['_LOGTESTRESULT']._serialized_end=22043 + _globals['_LOGTESTRESULTMSG']._serialized_start=22045 + _globals['_LOGTESTRESULTMSG']._serialized_end=22147 + _globals['_LOGSTARTLINE']._serialized_start=22149 + _globals['_LOGSTARTLINE']._serialized_end=22256 + _globals['_LOGSTARTLINEMSG']._serialized_start=22258 + 
_globals['_LOGSTARTLINEMSG']._serialized_end=22358 + _globals['_LOGMODELRESULT']._serialized_start=22361 + _globals['_LOGMODELRESULT']._serialized_end=22510 + _globals['_LOGMODELRESULTMSG']._serialized_start=22512 + _globals['_LOGMODELRESULTMSG']._serialized_end=22616 + _globals['_LOGSNAPSHOTRESULT']._serialized_start=22619 + _globals['_LOGSNAPSHOTRESULT']._serialized_end=22893 + _globals['_LOGSNAPSHOTRESULT_CFGENTRY']._serialized_start=22851 + _globals['_LOGSNAPSHOTRESULT_CFGENTRY']._serialized_end=22893 + _globals['_LOGSNAPSHOTRESULTMSG']._serialized_start=22895 + _globals['_LOGSNAPSHOTRESULTMSG']._serialized_end=23005 + _globals['_LOGSEEDRESULT']._serialized_start=23008 + _globals['_LOGSEEDRESULT']._serialized_end=23193 + _globals['_LOGSEEDRESULTMSG']._serialized_start=23195 + _globals['_LOGSEEDRESULTMSG']._serialized_end=23297 + _globals['_LOGFRESHNESSRESULT']._serialized_start=23300 + _globals['_LOGFRESHNESSRESULT']._serialized_end=23473 + _globals['_LOGFRESHNESSRESULTMSG']._serialized_start=23475 + _globals['_LOGFRESHNESSRESULTMSG']._serialized_end=23587 + _globals['_LOGNODENOOPRESULT']._serialized_start=23590 + _globals['_LOGNODENOOPRESULT']._serialized_end=23742 + _globals['_LOGNODENOOPRESULTMSG']._serialized_start=23744 + _globals['_LOGNODENOOPRESULTMSG']._serialized_end=23854 + _globals['_LOGCANCELLINE']._serialized_start=23856 + _globals['_LOGCANCELLINE']._serialized_end=23890 + _globals['_LOGCANCELLINEMSG']._serialized_start=23892 + _globals['_LOGCANCELLINEMSG']._serialized_end=23994 + _globals['_DEFAULTSELECTOR']._serialized_start=23996 + _globals['_DEFAULTSELECTOR']._serialized_end=24027 + _globals['_DEFAULTSELECTORMSG']._serialized_start=24029 + _globals['_DEFAULTSELECTORMSG']._serialized_end=24135 + _globals['_NODESTART']._serialized_start=24137 + _globals['_NODESTART']._serialized_end=24190 + _globals['_NODESTARTMSG']._serialized_start=24192 + _globals['_NODESTARTMSG']._serialized_end=24286 + _globals['_NODEFINISHED']._serialized_start=24288 + _globals['_NODEFINISHED']._serialized_end=24391 + _globals['_NODEFINISHEDMSG']._serialized_start=24393 + _globals['_NODEFINISHEDMSG']._serialized_end=24493 + _globals['_QUERYCANCELATIONUNSUPPORTED']._serialized_start=24495 + _globals['_QUERYCANCELATIONUNSUPPORTED']._serialized_end=24538 + _globals['_QUERYCANCELATIONUNSUPPORTEDMSG']._serialized_start=24541 + _globals['_QUERYCANCELATIONUNSUPPORTEDMSG']._serialized_end=24671 + _globals['_CONCURRENCYLINE']._serialized_start=24673 + _globals['_CONCURRENCYLINE']._serialized_end=24752 + _globals['_CONCURRENCYLINEMSG']._serialized_start=24754 + _globals['_CONCURRENCYLINEMSG']._serialized_end=24860 + _globals['_WRITINGINJECTEDSQLFORNODE']._serialized_start=24862 + _globals['_WRITINGINJECTEDSQLFORNODE']._serialized_end=24931 + _globals['_WRITINGINJECTEDSQLFORNODEMSG']._serialized_start=24933 + _globals['_WRITINGINJECTEDSQLFORNODEMSG']._serialized_end=25059 + _globals['_NODECOMPILING']._serialized_start=25061 + _globals['_NODECOMPILING']._serialized_end=25118 + _globals['_NODECOMPILINGMSG']._serialized_start=25120 + _globals['_NODECOMPILINGMSG']._serialized_end=25222 + _globals['_NODEEXECUTING']._serialized_start=25224 + _globals['_NODEEXECUTING']._serialized_end=25281 + _globals['_NODEEXECUTINGMSG']._serialized_start=25283 + _globals['_NODEEXECUTINGMSG']._serialized_end=25385 + _globals['_LOGHOOKSTARTLINE']._serialized_start=25387 + _globals['_LOGHOOKSTARTLINE']._serialized_end=25496 + _globals['_LOGHOOKSTARTLINEMSG']._serialized_start=25498 + 
_globals['_LOGHOOKSTARTLINEMSG']._serialized_end=25606 + _globals['_LOGHOOKENDLINE']._serialized_start=25609 + _globals['_LOGHOOKENDLINE']._serialized_end=25756 + _globals['_LOGHOOKENDLINEMSG']._serialized_start=25758 + _globals['_LOGHOOKENDLINEMSG']._serialized_end=25862 + _globals['_SKIPPINGDETAILS']._serialized_start=25865 + _globals['_SKIPPINGDETAILS']._serialized_end=26012 + _globals['_SKIPPINGDETAILSMSG']._serialized_start=26014 + _globals['_SKIPPINGDETAILSMSG']._serialized_end=26120 + _globals['_NOTHINGTODO']._serialized_start=26122 + _globals['_NOTHINGTODO']._serialized_end=26135 + _globals['_NOTHINGTODOMSG']._serialized_start=26137 + _globals['_NOTHINGTODOMSG']._serialized_end=26235 + _globals['_RUNNINGOPERATIONUNCAUGHTERROR']._serialized_start=26237 + _globals['_RUNNINGOPERATIONUNCAUGHTERROR']._serialized_end=26281 + _globals['_RUNNINGOPERATIONUNCAUGHTERRORMSG']._serialized_start=26284 + _globals['_RUNNINGOPERATIONUNCAUGHTERRORMSG']._serialized_end=26418 + _globals['_ENDRUNRESULT']._serialized_start=26421 + _globals['_ENDRUNRESULT']._serialized_end=26568 + _globals['_ENDRUNRESULTMSG']._serialized_start=26570 + _globals['_ENDRUNRESULTMSG']._serialized_end=26670 + _globals['_NONODESSELECTED']._serialized_start=26672 + _globals['_NONODESSELECTED']._serialized_end=26689 + _globals['_NONODESSELECTEDMSG']._serialized_start=26691 + _globals['_NONODESSELECTEDMSG']._serialized_end=26797 + _globals['_COMMANDCOMPLETED']._serialized_start=26799 + _globals['_COMMANDCOMPLETED']._serialized_end=26918 + _globals['_COMMANDCOMPLETEDMSG']._serialized_start=26920 + _globals['_COMMANDCOMPLETEDMSG']._serialized_end=27028 + _globals['_SHOWNODE']._serialized_start=27030 + _globals['_SHOWNODE']._serialized_end=27137 + _globals['_SHOWNODEMSG']._serialized_start=27139 + _globals['_SHOWNODEMSG']._serialized_end=27231 + _globals['_COMPILEDNODE']._serialized_start=27233 + _globals['_COMPILEDNODE']._serialized_end=27345 + _globals['_COMPILEDNODEMSG']._serialized_start=27347 + _globals['_COMPILEDNODEMSG']._serialized_end=27447 + _globals['_SNAPSHOTTIMESTAMPWARNING']._serialized_start=27449 + _globals['_SNAPSHOTTIMESTAMPWARNING']._serialized_end=27538 + _globals['_SNAPSHOTTIMESTAMPWARNINGMSG']._serialized_start=27540 + _globals['_SNAPSHOTTIMESTAMPWARNINGMSG']._serialized_end=27664 + _globals['_CATCHABLEEXCEPTIONONRUN']._serialized_start=27666 + _globals['_CATCHABLEEXCEPTIONONRUN']._serialized_end=27764 + _globals['_CATCHABLEEXCEPTIONONRUNMSG']._serialized_start=27766 + _globals['_CATCHABLEEXCEPTIONONRUNMSG']._serialized_end=27888 + _globals['_INTERNALERRORONRUN']._serialized_start=27890 + _globals['_INTERNALERRORONRUN']._serialized_end=27985 + _globals['_INTERNALERRORONRUNMSG']._serialized_start=27987 + _globals['_INTERNALERRORONRUNMSG']._serialized_end=28099 + _globals['_GENERICEXCEPTIONONRUN']._serialized_start=28101 + _globals['_GENERICEXCEPTIONONRUN']._serialized_end=28218 + _globals['_GENERICEXCEPTIONONRUNMSG']._serialized_start=28220 + _globals['_GENERICEXCEPTIONONRUNMSG']._serialized_end=28338 + _globals['_NODECONNECTIONRELEASEERROR']._serialized_start=28340 + _globals['_NODECONNECTIONRELEASEERROR']._serialized_end=28418 + _globals['_NODECONNECTIONRELEASEERRORMSG']._serialized_start=28421 + _globals['_NODECONNECTIONRELEASEERRORMSG']._serialized_end=28549 + _globals['_FOUNDSTATS']._serialized_start=28551 + _globals['_FOUNDSTATS']._serialized_end=28582 + _globals['_FOUNDSTATSMSG']._serialized_start=28584 + _globals['_FOUNDSTATSMSG']._serialized_end=28680 + 
_globals['_MAINKEYBOARDINTERRUPT']._serialized_start=28682 + _globals['_MAINKEYBOARDINTERRUPT']._serialized_end=28705 + _globals['_MAINKEYBOARDINTERRUPTMSG']._serialized_start=28707 + _globals['_MAINKEYBOARDINTERRUPTMSG']._serialized_end=28825 + _globals['_MAINENCOUNTEREDERROR']._serialized_start=28827 + _globals['_MAINENCOUNTEREDERROR']._serialized_end=28862 + _globals['_MAINENCOUNTEREDERRORMSG']._serialized_start=28864 + _globals['_MAINENCOUNTEREDERRORMSG']._serialized_end=28980 + _globals['_MAINSTACKTRACE']._serialized_start=28982 + _globals['_MAINSTACKTRACE']._serialized_end=29019 + _globals['_MAINSTACKTRACEMSG']._serialized_start=29021 + _globals['_MAINSTACKTRACEMSG']._serialized_end=29125 + _globals['_TIMINGINFOCOLLECTED']._serialized_start=29127 + _globals['_TIMINGINFOCOLLECTED']._serialized_end=29239 + _globals['_TIMINGINFOCOLLECTEDMSG']._serialized_start=29241 + _globals['_TIMINGINFOCOLLECTEDMSG']._serialized_end=29355 + _globals['_LOGDEBUGSTACKTRACE']._serialized_start=29357 + _globals['_LOGDEBUGSTACKTRACE']._serialized_end=29395 + _globals['_LOGDEBUGSTACKTRACEMSG']._serialized_start=29397 + _globals['_LOGDEBUGSTACKTRACEMSG']._serialized_end=29509 + _globals['_CHECKCLEANPATH']._serialized_start=29511 + _globals['_CHECKCLEANPATH']._serialized_end=29541 + _globals['_CHECKCLEANPATHMSG']._serialized_start=29543 + _globals['_CHECKCLEANPATHMSG']._serialized_end=29647 + _globals['_CONFIRMCLEANPATH']._serialized_start=29649 + _globals['_CONFIRMCLEANPATH']._serialized_end=29681 + _globals['_CONFIRMCLEANPATHMSG']._serialized_start=29683 + _globals['_CONFIRMCLEANPATHMSG']._serialized_end=29791 + _globals['_PROTECTEDCLEANPATH']._serialized_start=29793 + _globals['_PROTECTEDCLEANPATH']._serialized_end=29827 + _globals['_PROTECTEDCLEANPATHMSG']._serialized_start=29829 + _globals['_PROTECTEDCLEANPATHMSG']._serialized_end=29941 + _globals['_FINISHEDCLEANPATHS']._serialized_start=29943 + _globals['_FINISHEDCLEANPATHS']._serialized_end=29963 + _globals['_FINISHEDCLEANPATHSMSG']._serialized_start=29965 + _globals['_FINISHEDCLEANPATHSMSG']._serialized_end=30077 + _globals['_OPENCOMMAND']._serialized_start=30079 + _globals['_OPENCOMMAND']._serialized_end=30132 + _globals['_OPENCOMMANDMSG']._serialized_start=30134 + _globals['_OPENCOMMANDMSG']._serialized_end=30232 + _globals['_SERVINGDOCSPORT']._serialized_start=30234 + _globals['_SERVINGDOCSPORT']._serialized_end=30282 + _globals['_SERVINGDOCSPORTMSG']._serialized_start=30284 + _globals['_SERVINGDOCSPORTMSG']._serialized_end=30390 + _globals['_SERVINGDOCSACCESSINFO']._serialized_start=30392 + _globals['_SERVINGDOCSACCESSINFO']._serialized_end=30429 + _globals['_SERVINGDOCSACCESSINFOMSG']._serialized_start=30431 + _globals['_SERVINGDOCSACCESSINFOMSG']._serialized_end=30549 + _globals['_SERVINGDOCSEXITINFO']._serialized_start=30551 + _globals['_SERVINGDOCSEXITINFO']._serialized_end=30572 + _globals['_SERVINGDOCSEXITINFOMSG']._serialized_start=30574 + _globals['_SERVINGDOCSEXITINFOMSG']._serialized_end=30688 + _globals['_GROUP']._serialized_start=30691 + _globals['_GROUP']._serialized_end=30826 + _globals['_GROUP_OWNERENTRY']._serialized_start=30782 + _globals['_GROUP_OWNERENTRY']._serialized_end=30826 + _globals['_RUNRESULTWARNING']._serialized_start=30829 + _globals['_RUNRESULTWARNING']._serialized_end=30980 + _globals['_RUNRESULTWARNINGMSG']._serialized_start=30982 + _globals['_RUNRESULTWARNINGMSG']._serialized_end=31090 + _globals['_RUNRESULTFAILURE']._serialized_start=31093 + _globals['_RUNRESULTFAILURE']._serialized_end=31244 + 
_globals['_RUNRESULTFAILUREMSG']._serialized_start=31246 + _globals['_RUNRESULTFAILUREMSG']._serialized_end=31354 + _globals['_STATSLINE']._serialized_start=31356 + _globals['_STATSLINE']._serialized_end=31463 + _globals['_STATSLINE_STATSENTRY']._serialized_start=31419 + _globals['_STATSLINE_STATSENTRY']._serialized_end=31463 + _globals['_STATSLINEMSG']._serialized_start=31465 + _globals['_STATSLINEMSG']._serialized_end=31559 + _globals['_RUNRESULTERROR']._serialized_start=31561 + _globals['_RUNRESULTERROR']._serialized_end=31667 + _globals['_RUNRESULTERRORMSG']._serialized_start=31669 + _globals['_RUNRESULTERRORMSG']._serialized_end=31773 + _globals['_RUNRESULTERRORNOMESSAGE']._serialized_start=31775 + _globals['_RUNRESULTERRORNOMESSAGE']._serialized_end=31858 + _globals['_RUNRESULTERRORNOMESSAGEMSG']._serialized_start=31860 + _globals['_RUNRESULTERRORNOMESSAGEMSG']._serialized_end=31982 + _globals['_SQLCOMPILEDPATH']._serialized_start=31984 + _globals['_SQLCOMPILEDPATH']._serialized_end=32057 + _globals['_SQLCOMPILEDPATHMSG']._serialized_start=32059 + _globals['_SQLCOMPILEDPATHMSG']._serialized_end=32165 + _globals['_CHECKNODETESTFAILURE']._serialized_start=32167 + _globals['_CHECKNODETESTFAILURE']._serialized_end=32254 + _globals['_CHECKNODETESTFAILUREMSG']._serialized_start=32256 + _globals['_CHECKNODETESTFAILUREMSG']._serialized_end=32372 + _globals['_ENDOFRUNSUMMARY']._serialized_start=32374 + _globals['_ENDOFRUNSUMMARY']._serialized_end=32461 + _globals['_ENDOFRUNSUMMARYMSG']._serialized_start=32463 + _globals['_ENDOFRUNSUMMARYMSG']._serialized_end=32569 + _globals['_MARKSKIPPEDCHILDREN']._serialized_start=32571 + _globals['_MARKSKIPPEDCHILDREN']._serialized_end=32674 + _globals['_MARKSKIPPEDCHILDRENMSG']._serialized_start=32676 + _globals['_MARKSKIPPEDCHILDRENMSG']._serialized_end=32790 + _globals['_LOGSKIPBECAUSEERROR']._serialized_start=32792 + _globals['_LOGSKIPBECAUSEERROR']._serialized_end=32893 + _globals['_LOGSKIPBECAUSEERRORMSG']._serialized_start=32895 + _globals['_LOGSKIPBECAUSEERRORMSG']._serialized_end=33009 + _globals['_ENSUREGITINSTALLED']._serialized_start=33011 + _globals['_ENSUREGITINSTALLED']._serialized_end=33031 + _globals['_ENSUREGITINSTALLEDMSG']._serialized_start=33033 + _globals['_ENSUREGITINSTALLEDMSG']._serialized_end=33145 + _globals['_DEPSCREATINGLOCALSYMLINK']._serialized_start=33147 + _globals['_DEPSCREATINGLOCALSYMLINK']._serialized_end=33173 + _globals['_DEPSCREATINGLOCALSYMLINKMSG']._serialized_start=33175 + _globals['_DEPSCREATINGLOCALSYMLINKMSG']._serialized_end=33299 + _globals['_DEPSSYMLINKNOTAVAILABLE']._serialized_start=33301 + _globals['_DEPSSYMLINKNOTAVAILABLE']._serialized_end=33326 + _globals['_DEPSSYMLINKNOTAVAILABLEMSG']._serialized_start=33328 + _globals['_DEPSSYMLINKNOTAVAILABLEMSG']._serialized_end=33450 + _globals['_DISABLETRACKING']._serialized_start=33452 + _globals['_DISABLETRACKING']._serialized_end=33469 + _globals['_DISABLETRACKINGMSG']._serialized_start=33471 + _globals['_DISABLETRACKINGMSG']._serialized_end=33577 + _globals['_SENDINGEVENT']._serialized_start=33579 + _globals['_SENDINGEVENT']._serialized_end=33609 + _globals['_SENDINGEVENTMSG']._serialized_start=33611 + _globals['_SENDINGEVENTMSG']._serialized_end=33711 + _globals['_SENDEVENTFAILURE']._serialized_start=33713 + _globals['_SENDEVENTFAILURE']._serialized_end=33731 + _globals['_SENDEVENTFAILUREMSG']._serialized_start=33733 + _globals['_SENDEVENTFAILUREMSG']._serialized_end=33841 + _globals['_FLUSHEVENTS']._serialized_start=33843 + 
_globals['_FLUSHEVENTS']._serialized_end=33856 + _globals['_FLUSHEVENTSMSG']._serialized_start=33858 + _globals['_FLUSHEVENTSMSG']._serialized_end=33956 + _globals['_FLUSHEVENTSFAILURE']._serialized_start=33958 + _globals['_FLUSHEVENTSFAILURE']._serialized_end=33978 + _globals['_FLUSHEVENTSFAILUREMSG']._serialized_start=33980 + _globals['_FLUSHEVENTSFAILUREMSG']._serialized_end=34092 + _globals['_TRACKINGINITIALIZEFAILURE']._serialized_start=34094 + _globals['_TRACKINGINITIALIZEFAILURE']._serialized_end=34139 + _globals['_TRACKINGINITIALIZEFAILUREMSG']._serialized_start=34141 + _globals['_TRACKINGINITIALIZEFAILUREMSG']._serialized_end=34267 + _globals['_RUNRESULTWARNINGMESSAGE']._serialized_start=34269 + _globals['_RUNRESULTWARNINGMESSAGE']._serialized_end=34349 + _globals['_RUNRESULTWARNINGMESSAGEMSG']._serialized_start=34351 + _globals['_RUNRESULTWARNINGMESSAGEMSG']._serialized_end=34473 + _globals['_DEBUGCMDOUT']._serialized_start=34475 + _globals['_DEBUGCMDOUT']._serialized_end=34501 + _globals['_DEBUGCMDOUTMSG']._serialized_start=34503 + _globals['_DEBUGCMDOUTMSG']._serialized_end=34601 + _globals['_DEBUGCMDRESULT']._serialized_start=34603 + _globals['_DEBUGCMDRESULT']._serialized_end=34632 + _globals['_DEBUGCMDRESULTMSG']._serialized_start=34634 + _globals['_DEBUGCMDRESULTMSG']._serialized_end=34738 + _globals['_LISTCMDOUT']._serialized_start=34740 + _globals['_LISTCMDOUT']._serialized_end=34765 + _globals['_LISTCMDOUTMSG']._serialized_start=34767 + _globals['_LISTCMDOUTMSG']._serialized_end=34863 + _globals['_RESOURCEREPORT']._serialized_start=34866 + _globals['_RESOURCEREPORT']._serialized_end=35102 + _globals['_RESOURCEREPORTMSG']._serialized_start=35104 + _globals['_RESOURCEREPORTMSG']._serialized_end=35208 +# @@protoc_insertion_point(module_scope) diff --git a/core/dbt/events/eventmgr.py b/core/dbt/events/eventmgr.py deleted file mode 100644 index e25a29317d1..00000000000 --- a/core/dbt/events/eventmgr.py +++ /dev/null @@ -1,220 +0,0 @@ -import os -from colorama import Style -from dataclasses import dataclass -from datetime import datetime -from enum import Enum -import json -import logging -from logging.handlers import RotatingFileHandler -import threading -import traceback -from typing import Any, Callable, List, Optional, TextIO -from uuid import uuid4 -from dbt.events.format import timestamp_to_datetime_string - -from dbt.events.base_types import BaseEvent, EventLevel, msg_from_base_event, EventMsg -import dbt.utils - -# A Filter is a function which takes a BaseEvent and returns True if the event -# should be logged, False otherwise. -Filter = Callable[[EventMsg], bool] - - -# Default filter which logs every event -def NoFilter(_: EventMsg) -> bool: - return True - - -# A Scrubber removes secrets from an input string, returning a sanitized string. -Scrubber = Callable[[str], str] - - -# Provide a pass-through scrubber implementation, also used as a default -def NoScrubber(s: str) -> str: - return s - - -class LineFormat(Enum): - PlainText = 1 - DebugText = 2 - Json = 3 - - -# Map from dbt event levels to python log levels -_log_level_map = { - EventLevel.DEBUG: 10, - EventLevel.TEST: 10, - EventLevel.INFO: 20, - EventLevel.WARN: 30, - EventLevel.ERROR: 40, -} - - -# We need this function for now because the numeric log severity levels in -# Python do not match those for logbook, so we have to explicitly call the -# correct function by name. 
-def send_to_logger(l, level: str, log_line: str): - if level == "test": - l.debug(log_line) - elif level == "debug": - l.debug(log_line) - elif level == "info": - l.info(log_line) - elif level == "warn": - l.warning(log_line) - elif level == "error": - l.error(log_line) - else: - raise AssertionError( - f"While attempting to log {log_line}, encountered the unhandled level: {level}" - ) - - -@dataclass -class LoggerConfig: - name: str - filter: Filter = NoFilter - scrubber: Scrubber = NoScrubber - line_format: LineFormat = LineFormat.PlainText - level: EventLevel = EventLevel.WARN - use_colors: bool = False - output_stream: Optional[TextIO] = None - output_file_name: Optional[str] = None - output_file_max_bytes: Optional[int] = 10 * 1024 * 1024 # 10 mb - logger: Optional[Any] = None - - -class _Logger: - def __init__(self, event_manager: "EventManager", config: LoggerConfig) -> None: - self.name: str = config.name - self.filter: Filter = config.filter - self.scrubber: Scrubber = config.scrubber - self.level: EventLevel = config.level - self.event_manager: EventManager = event_manager - self._python_logger: Optional[logging.Logger] = config.logger - - if config.output_stream is not None: - stream_handler = logging.StreamHandler(config.output_stream) - self._python_logger = self._get_python_log_for_handler(stream_handler) - - if config.output_file_name: - file_handler = RotatingFileHandler( - filename=str(config.output_file_name), - encoding="utf8", - maxBytes=config.output_file_max_bytes, # type: ignore - backupCount=5, - ) - self._python_logger = self._get_python_log_for_handler(file_handler) - - def _get_python_log_for_handler(self, handler: logging.Handler): - log = logging.getLogger(self.name) - log.setLevel(_log_level_map[self.level]) - handler.setFormatter(logging.Formatter(fmt="%(message)s")) - log.handlers.clear() - log.propagate = False - log.addHandler(handler) - return log - - def create_line(self, msg: EventMsg) -> str: - raise NotImplementedError() - - def write_line(self, msg: EventMsg): - line = self.create_line(msg) - if self._python_logger is not None: - send_to_logger(self._python_logger, msg.info.level, line) - - def flush(self): - if self._python_logger is not None: - for handler in self._python_logger.handlers: - handler.flush() - - -class _TextLogger(_Logger): - def __init__(self, event_manager: "EventManager", config: LoggerConfig) -> None: - super().__init__(event_manager, config) - self.use_colors = config.use_colors - self.use_debug_format = config.line_format == LineFormat.DebugText - - def create_line(self, msg: EventMsg) -> str: - return self.create_debug_line(msg) if self.use_debug_format else self.create_info_line(msg) - - def create_info_line(self, msg: EventMsg) -> str: - ts: str = datetime.utcnow().strftime("%H:%M:%S") - scrubbed_msg: str = self.scrubber(msg.info.msg) # type: ignore - return f"{self._get_color_tag()}{ts} {scrubbed_msg}" - - def create_debug_line(self, msg: EventMsg) -> str: - log_line: str = "" - # Create a separator if this is the beginning of an invocation - # TODO: This is an ugly hack, get rid of it if we can - ts: str = timestamp_to_datetime_string(msg.info.ts) - if msg.info.name == "MainReportVersion": - separator = 30 * "=" - log_line = f"\n\n{separator} {ts} | {self.event_manager.invocation_id} {separator}\n" - scrubbed_msg: str = self.scrubber(msg.info.msg) # type: ignore - level = msg.info.level - log_line += ( - f"{self._get_color_tag()}{ts} [{level:<5}]{self._get_thread_name()} {scrubbed_msg}" - ) - return log_line - - def 
_get_color_tag(self) -> str: - return "" if not self.use_colors else Style.RESET_ALL - - def _get_thread_name(self) -> str: - thread_name = "" - if threading.current_thread().name: - thread_name = threading.current_thread().name - thread_name = thread_name[:10] - thread_name = thread_name.ljust(10, " ") - thread_name = f" [{thread_name}]:" - return thread_name - - -class _JsonLogger(_Logger): - def create_line(self, msg: EventMsg) -> str: - from dbt.events.functions import msg_to_dict - - msg_dict = msg_to_dict(msg) - raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder) - line = self.scrubber(raw_log_line) # type: ignore - return line - - -class EventManager: - def __init__(self) -> None: - self.loggers: List[_Logger] = [] - self.callbacks: List[Callable[[EventMsg], None]] = [] - self.invocation_id: str = str(uuid4()) - - def fire_event(self, e: BaseEvent, level: Optional[EventLevel] = None) -> None: - msg = msg_from_base_event(e, level=level) - - if os.environ.get("DBT_TEST_BINARY_SERIALIZATION"): - print(f"--- {msg.info.name}") - try: - msg.SerializeToString() - except Exception as exc: - raise Exception( - f"{msg.info.name} is not serializable to binary. Originating exception: {exc}, {traceback.format_exc()}" - ) - - for logger in self.loggers: - if logger.filter(msg): # type: ignore - logger.write_line(msg) - - for callback in self.callbacks: - callback(msg) - - def add_logger(self, config: LoggerConfig): - logger = ( - _JsonLogger(self, config) - if config.line_format == LineFormat.Json - else _TextLogger(self, config) - ) - logger.event_manager = self - self.loggers.append(logger) - - def flush(self): - for logger in self.loggers: - logger.flush() diff --git a/core/dbt/events/format.py b/core/dbt/events/format.py deleted file mode 100644 index dad6cf9f355..00000000000 --- a/core/dbt/events/format.py +++ /dev/null @@ -1,56 +0,0 @@ -from dbt import ui -from dbt.node_types import NodeType -from typing import Optional, Union -from datetime import datetime - - -def format_fancy_output_line( - msg: str, - status: str, - index: Optional[int], - total: Optional[int], - execution_time: Optional[float] = None, - truncate: bool = False, -) -> str: - if index is None or total is None: - progress = "" - else: - progress = "{} of {} ".format(index, total) - prefix = "{progress}{message} ".format(progress=progress, message=msg) - - truncate_width = ui.printer_width() - 3 - justified = prefix.ljust(ui.printer_width(), ".") - if truncate and len(justified) > truncate_width: - justified = justified[:truncate_width] + "..." 
- - if execution_time is None: - status_time = "" - else: - status_time = " in {execution_time:0.2f}s".format(execution_time=execution_time) - - output = "{justified} [{status}{status_time}]".format( - justified=justified, status=status, status_time=status_time - ) - - return output - - -def _pluralize(string: Union[str, NodeType]) -> str: - try: - convert = NodeType(string) - except ValueError: - return f"{string}s" - else: - return convert.pluralize() - - -def pluralize(count, string: Union[str, NodeType]): - pluralized: str = str(string) - if count != 1: - pluralized = _pluralize(string) - return f"{count} {pluralized}" - - -def timestamp_to_datetime_string(ts): - timestamp_dt = datetime.fromtimestamp(ts.seconds + ts.nanos / 1e9) - return timestamp_dt.strftime("%H:%M:%S.%f") diff --git a/core/dbt/events/functions.py b/core/dbt/events/functions.py deleted file mode 100644 index b4a7a17ac8a..00000000000 --- a/core/dbt/events/functions.py +++ /dev/null @@ -1,297 +0,0 @@ -from dbt.constants import METADATA_ENV_PREFIX -from dbt.events.base_types import BaseEvent, EventLevel, EventMsg -from dbt.events.eventmgr import EventManager, LoggerConfig, LineFormat, NoFilter -from dbt.events.helpers import env_secrets, scrub_secrets -from dbt.events.types import Formatting, Note -from dbt.flags import get_flags, ENABLE_LEGACY_LOGGER -from dbt.logger import GLOBAL_LOGGER, make_log_dir_if_missing -from functools import partial -import json -import os -import sys -from typing import Callable, Dict, List, Optional, TextIO -import uuid -from google.protobuf.json_format import MessageToDict - -import dbt.utils - -LOG_VERSION = 3 -metadata_vars: Optional[Dict[str, str]] = None - -# These are the logging events issued by the "clean" command, -# where we can't count on having a log directory. We've removed -# the "class" flags on the events in types.py. If necessary we -# could still use class or method flags, but we'd have to get -# the type class from the msg and then get the information from the class. -nofile_codes = ["Z012", "Z013", "Z014", "Z015"] - - -def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = []) -> None: - cleanup_event_logger() - make_log_dir_if_missing(flags.LOG_PATH) - EVENT_MANAGER.callbacks = callbacks.copy() - - if ENABLE_LEGACY_LOGGER: - EVENT_MANAGER.add_logger( - _get_logbook_log_config( - flags.DEBUG, flags.USE_COLORS, flags.LOG_CACHE_EVENTS, flags.QUIET - ) - ) - else: - if flags.LOG_LEVEL != "none": - line_format = _line_format_from_str(flags.LOG_FORMAT, LineFormat.PlainText) - log_level = ( - EventLevel.ERROR - if flags.QUIET - else EventLevel.DEBUG - if flags.DEBUG - else EventLevel(flags.LOG_LEVEL) - ) - console_config = _get_stdout_config( - line_format, - flags.USE_COLORS, - log_level, - flags.LOG_CACHE_EVENTS, - ) - EVENT_MANAGER.add_logger(console_config) - - if _CAPTURE_STREAM: - # Create second stdout logger to support test which want to know what's - # being sent to stdout. 
- console_config.output_stream = _CAPTURE_STREAM - EVENT_MANAGER.add_logger(console_config) - - if flags.LOG_LEVEL_FILE != "none": - # create and add the file logger to the event manager - log_file = os.path.join(flags.LOG_PATH, "dbt.log") - log_file_format = _line_format_from_str(flags.LOG_FORMAT_FILE, LineFormat.DebugText) - log_level_file = EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL_FILE) - EVENT_MANAGER.add_logger( - _get_logfile_config( - log_file, - flags.USE_COLORS_FILE, - log_file_format, - log_level_file, - flags.LOG_FILE_MAX_BYTES, - ) - ) - - -def _line_format_from_str(format_str: str, default: LineFormat) -> LineFormat: - if format_str == "text": - return LineFormat.PlainText - elif format_str == "debug": - return LineFormat.DebugText - elif format_str == "json": - return LineFormat.Json - - return default - - -def _get_stdout_config( - line_format: LineFormat, - use_colors: bool, - level: EventLevel, - log_cache_events: bool, -) -> LoggerConfig: - - return LoggerConfig( - name="stdout_log", - level=level, - use_colors=use_colors, - line_format=line_format, - scrubber=env_scrubber, - filter=partial( - _stdout_filter, - log_cache_events, - line_format, - ), - output_stream=sys.stdout, - ) - - -def _stdout_filter( - log_cache_events: bool, - line_format: LineFormat, - msg: EventMsg, -) -> bool: - return (msg.info.name not in ["CacheAction", "CacheDumpGraph"] or log_cache_events) and not ( - line_format == LineFormat.Json and type(msg.data) == Formatting - ) - - -def _get_logfile_config( - log_path: str, - use_colors: bool, - line_format: LineFormat, - level: EventLevel, - log_file_max_bytes: int, -) -> LoggerConfig: - return LoggerConfig( - name="file_log", - line_format=line_format, - use_colors=use_colors, - level=level, # File log is *always* debug level - scrubber=env_scrubber, - filter=partial(_logfile_filter, bool(get_flags().LOG_CACHE_EVENTS), line_format), - output_file_name=log_path, - output_file_max_bytes=log_file_max_bytes, - ) - - -def _logfile_filter(log_cache_events: bool, line_format: LineFormat, msg: EventMsg) -> bool: - return ( - msg.info.code not in nofile_codes - and not (msg.info.name in ["CacheAction", "CacheDumpGraph"] and not log_cache_events) - and not (line_format == LineFormat.Json and type(msg.data) == Formatting) - ) - - -def _get_logbook_log_config( - debug: bool, use_colors: bool, log_cache_events: bool, quiet: bool -) -> LoggerConfig: - config = _get_stdout_config( - LineFormat.PlainText, - use_colors, - EventLevel.ERROR if quiet else EventLevel.DEBUG if debug else EventLevel.INFO, - log_cache_events, - ) - config.name = "logbook_log" - config.filter = ( - NoFilter - if log_cache_events - else lambda e: e.info.name not in ["CacheAction", "CacheDumpGraph"] - ) - config.logger = GLOBAL_LOGGER - config.output_stream = None - return config - - -def env_scrubber(msg: str) -> str: - return scrub_secrets(msg, env_secrets()) - - -def cleanup_event_logger(): - # Reset to a no-op manager to release streams associated with logs. This is - # especially important for tests, since pytest replaces the stdout stream - # during test runs, and closes the stream after the test is over. - EVENT_MANAGER.loggers.clear() - EVENT_MANAGER.callbacks.clear() - - -# Since dbt-rpc does not do its own log setup, and since some events can -# currently fire before logs can be configured by setup_event_logger(), we -# create a default configuration with default settings and no file output. 
-EVENT_MANAGER: EventManager = EventManager() -EVENT_MANAGER.add_logger( - _get_logbook_log_config(False, True, False, False) # type: ignore - if ENABLE_LEGACY_LOGGER - else _get_stdout_config(LineFormat.PlainText, True, EventLevel.INFO, False) -) - -# This global, and the following two functions for capturing stdout logs are -# an unpleasant hack we intend to remove as part of API-ification. The GitHub -# issue #6350 was opened for that work. -_CAPTURE_STREAM: Optional[TextIO] = None - - -# used for integration tests -def capture_stdout_logs(stream: TextIO): - global _CAPTURE_STREAM - _CAPTURE_STREAM = stream - - -def stop_capture_stdout_logs(): - global _CAPTURE_STREAM - _CAPTURE_STREAM = None - - -# returns a dictionary representation of the event fields. -# the message may contain secrets which must be scrubbed at the usage site. -def msg_to_json(msg: EventMsg) -> str: - msg_dict = msg_to_dict(msg) - raw_log_line = json.dumps(msg_dict, sort_keys=True, cls=dbt.utils.ForgivingJSONEncoder) - return raw_log_line - - -def msg_to_dict(msg: EventMsg) -> dict: - msg_dict = dict() - try: - msg_dict = MessageToDict( - msg, preserving_proto_field_name=True, including_default_value_fields=True # type: ignore - ) - except Exception as exc: - event_type = type(msg).__name__ - fire_event( - Note(msg=f"type {event_type} is not serializable. {str(exc)}"), level=EventLevel.WARN - ) - # We don't want an empty NodeInfo in output - if ( - "data" in msg_dict - and "node_info" in msg_dict["data"] - and msg_dict["data"]["node_info"]["node_name"] == "" - ): - del msg_dict["data"]["node_info"] - return msg_dict - - -def warn_or_error(event, node=None): - flags = get_flags() - if flags.WARN_ERROR or flags.WARN_ERROR_OPTIONS.includes(type(event).__name__): - - # TODO: resolve this circular import when at top - from dbt.exceptions import EventCompilationError - - raise EventCompilationError(event.message(), node) - else: - fire_event(event) - - -# an alternative to fire_event which only creates and logs the event value -# if the condition is met. Does nothing otherwise. -def fire_event_if( - conditional: bool, lazy_e: Callable[[], BaseEvent], level: Optional[EventLevel] = None -) -> None: - if conditional: - fire_event(lazy_e(), level=level) - - -# a special case of fire_event_if, to only fire events in our unit/functional tests -def fire_event_if_test( - lazy_e: Callable[[], BaseEvent], level: Optional[EventLevel] = None -) -> None: - fire_event_if(conditional=("pytest" in sys.modules), lazy_e=lazy_e, level=level) - - -# top-level method for accessing the new eventing system -# this is where all the side effects happen branched by event type -# (i.e. - mutating the event history, printing to stdout, logging -# to files, etc.) -def fire_event(e: BaseEvent, level: Optional[EventLevel] = None) -> None: - EVENT_MANAGER.fire_event(e, level=level) - - -def get_metadata_vars() -> Dict[str, str]: - global metadata_vars - if metadata_vars is None: - metadata_vars = { - k[len(METADATA_ENV_PREFIX) :]: v - for k, v in os.environ.items() - if k.startswith(METADATA_ENV_PREFIX) - } - return metadata_vars - - -def reset_metadata_vars() -> None: - global metadata_vars - metadata_vars = None - - -def get_invocation_id() -> str: - return EVENT_MANAGER.invocation_id - - -def set_invocation_id() -> None: - # This is primarily for setting the invocation_id for separate - # commands in the dbt servers. It shouldn't be necessary for the CLI. 
- EVENT_MANAGER.invocation_id = str(uuid.uuid4()) diff --git a/core/dbt/events/helpers.py b/core/dbt/events/helpers.py deleted file mode 100644 index 0bc351379af..00000000000 --- a/core/dbt/events/helpers.py +++ /dev/null @@ -1,30 +0,0 @@ -import os -from typing import List -from dbt.constants import SECRET_ENV_PREFIX -from datetime import datetime - - -def env_secrets() -> List[str]: - return [v for k, v in os.environ.items() if k.startswith(SECRET_ENV_PREFIX) and v.strip()] - - -def scrub_secrets(msg: str, secrets: List[str]) -> str: - scrubbed = str(msg) - - for secret in secrets: - scrubbed = scrubbed.replace(secret, "*****") - - return scrubbed - - -# This converts a datetime to a json format datetime string which -# is used in constructing protobuf message timestamps. -def datetime_to_json_string(dt: datetime) -> str: - return dt.strftime("%Y-%m-%dT%H:%M:%S.%fZ") - - -# preformatted time stamp -def get_json_string_utcnow() -> str: - ts = datetime.utcnow() - ts_rfc3339 = datetime_to_json_string(ts) - return ts_rfc3339 diff --git a/core/dbt/events/logging.py b/core/dbt/events/logging.py new file mode 100644 index 00000000000..b0db1003e72 --- /dev/null +++ b/core/dbt/events/logging.py @@ -0,0 +1,108 @@ +import os +from functools import partial +from typing import Callable, List + +from dbt.tracking import track_behavior_change_warn +from dbt_common.events.base_types import EventLevel, EventMsg +from dbt_common.events.event_manager_client import ( + add_callback_to_manager, + add_logger_to_manager, + cleanup_event_logger, + get_event_manager, +) +from dbt_common.events.functions import ( + env_scrubber, + get_capture_stream, + get_stdout_config, + make_log_dir_if_missing, +) +from dbt_common.events.logger import LineFormat, LoggerConfig +from dbt_common.invocation import get_invocation_id + +# These are the logging events issued by the "clean" command, +# where we can't count on having a log directory. We've removed +# the "class" flags on the events in types.py. If necessary we +# could still use class or method flags, but we'd have to get +# the type class from the msg and then get the information from the class. 
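The new module's file logger suppresses these "clean"-command events by their codes. As a rough sketch of how a code-based filter such as the _logfile_filter defined just below behaves, with _FakeInfo/_FakeMsg as hypothetical stand-ins for the real protobuf-backed messages:

from dataclasses import dataclass

@dataclass
class _FakeInfo:
    code: str
    name: str

@dataclass
class _FakeMsg:
    info: _FakeInfo

# Same codes as the _NOFILE_CODES constant that follows.
NOFILE_CODES = ["Z012", "Z013", "Z014", "Z015"]

def keep_for_file_log(msg: _FakeMsg, log_cache_events: bool = False) -> bool:
    # Drop events that fire before a log directory exists, and cache chatter
    # unless cache events were explicitly requested.
    if msg.info.code in NOFILE_CODES:
        return False
    if msg.info.name in ("CacheAction", "CacheDumpGraph") and not log_cache_events:
        return False
    return True

# e.g. CheckCleanPath is Z012, so it never reaches the file log:
assert keep_for_file_log(_FakeMsg(_FakeInfo("Z012", "CheckCleanPath"))) is False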
+_NOFILE_CODES = ["Z012", "Z013", "Z014", "Z015"] + + +def _line_format_from_str(format_str: str, default: LineFormat) -> LineFormat: + if format_str == "text": + return LineFormat.PlainText + elif format_str == "debug": + return LineFormat.DebugText + elif format_str == "json": + return LineFormat.Json + + return default + + +def _get_logfile_config( + log_path: str, + use_colors: bool, + line_format: LineFormat, + level: EventLevel, + log_file_max_bytes: int, + log_cache_events: bool = False, +) -> LoggerConfig: + return LoggerConfig( + name="file_log", + line_format=line_format, + use_colors=use_colors, + level=level, # File log is *always* debug level + scrubber=env_scrubber, + filter=partial(_logfile_filter, log_cache_events, line_format), + invocation_id=get_invocation_id(), + output_file_name=log_path, + output_file_max_bytes=log_file_max_bytes, + ) + + +def _logfile_filter(log_cache_events: bool, line_format: LineFormat, msg: EventMsg) -> bool: + return msg.info.code not in _NOFILE_CODES and not ( + msg.info.name in ["CacheAction", "CacheDumpGraph"] and not log_cache_events + ) + + +def setup_event_logger(flags, callbacks: List[Callable[[EventMsg], None]] = []) -> None: + cleanup_event_logger() + make_log_dir_if_missing(flags.LOG_PATH) + event_manager = get_event_manager() + event_manager.callbacks = callbacks.copy() + add_callback_to_manager(track_behavior_change_warn) + + if flags.LOG_LEVEL != "none": + line_format = _line_format_from_str(flags.LOG_FORMAT, LineFormat.PlainText) + log_level = ( + EventLevel.ERROR + if flags.QUIET + else EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL) + ) + console_config = get_stdout_config( + line_format, + flags.USE_COLORS, + log_level, + flags.LOG_CACHE_EVENTS, + ) + + if get_capture_stream(): + # Create second stdout logger to support test which want to know what's + # being sent to stdout. + console_config.output_stream = get_capture_stream() + add_logger_to_manager(console_config) + + if flags.LOG_LEVEL_FILE != "none": + # create and add the file logger to the event manager + log_file = os.path.join(flags.LOG_PATH, "dbt.log") + log_file_format = _line_format_from_str(flags.LOG_FORMAT_FILE, LineFormat.DebugText) + log_level_file = EventLevel.DEBUG if flags.DEBUG else EventLevel(flags.LOG_LEVEL_FILE) + add_logger_to_manager( + _get_logfile_config( + log_file, + flags.USE_COLORS_FILE, + log_file_format, + log_level_file, + flags.LOG_FILE_MAX_BYTES, + ) + ) diff --git a/core/dbt/events/types.py b/core/dbt/events/types.py index a8740d58c7b..dfd95eec6bf 100644 --- a/core/dbt/events/types.py +++ b/core/dbt/events/types.py @@ -1,26 +1,20 @@ import json -from dbt.ui import line_wrap_message, warning_tag, red, green, yellow from dbt.constants import MAXIMUM_SEED_SIZE_NAME, PIN_PACKAGE_URL from dbt.events.base_types import ( - DynamicLevel, DebugLevel, + DynamicLevel, + ErrorLevel, InfoLevel, WarnLevel, - ErrorLevel, - EventLevel, ) -from dbt.events.format import format_fancy_output_line, pluralize, timestamp_to_datetime_string - -from dbt.node_types import NodeType - - -# The classes in this file represent the data necessary to describe a -# particular event to both human readable logs, and machine reliable -# event streams. classes extend superclasses that indicate what -# destinations they are intended for, which mypy uses to enforce -# that the necessary methods are defined. 
- +from dbt_common.events.base_types import EventLevel +from dbt_common.events.format import ( + format_fancy_output_line, + pluralize, + timestamp_to_datetime_string, +) +from dbt_common.ui import error_tag, green, line_wrap_message, red, warning_tag, yellow # Event codes have prefixes which follow this table # @@ -39,53 +33,40 @@ # # The basic idea is that event codes roughly translate to the natural order of running a dbt task - -def format_adapter_message(name, base_msg, args) -> str: - # only apply formatting if there are arguments to format. - # avoids issues like "dict: {k: v}".format() which results in `KeyError 'k'` - msg = base_msg if len(args) == 0 else base_msg.format(*args) - return f"{name} adapter: {msg}" - - # ======================================================= # A - Pre-project loading # ======================================================= class MainReportVersion(InfoLevel): - def code(self): + def code(self) -> str: return "A001" - def message(self): + def message(self) -> str: return f"Running with dbt{self.version}" class MainReportArgs(DebugLevel): - def code(self): + def code(self) -> str: return "A002" - def message(self): + def message(self) -> str: return f"running dbt with arguments {str(self.args)}" class MainTrackingUserState(DebugLevel): - def code(self): + def code(self) -> str: return "A003" - def message(self): + def message(self) -> str: return f"Tracking: {self.user_state}" -class MergedFromState(DebugLevel): - def code(self): - return "A004" - - def message(self) -> str: - return f"Merged {self.num_merged} items from state (sample: {self.sample})" +# Removed A004: MergedFromState class MissingProfileTarget(InfoLevel): - def code(self): + def code(self) -> str: return "A005" def message(self) -> str: @@ -96,7 +77,7 @@ def message(self) -> str: class InvalidOptionYAML(ErrorLevel): - def code(self): + def code(self) -> str: return "A008" def message(self) -> str: @@ -104,7 +85,7 @@ def message(self) -> str: class LogDbtProjectError(ErrorLevel): - def code(self): + def code(self) -> str: return "A009" def message(self) -> str: @@ -118,7 +99,7 @@ def message(self) -> str: class LogDbtProfileError(ErrorLevel): - def code(self): + def code(self) -> str: return "A011" def message(self) -> str: @@ -139,7 +120,7 @@ def message(self) -> str: class StarterProjectPath(DebugLevel): - def code(self): + def code(self) -> str: return "A017" def message(self) -> str: @@ -147,7 +128,7 @@ def message(self) -> str: class ConfigFolderDirectory(InfoLevel): - def code(self): + def code(self) -> str: return "A018" def message(self) -> str: @@ -155,7 +136,7 @@ def message(self) -> str: class NoSampleProfileFound(InfoLevel): - def code(self): + def code(self) -> str: return "A019" def message(self) -> str: @@ -163,7 +144,7 @@ def message(self) -> str: class ProfileWrittenWithSample(InfoLevel): - def code(self): + def code(self) -> str: return "A020" def message(self) -> str: @@ -175,7 +156,7 @@ def message(self) -> str: class ProfileWrittenWithTargetTemplateYAML(InfoLevel): - def code(self): + def code(self) -> str: return "A021" def message(self) -> str: @@ -187,7 +168,7 @@ def message(self) -> str: class ProfileWrittenWithProjectTemplateYAML(InfoLevel): - def code(self): + def code(self) -> str: return "A022" def message(self) -> str: @@ -199,7 +180,7 @@ def message(self) -> str: class SettingUpProfile(InfoLevel): - def code(self): + def code(self) -> str: return "A023" def message(self) -> str: @@ -207,7 +188,7 @@ def message(self) -> str: class 
InvalidProfileTemplateYAML(InfoLevel): - def code(self): + def code(self) -> str: return "A024" def message(self) -> str: @@ -215,7 +196,7 @@ def message(self) -> str: class ProjectNameAlreadyExists(InfoLevel): - def code(self): + def code(self) -> str: return "A025" def message(self) -> str: @@ -223,7 +204,7 @@ def message(self) -> str: class ProjectCreated(InfoLevel): - def code(self): + def code(self) -> str: return "A026" def message(self) -> str: @@ -250,11 +231,24 @@ def message(self) -> str: # ======================================================= +class DeprecatedModel(WarnLevel): + def code(self) -> str: + return "I065" + + def message(self) -> str: + version = ".v" + self.model_version if self.model_version else "" + msg = ( + f"Model {self.model_name}{version} has passed its deprecation date of {self.deprecation_date}. " + "This model should be disabled or removed." + ) + return warning_tag(msg) + + class PackageRedirectDeprecation(WarnLevel): - def code(self): + def code(self) -> str: return "D001" - def message(self): + def message(self) -> str: description = ( f"The `{self.old_name}` package is deprecated in favor of `{self.new_name}`. Please " f"update your `packages.yml` configuration to use `{self.new_name}` instead." @@ -263,10 +257,10 @@ def message(self): class PackageInstallPathDeprecation(WarnLevel): - def code(self): + def code(self) -> str: return "D002" - def message(self): + def message(self) -> str: description = """\ The default package install path has changed from `dbt_modules` to `dbt_packages`. Please update `clean-targets` in `dbt_project.yml` and check `.gitignore` as well. @@ -276,10 +270,10 @@ def message(self): class ConfigSourcePathDeprecation(WarnLevel): - def code(self): + def code(self) -> str: return "D003" - def message(self): + def message(self) -> str: description = ( f"The `{self.deprecated_path}` config has been renamed to `{self.exp_path}`. " "Please update your `dbt_project.yml` configuration to reflect this change." @@ -288,10 +282,10 @@ def message(self): class ConfigDataPathDeprecation(WarnLevel): - def code(self): + def code(self) -> str: return "D004" - def message(self): + def message(self) -> str: description = ( f"The `{self.deprecated_path}` config has been renamed to `{self.exp_path}`. " "Please update your `dbt_project.yml` configuration to reflect this change." @@ -299,25 +293,11 @@ def message(self): return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}")) -class AdapterDeprecationWarning(WarnLevel): - def code(self): - return "D005" - - def message(self): - description = ( - f"The adapter function `adapter.{self.old_name}` is deprecated and will be removed in " - f"a future release of dbt. Please use `adapter.{self.new_name}` instead. " - f"\n\nDocumentation for {self.new_name} can be found here:" - f"\n\nhttps://docs.getdbt.com/docs/adapter" - ) - return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}")) - - class MetricAttributesRenamed(WarnLevel): - def code(self): + def code(self) -> str: return "D006" - def message(self): + def message(self) -> str: description = ( "dbt-core v1.3 renamed attributes for metrics:" "\n 'sql' -> 'expression'" @@ -331,10 +311,10 @@ def message(self): class ExposureNameDeprecation(WarnLevel): - def code(self): + def code(self) -> str: return "D007" - def message(self): + def message(self) -> str: description = ( "Starting in v1.3, the 'name' of an exposure should contain only letters, " "numbers, and underscores. 
Exposures support a new property, 'label', which may " @@ -346,10 +326,10 @@ def message(self): class InternalDeprecation(WarnLevel): - def code(self): + def code(self) -> str: return "D008" - def message(self): + def message(self) -> str: extra_reason = "" if self.reason: extra_reason = f"\n{self.reason}" @@ -361,10 +341,10 @@ def message(self): class EnvironmentVariableRenamed(WarnLevel): - def code(self): + def code(self) -> str: return "D009" - def message(self): + def message(self) -> str: description = ( f"The environment variable `{self.old_name}` has been renamed as `{self.new_name}`.\n" f"If `{self.old_name}` is currently set, its value will be used instead of `{self.new_name}`.\n" @@ -375,10 +355,10 @@ def message(self): class ConfigLogPathDeprecation(WarnLevel): - def code(self): + def code(self) -> str: return "D010" - def message(self): + def message(self) -> str: output = "logs" cli_flag = "--log-path" env_var = "DBT_LOG_PATH" @@ -392,10 +372,10 @@ def message(self): class ConfigTargetPathDeprecation(WarnLevel): - def code(self): + def code(self) -> str: return "D011" - def message(self): + def message(self) -> str: output = "artifacts" cli_flag = "--target-path" env_var = "DBT_TARGET_PATH" @@ -408,393 +388,82 @@ def message(self): return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}")) -class CollectFreshnessReturnSignature(WarnLevel): - def code(self): +# Note: this deprecation has been removed, but we are leaving +# the event class here, because users may have specified it in +# warn_error_options. +class TestsConfigDeprecation(WarnLevel): + def code(self) -> str: return "D012" - def message(self): + def message(self) -> str: description = ( - "The 'collect_freshness' macro signature has changed to return the full " - "query result, rather than just a table of values. See the v1.5 migration guide " - "for details on how to update your custom macro: https://docs.getdbt.com/guides/migration/versions/upgrading-to-v1.5" + f"The `{self.deprecated_path}` config has been renamed to `{self.exp_path}`. " + "Please see https://docs.getdbt.com/docs/build/data-tests#new-data_tests-syntax for more information." 
) return line_wrap_message(warning_tag(f"Deprecated functionality\n\n{description}")) -# ======================================================= -# E - DB Adapter -# ======================================================= - - -class AdapterEventDebug(DebugLevel): - def code(self): - return "E001" - - def message(self): - return format_adapter_message(self.name, self.base_msg, self.args) - - -class AdapterEventInfo(InfoLevel): - def code(self): - return "E002" - - def message(self): - return format_adapter_message(self.name, self.base_msg, self.args) - - -class AdapterEventWarning(WarnLevel): - def code(self): - return "E003" - - def message(self): - return format_adapter_message(self.name, self.base_msg, self.args) - - -class AdapterEventError(ErrorLevel): - def code(self): - return "E004" - - def message(self): - return format_adapter_message(self.name, self.base_msg, self.args) - - -class NewConnection(DebugLevel): - def code(self): - return "E005" - - def message(self) -> str: - return f"Acquiring new {self.conn_type} connection '{self.conn_name}'" - - -class ConnectionReused(DebugLevel): - def code(self): - return "E006" - - def message(self) -> str: - return f"Re-using an available connection from the pool (formerly {self.orig_conn_name}, now {self.conn_name})" - - -class ConnectionLeftOpenInCleanup(DebugLevel): - def code(self): - return "E007" - - def message(self) -> str: - return f"Connection '{self.conn_name}' was left open." - - -class ConnectionClosedInCleanup(DebugLevel): - def code(self): - return "E008" - - def message(self) -> str: - return f"Connection '{self.conn_name}' was properly closed." - - -class RollbackFailed(DebugLevel): - def code(self): - return "E009" - - def message(self) -> str: - return f"Failed to rollback '{self.conn_name}'" - - -class ConnectionClosed(DebugLevel): - def code(self): - return "E010" - - def message(self) -> str: - return f"On {self.conn_name}: Close" - - -class ConnectionLeftOpen(DebugLevel): - def code(self): - return "E011" - - def message(self) -> str: - return f"On {self.conn_name}: No close available on handle" - - -class Rollback(DebugLevel): - def code(self): - return "E012" - - def message(self) -> str: - return f"On {self.conn_name}: ROLLBACK" - - -class CacheMiss(DebugLevel): - def code(self): - return "E013" - - def message(self) -> str: - return ( - f'On "{self.conn_name}": cache miss for schema ' - f'"{self.database}.{self.schema}", this is inefficient' - ) - - -class ListRelations(DebugLevel): - def code(self): - return "E014" - - def message(self) -> str: - identifiers_str = ", ".join(r.identifier for r in self.relations) - return f"While listing relations in database={self.database}, schema={self.schema}, found: {identifiers_str}" - - -class ConnectionUsed(DebugLevel): - def code(self): - return "E015" - - def message(self) -> str: - return f'Using {self.conn_type} connection "{self.conn_name}"' - - -class SQLQuery(DebugLevel): - def code(self): - return "E016" - - def message(self) -> str: - return f"On {self.conn_name}: {self.sql}" - - -class SQLQueryStatus(DebugLevel): - def code(self): - return "E017" - - def message(self) -> str: - return f"SQL status: {self.status} in {self.elapsed} seconds" - - -class SQLCommit(DebugLevel): - def code(self): - return "E018" - - def message(self) -> str: - return f"On {self.conn_name}: COMMIT" - - -class ColTypeChange(DebugLevel): - def code(self): - return "E019" - - def message(self) -> str: - return f"Changing col type from {self.orig_type} to {self.new_type} in table {self.table}" 
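Kept or removed, every event class in this file follows the same contract: a level base class plus a code() and a message() method. A compressed, self-contained sketch of that shape, where the InfoLevel stub and the example event are simplified stand-ins rather than dbt's real proto-backed types:

class InfoLevel:
    # Simplified stand-in for dbt's InfoLevel base; real events are protobuf-backed.
    def __init__(self, **fields):
        self.__dict__.update(fields)

class ExampleCatalogWritten(InfoLevel):
    # Hypothetical event used only to show the code()/message() contract.
    def code(self) -> str:
        return "X999"

    def message(self) -> str:
        return f"Catalog written to {self.path}"

# Loggers key off code() for filtering and call message() to render the line:
print(ExampleCatalogWritten(path="target/catalog.json").message())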
- - -class SchemaCreation(DebugLevel): - def code(self): - return "E020" - - def message(self) -> str: - return f'Creating schema "{self.relation}"' - - -class SchemaDrop(DebugLevel): - def code(self): - return "E021" - - def message(self) -> str: - return f'Dropping schema "{self.relation}".' - - -class CacheAction(DebugLevel): - def code(self): - return "E022" - - def format_ref_key(self, ref_key): - return f"(database={ref_key.database}, schema={ref_key.schema}, identifier={ref_key.identifier})" - - def message(self): - ref_key = self.format_ref_key(self.ref_key) - ref_key_2 = self.format_ref_key(self.ref_key_2) - ref_key_3 = self.format_ref_key(self.ref_key_3) - ref_list = [] - for rfk in self.ref_list: - ref_list.append(self.format_ref_key(rfk)) - if self.action == "add_link": - return f"adding link, {ref_key} references {ref_key_2}" - elif self.action == "add_relation": - return f"adding relation: {ref_key}" - elif self.action == "drop_missing_relation": - return f"dropped a nonexistent relationship: {ref_key}" - elif self.action == "drop_cascade": - return f"drop {ref_key} is cascading to {ref_list}" - elif self.action == "drop_relation": - return f"Dropping relation: {ref_key}" - elif self.action == "update_reference": - return ( - f"updated reference from {ref_key} -> {ref_key_3} to " - f"{ref_key_2} -> {ref_key_3}" - ) - elif self.action == "temporary_relation": - return f"old key {ref_key} not found in self.relations, assuming temporary" - elif self.action == "rename_relation": - return f"Renaming relation {ref_key} to {ref_key_2}" - elif self.action == "uncached_relation": - return ( - f"{ref_key_2} references {ref_key} " - f"but {self.ref_key.database}.{self.ref_key.schema}" - "is not in the cache, skipping assumed external relation" - ) - else: - return ref_key - - -# Skipping E023, E024, E025, E026, E027, E028, E029, E030 - - -class CacheDumpGraph(DebugLevel): - def code(self): - return "E031" +class ProjectFlagsMovedDeprecation(WarnLevel): + def code(self) -> str: + return "D013" def message(self) -> str: - return f"dump {self.before_after} {self.action} : {self.dump}" - - -# Skipping E032, E033, E034 - - -class AdapterRegistered(InfoLevel): - def code(self): - return "E034" - - def message(self) -> str: - return f"Registered adapter: {self.adapter_name}{self.adapter_version}" - - -class AdapterImportError(InfoLevel): - def code(self): - return "E035" - - def message(self) -> str: - return f"Error importing adapter: {self.exc}" - - -class PluginLoadError(DebugLevel): - def code(self): - return "E036" - - def message(self): - return f"{self.exc_info}" - - -class NewConnectionOpening(DebugLevel): - def code(self): - return "E037" - - def message(self) -> str: - return f"Opening a new connection, currently in state {self.connection_state}" - - -class CodeExecution(DebugLevel): - def code(self): - return "E038" - - def message(self) -> str: - return f"On {self.conn_name}: {self.code_content}" - - -class CodeExecutionStatus(DebugLevel): - def code(self): - return "E039" - - def message(self) -> str: - return f"Execution status: {self.status} in {self.elapsed} seconds" - - -class CatalogGenerationError(WarnLevel): - def code(self): - return "E040" - - def message(self) -> str: - return f"Encountered an error while generating catalog: {self.exc}" - - -class WriteCatalogFailure(ErrorLevel): - def code(self): - return "E041" - - def message(self) -> str: - return ( - f"dbt encountered {self.num_exceptions} failure{(self.num_exceptions != 1) * 's'} " - "while writing the catalog" + 
description = ( + "User config should be moved from the 'config' key in profiles.yml to the 'flags' " + "key in dbt_project.yml." ) + # Can't use line_wrap_message here because flags.printer_width isn't available yet + return warning_tag(f"Deprecated functionality\n\n{description}") -class CatalogWritten(InfoLevel): - def code(self): - return "E042" +class SpacesInResourceNameDeprecation(DynamicLevel): + def code(self) -> str: + return "D014" def message(self) -> str: - return f"Catalog written to {self.path}" + description = f"Found spaces in the name of `{self.unique_id}`" + if self.level == EventLevel.ERROR.value: + description = error_tag(description) + elif self.level == EventLevel.WARN.value: + description = warning_tag(description) -class CannotGenerateDocs(InfoLevel): - def code(self): - return "E043" + return line_wrap_message(description) - def message(self) -> str: - return "compile failed, cannot generate docs" - -class BuildingCatalog(InfoLevel): - def code(self): - return "E044" +class ResourceNamesWithSpacesDeprecation(WarnLevel): + def code(self) -> str: + return "D015" def message(self) -> str: - return "Building catalog" + description = f"Spaces found in {self.count_invalid_names} resource name(s). This is deprecated, and may lead to errors when using dbt." + if self.show_debug_hint: + description += " Run again with `--debug` to see them all." -class DatabaseErrorRunningHook(InfoLevel): - def code(self): - return "E045" + description += " For more information: https://docs.getdbt.com/reference/global-configs/legacy-behaviors" - def message(self) -> str: - return f"Database error while running {self.hook_type}" + return line_wrap_message(warning_tag(description)) -class HooksRunning(InfoLevel): - def code(self): - return "E046" +class PackageMaterializationOverrideDeprecation(WarnLevel): + def code(self) -> str: + return "D016" def message(self) -> str: - plural = "hook" if self.num_hooks == 1 else "hooks" - return f"Running {self.num_hooks} {self.hook_type} {plural}" + description = f"Installed package '{self.package_name}' is overriding the built-in materialization '{self.materialization_name}'. Overrides of built-in materializations from installed packages will be deprecated in future versions of dbt. For more information: https://docs.getdbt.com/reference/global-configs/legacy-behaviors" - -class FinishedRunningStats(InfoLevel): - def code(self): - return "E047" - - def message(self) -> str: - return f"Finished running {self.stat_line}{self.execution} ({self.execution_time:0.2f}s)." + return line_wrap_message(warning_tag(description)) -class ConstraintNotEnforced(WarnLevel): - def code(self): - return "E048" +class SourceFreshnessProjectHooksNotRun(WarnLevel): + def code(self) -> str: + return "D017" def message(self) -> str: - msg = ( - f"The constraint type {self.constraint} is not enforced by {self.adapter}. " - "The constraint will be included in this model's DDL statement, but it will not " - "guarantee anything about the underlying data. Set 'warn_unenforced: false' on " - "this constraint to ignore this warning." - ) - return line_wrap_message(warning_tag(msg)) + description = "In a future version of dbt, the `source freshness` command will start running `on-run-start` and `on-run-end` hooks by default. 
For more information: https://docs.getdbt.com/reference/global-configs/legacy-behaviors" - -class ConstraintNotSupported(WarnLevel): - def code(self): - return "E049" - - def message(self) -> str: - msg = ( - f"The constraint type {self.constraint} is not supported by {self.adapter}, and will " - "be ignored. Set 'warn_unsupported: false' on this constraint to ignore this warning." - ) - return line_wrap_message(warning_tag(msg)) + return line_wrap_message(warning_tag(description)) # ======================================================= @@ -803,7 +472,7 @@ def message(self) -> str: class InputFileDiffError(DebugLevel): - def code(self): + def code(self) -> str: return "I001" def message(self) -> str: @@ -814,7 +483,7 @@ def message(self) -> str: class InvalidValueForField(WarnLevel): - def code(self): + def code(self) -> str: return "I008" def message(self) -> str: @@ -822,7 +491,7 @@ def message(self) -> str: class ValidationWarning(WarnLevel): - def code(self): + def code(self) -> str: return "I009" def message(self) -> str: @@ -830,7 +499,7 @@ def message(self) -> str: class ParsePerfInfoPath(InfoLevel): - def code(self): + def code(self) -> str: return "I010" def message(self) -> str: @@ -847,7 +516,7 @@ def message(self) -> str: class PartialParsingErrorProcessingFile(DebugLevel): - def code(self): + def code(self) -> str: return "I014" def message(self) -> str: @@ -858,7 +527,7 @@ def message(self) -> str: class PartialParsingError(DebugLevel): - def code(self): + def code(self) -> str: return "I016" def message(self) -> str: @@ -866,7 +535,7 @@ def message(self) -> str: class PartialParsingSkipParsing(DebugLevel): - def code(self): + def code(self) -> str: return "I017" def message(self) -> str: @@ -877,7 +546,7 @@ def message(self) -> str: class UnableToPartialParse(InfoLevel): - def code(self): + def code(self) -> str: return "I024" def message(self) -> str: @@ -885,7 +554,7 @@ def message(self) -> str: class StateCheckVarsHash(DebugLevel): - def code(self): + def code(self) -> str: return "I025" def message(self) -> str: @@ -896,7 +565,7 @@ def message(self) -> str: class PartialParsingNotEnabled(DebugLevel): - def code(self): + def code(self) -> str: return "I028" def message(self) -> str: @@ -904,7 +573,7 @@ def message(self) -> str: class ParsedFileLoadFailed(DebugLevel): - def code(self): + def code(self) -> str: return "I029" def message(self) -> str: @@ -915,7 +584,7 @@ def message(self) -> str: class PartialParsingEnabled(DebugLevel): - def code(self): + def code(self) -> str: return "I040" def message(self) -> str: @@ -928,7 +597,7 @@ def message(self) -> str: class PartialParsingFile(DebugLevel): - def code(self): + def code(self) -> str: return "I041" def message(self) -> str: @@ -939,7 +608,7 @@ def message(self) -> str: class InvalidDisabledTargetInTestNode(DebugLevel): - def code(self): + def code(self) -> str: return "I050" def message(self) -> str: @@ -958,7 +627,7 @@ def message(self) -> str: class UnusedResourceConfigPath(WarnLevel): - def code(self): + def code(self) -> str: return "I051" def message(self) -> str: @@ -972,7 +641,7 @@ def message(self) -> str: class SeedIncreased(WarnLevel): - def code(self): + def code(self) -> str: return "I052" def message(self) -> str: @@ -985,7 +654,7 @@ def message(self) -> str: class SeedExceedsLimitSamePath(WarnLevel): - def code(self): + def code(self) -> str: return "I053" def message(self) -> str: @@ -998,7 +667,7 @@ def message(self) -> str: class SeedExceedsLimitAndPathChanged(WarnLevel): - def code(self): + def 
code(self) -> str: return "I054" def message(self) -> str: @@ -1011,7 +680,7 @@ def message(self) -> str: class SeedExceedsLimitChecksumChanged(WarnLevel): - def code(self): + def code(self) -> str: return "I055" def message(self) -> str: @@ -1024,7 +693,7 @@ def message(self) -> str: class UnusedTables(WarnLevel): - def code(self): + def code(self) -> str: return "I056" def message(self) -> str: @@ -1037,7 +706,7 @@ def message(self) -> str: class WrongResourceSchemaFile(WarnLevel): - def code(self): + def code(self) -> str: return "I057" def message(self) -> str: @@ -1054,7 +723,7 @@ def message(self) -> str: class NoNodeForYamlKey(WarnLevel): - def code(self): + def code(self) -> str: return "I058" def message(self) -> str: @@ -1067,7 +736,7 @@ def message(self) -> str: class MacroNotFoundForPatch(WarnLevel): - def code(self): + def code(self) -> str: return "I059" def message(self) -> str: @@ -1076,13 +745,13 @@ def message(self) -> str: class NodeNotFoundOrDisabled(WarnLevel): - def code(self): + def code(self) -> str: return "I060" def message(self) -> str: # this is duplicated logic from exceptions.get_not_found_or_disabled_msg - # when we convert exceptions to be stuctured maybe it can be combined? - # convverting the bool to a string since None is also valid + # when we convert exceptions to be structured maybe it can be combined? + # converting the bool to a string since None is also valid if self.disabled == "None": reason = "was not found or is disabled" elif self.disabled == "True": @@ -1105,7 +774,7 @@ def message(self) -> str: class JinjaLogWarning(WarnLevel): - def code(self): + def code(self) -> str: return "I061" def message(self) -> str: @@ -1113,7 +782,7 @@ def message(self) -> str: class JinjaLogInfo(InfoLevel): - def code(self): + def code(self) -> str: return "I062" def message(self) -> str: @@ -1122,7 +791,7 @@ def message(self) -> str: class JinjaLogDebug(DebugLevel): - def code(self): + def code(self) -> str: return "I063" def message(self) -> str: @@ -1131,7 +800,7 @@ def message(self) -> str: class UnpinnedRefNewVersionAvailable(InfoLevel): - def code(self): + def code(self) -> str: return "I064" def message(self) -> str: @@ -1147,21 +816,8 @@ def message(self) -> str: return msg -class DeprecatedModel(WarnLevel): - def code(self): - return "I065" - - def message(self) -> str: - version = ".v" + self.model_version if self.model_version else "" - msg = ( - f"Model {self.model_name}{version} has passed its deprecation date of {self.deprecation_date}. " - "This model should be disabled or removed." 
- ) - return warning_tag(msg) - - class UpcomingReferenceDeprecation(WarnLevel): - def code(self): + def code(self) -> str: return "I066" def message(self) -> str: @@ -1183,7 +839,7 @@ def message(self) -> str: class DeprecatedReference(WarnLevel): - def code(self): + def code(self) -> str: return "I067" def message(self) -> str: @@ -1205,7 +861,7 @@ def message(self) -> str: class UnsupportedConstraintMaterialization(WarnLevel): - def code(self): + def code(self) -> str: return "I068" def message(self) -> str: @@ -1218,7 +874,7 @@ def message(self) -> str: class ParseInlineNodeError(ErrorLevel): - def code(self): + def code(self) -> str: return "I069" def message(self) -> str: @@ -1226,20 +882,55 @@ def message(self) -> str: class SemanticValidationFailure(WarnLevel): - def code(self): + def code(self) -> str: return "I070" def message(self) -> str: return self.msg +class UnversionedBreakingChange(WarnLevel): + def code(self) -> str: + return "I071" + + def message(self) -> str: + reasons = "\n - ".join(self.breaking_changes) + + msg = ( + f"Breaking change to contracted, unversioned model {self.model_name} ({self.model_file_path})" + "\nWhile comparing to previous project state, dbt detected a breaking change to an unversioned model." + f"\n - {reasons}\n" + ) + + return warning_tag(msg) + + +class WarnStateTargetEqual(WarnLevel): + def code(self) -> str: + return "I072" + + def message(self) -> str: + return yellow( + f"Warning: The state and target directories are the same: '{self.state_path}'. " + f"This could lead to missing changes due to overwritten state including non-idempotent retries." + ) + + +class FreshnessConfigProblem(WarnLevel): + def code(self) -> str: + return "I073" + + def message(self) -> str: + return self.msg + + # ======================================================= # M - Deps generation # ======================================================= class GitSparseCheckoutSubdirectory(DebugLevel): - def code(self): + def code(self) -> str: return "M001" def message(self) -> str: @@ -1247,7 +938,7 @@ def message(self) -> str: class GitProgressCheckoutRevision(DebugLevel): - def code(self): + def code(self) -> str: return "M002" def message(self) -> str: @@ -1255,7 +946,7 @@ def message(self) -> str: class GitProgressUpdatingExistingDependency(DebugLevel): - def code(self): + def code(self) -> str: return "M003" def message(self) -> str: @@ -1263,7 +954,7 @@ def message(self) -> str: class GitProgressPullingNewDependency(DebugLevel): - def code(self): + def code(self) -> str: return "M004" def message(self) -> str: @@ -1271,7 +962,7 @@ def message(self) -> str: class GitNothingToDo(DebugLevel): - def code(self): + def code(self) -> str: return "M005" def message(self) -> str: @@ -1279,7 +970,7 @@ def message(self) -> str: class GitProgressUpdatedCheckoutRange(DebugLevel): - def code(self): + def code(self) -> str: return "M006" def message(self) -> str: @@ -1287,7 +978,7 @@ def message(self) -> str: class GitProgressCheckedOutAt(DebugLevel): - def code(self): + def code(self) -> str: return "M007" def message(self) -> str: @@ -1295,7 +986,7 @@ def message(self) -> str: class RegistryProgressGETRequest(DebugLevel): - def code(self): + def code(self) -> str: return "M008" def message(self) -> str: @@ -1303,7 +994,7 @@ def message(self) -> str: class RegistryProgressGETResponse(DebugLevel): - def code(self): + def code(self) -> str: return "M009" def message(self) -> str: @@ -1311,7 +1002,7 @@ def message(self) -> str: class SelectorReportInvalidSelector(InfoLevel): - 
def code(self): + def code(self) -> str: return "M010" def message(self) -> str: @@ -1322,7 +1013,7 @@ def message(self) -> str: class DepsNoPackagesFound(InfoLevel): - def code(self): + def code(self) -> str: return "M013" def message(self) -> str: @@ -1330,7 +1021,7 @@ def message(self) -> str: class DepsStartPackageInstall(InfoLevel): - def code(self): + def code(self) -> str: return "M014" def message(self) -> str: @@ -1338,7 +1029,7 @@ def message(self) -> str: class DepsInstallInfo(InfoLevel): - def code(self): + def code(self) -> str: return "M015" def message(self) -> str: @@ -1346,7 +1037,7 @@ def message(self) -> str: class DepsUpdateAvailable(InfoLevel): - def code(self): + def code(self) -> str: return "M016" def message(self) -> str: @@ -1354,7 +1045,7 @@ def message(self) -> str: class DepsUpToDate(InfoLevel): - def code(self): + def code(self) -> str: return "M017" def message(self) -> str: @@ -1362,7 +1053,7 @@ def message(self) -> str: class DepsListSubdirectory(InfoLevel): - def code(self): + def code(self) -> str: return "M018" def message(self) -> str: @@ -1370,7 +1061,7 @@ def message(self) -> str: class DepsNotifyUpdatesAvailable(InfoLevel): - def code(self): + def code(self) -> str: return "M019" def message(self) -> str: @@ -1378,24 +1069,8 @@ def message(self) -> str: \nUpdate your versions in packages.yml, then run dbt deps" -class RetryExternalCall(DebugLevel): - def code(self): - return "M020" - - def message(self) -> str: - return f"Retrying external call. Attempt: {self.attempt} Max attempts: {self.max}" - - -class RecordRetryException(DebugLevel): - def code(self): - return "M021" - - def message(self) -> str: - return f"External call exception: {self.exc}" - - class RegistryIndexProgressGETRequest(DebugLevel): - def code(self): + def code(self) -> str: return "M022" def message(self) -> str: @@ -1403,7 +1078,7 @@ def message(self) -> str: class RegistryIndexProgressGETResponse(DebugLevel): - def code(self): + def code(self) -> str: return "M023" def message(self) -> str: @@ -1411,7 +1086,7 @@ def message(self) -> str: class RegistryResponseUnexpectedType(DebugLevel): - def code(self): + def code(self) -> str: return "M024" def message(self) -> str: @@ -1419,7 +1094,7 @@ def message(self) -> str: class RegistryResponseMissingTopKeys(DebugLevel): - def code(self): + def code(self) -> str: return "M025" def message(self) -> str: @@ -1428,7 +1103,7 @@ def message(self) -> str: class RegistryResponseMissingNestedKeys(DebugLevel): - def code(self): + def code(self) -> str: return "M026" def message(self) -> str: @@ -1437,7 +1112,7 @@ def message(self) -> str: class RegistryResponseExtraNestedKeys(DebugLevel): - def code(self): + def code(self) -> str: return "M027" def message(self) -> str: @@ -1446,7 +1121,7 @@ def message(self) -> str: class DepsSetDownloadDirectory(DebugLevel): - def code(self): + def code(self) -> str: return "M028" def message(self) -> str: @@ -1454,7 +1129,7 @@ def message(self) -> str: class DepsUnpinned(WarnLevel): - def code(self): + def code(self) -> str: return "M029" def message(self) -> str: @@ -1473,11 +1148,43 @@ def message(self) -> str: class NoNodesForSelectionCriteria(WarnLevel): - def code(self): + def code(self) -> str: return "M030" def message(self) -> str: - return f"The selection criterion '{self.spec_raw}' does not match any nodes" + return f"The selection criterion '{self.spec_raw}' does not match any enabled nodes" + + +class DepsLockUpdating(InfoLevel): + def code(self): + return "M031" + + def message(self) -> str: + 
return f"Updating lock file in file path: {self.lock_filepath}" + + +class DepsAddPackage(InfoLevel): + def code(self): + return "M032" + + def message(self) -> str: + return f"Added new package {self.package_name}@{self.version} to {self.packages_filepath}" + + +class DepsFoundDuplicatePackage(InfoLevel): + def code(self): + return "M033" + + def message(self) -> str: + return f"Found duplicate package in packages.yml, removing: {self.removed_package}" + + +class DepsScrubbedPackageName(WarnLevel): + def code(self): + return "M035" + + def message(self) -> str: + return f"Detected secret env var in {self.package_name}. dbt will write a scrubbed representation to the lock file. This will cause issues with subsequent 'dbt deps' using the lock file, requiring 'dbt deps --upgrade'" # ======================================================= @@ -1486,7 +1193,7 @@ def message(self) -> str: class RunningOperationCaughtError(ErrorLevel): - def code(self): + def code(self) -> str: return "Q001" def message(self) -> str: @@ -1494,7 +1201,7 @@ def message(self) -> str: class CompileComplete(InfoLevel): - def code(self): + def code(self) -> str: return "Q002" def message(self) -> str: @@ -1502,7 +1209,7 @@ def message(self) -> str: class FreshnessCheckComplete(InfoLevel): - def code(self): + def code(self) -> str: return "Q003" def message(self) -> str: @@ -1510,7 +1217,7 @@ def message(self) -> str: class SeedHeader(InfoLevel): - def code(self): + def code(self) -> str: return "Q004" def message(self) -> str: @@ -1518,7 +1225,7 @@ def message(self) -> str: class SQLRunnerException(DebugLevel): - def code(self): + def code(self) -> str: return "Q006" def message(self) -> str: @@ -1526,13 +1233,15 @@ def message(self) -> str: class LogTestResult(DynamicLevel): - def code(self): + def code(self) -> str: return "Q007" def message(self) -> str: if self.status == "error": info = "ERROR" - status = red(info) + status = red( + info, + ) elif self.status == "pass": info = "PASS" status = green(info) @@ -1571,7 +1280,7 @@ def status_to_level(cls, status): class LogStartLine(InfoLevel): - def code(self): + def code(self) -> str: return "Q011" def message(self) -> str: @@ -1580,7 +1289,7 @@ def message(self) -> str: class LogModelResult(DynamicLevel): - def code(self): + def code(self) -> str: return "Q012" def message(self) -> str: @@ -1605,7 +1314,7 @@ def message(self) -> str: class LogSnapshotResult(DynamicLevel): - def code(self): + def code(self) -> str: return "Q015" def message(self) -> str: @@ -1614,7 +1323,7 @@ def message(self) -> str: status = red(self.status.upper()) else: info = "OK snapshotted" - status = green(self.status) + status = green(self.result_message) msg = "{info} {description}".format(info=info, description=self.description, **self.cfg) return format_fancy_output_line( @@ -1627,7 +1336,7 @@ def message(self) -> str: class LogSeedResult(DynamicLevel): - def code(self): + def code(self) -> str: return "Q016" def message(self) -> str: @@ -1651,7 +1360,7 @@ def message(self) -> str: class LogFreshnessResult(DynamicLevel): - def code(self): + def code(self) -> str: return "Q018" def message(self) -> str: @@ -1692,11 +1401,26 @@ def status_to_level(cls, status): return EventLevel.INFO -# Skipped Q019, Q020, Q021 +class LogNodeNoOpResult(InfoLevel): + def code(self) -> str: + return "Q019" + + def message(self) -> str: + msg = f"NO-OP {self.description}" + return format_fancy_output_line( + msg=msg, + status=yellow("NO-OP"), + index=self.index, + total=self.total, + 
execution_time=self.execution_time, + ) + + +# Skipped Q020, Q021 class LogCancelLine(ErrorLevel): - def code(self): + def code(self) -> str: return "Q022" def message(self) -> str: @@ -1705,7 +1429,7 @@ def message(self) -> str: class DefaultSelector(InfoLevel): - def code(self): + def code(self) -> str: return "Q023" def message(self) -> str: @@ -1713,7 +1437,7 @@ def message(self) -> str: class NodeStart(DebugLevel): - def code(self): + def code(self) -> str: return "Q024" def message(self) -> str: @@ -1721,7 +1445,7 @@ def message(self) -> str: class NodeFinished(DebugLevel): - def code(self): + def code(self) -> str: return "Q025" def message(self) -> str: @@ -1729,7 +1453,7 @@ def message(self) -> str: class QueryCancelationUnsupported(InfoLevel): - def code(self): + def code(self) -> str: return "Q026" def message(self) -> str: @@ -1742,7 +1466,7 @@ def message(self) -> str: class ConcurrencyLine(InfoLevel): - def code(self): + def code(self) -> str: return "Q027" def message(self) -> str: @@ -1750,7 +1474,7 @@ def message(self) -> str: class WritingInjectedSQLForNode(DebugLevel): - def code(self): + def code(self) -> str: return "Q029" def message(self) -> str: @@ -1758,7 +1482,7 @@ def message(self) -> str: class NodeCompiling(DebugLevel): - def code(self): + def code(self) -> str: return "Q030" def message(self) -> str: @@ -1766,7 +1490,7 @@ def message(self) -> str: class NodeExecuting(DebugLevel): - def code(self): + def code(self) -> str: return "Q031" def message(self) -> str: @@ -1774,7 +1498,7 @@ def message(self) -> str: class LogHookStartLine(InfoLevel): - def code(self): + def code(self) -> str: return "Q032" def message(self) -> str: @@ -1785,7 +1509,7 @@ def message(self) -> str: class LogHookEndLine(InfoLevel): - def code(self): + def code(self) -> str: return "Q033" def message(self) -> str: @@ -1801,11 +1525,12 @@ def message(self) -> str: class SkippingDetails(InfoLevel): - def code(self): + def code(self) -> str: return "Q034" def message(self) -> str: - if self.resource_type in NodeType.refable(): + # ToDo: move to core or figure out NodeType + if self.resource_type in ["model", "seed", "snapshot"]: msg = f"SKIP relation {self.schema}.{self.node_name}" else: msg = f"SKIP {self.resource_type} {self.node_name}" @@ -1815,7 +1540,7 @@ def message(self) -> str: class NothingToDo(WarnLevel): - def code(self): + def code(self) -> str: return "Q035" def message(self) -> str: @@ -1823,7 +1548,7 @@ def message(self) -> str: class RunningOperationUncaughtError(ErrorLevel): - def code(self): + def code(self) -> str: return "Q036" def message(self) -> str: @@ -1831,7 +1556,7 @@ def message(self) -> str: class EndRunResult(DebugLevel): - def code(self): + def code(self) -> str: return "Q037" def message(self) -> str: @@ -1839,7 +1564,7 @@ def message(self) -> str: class NoNodesSelected(WarnLevel): - def code(self): + def code(self) -> str: return "Q038" def message(self) -> str: @@ -1847,7 +1572,7 @@ def message(self) -> str: class CommandCompleted(DebugLevel): - def code(self): + def code(self) -> str: return "Q039" def message(self) -> str: @@ -1857,7 +1582,7 @@ def message(self) -> str: class ShowNode(InfoLevel): - def code(self): + def code(self) -> str: return "Q041" def message(self) -> str: @@ -1876,7 +1601,7 @@ def message(self) -> str: class CompiledNode(InfoLevel): - def code(self): + def code(self) -> str: return "Q042" def message(self) -> str: @@ -1892,6 +1617,18 @@ def message(self) -> str: return f"Compiled node '{self.node_name}' is:\n{self.compiled}" +class 
SnapshotTimestampWarning(WarnLevel): + def code(self) -> str: + return "Q043" + + def message(self) -> str: + return ( + f"Data type of snapshot table timestamp columns ({self.snapshot_time_data_type}) " + f"doesn't match derived column 'updated_at' ({self.updated_at_data_type}). " + "Please update snapshot config 'updated_at'." + ) + + # ======================================================= # W - Node testing # ======================================================= @@ -1900,7 +1637,7 @@ def message(self) -> str: class CatchableExceptionOnRun(DebugLevel): - def code(self): + def code(self) -> str: return "W002" def message(self) -> str: @@ -1908,7 +1645,7 @@ def message(self) -> str: class InternalErrorOnRun(DebugLevel): - def code(self): + def code(self) -> str: return "W003" def message(self) -> str: @@ -1922,7 +1659,7 @@ def message(self) -> str: class GenericExceptionOnRun(ErrorLevel): - def code(self): + def code(self) -> str: return "W004" def message(self) -> str: @@ -1934,7 +1671,7 @@ def message(self) -> str: class NodeConnectionReleaseError(DebugLevel): - def code(self): + def code(self) -> str: return "W005" def message(self) -> str: @@ -1942,7 +1679,7 @@ def message(self) -> str: class FoundStats(InfoLevel): - def code(self): + def code(self) -> str: return "W006" def message(self) -> str: @@ -1955,7 +1692,7 @@ def message(self) -> str: class MainKeyboardInterrupt(InfoLevel): - def code(self): + def code(self) -> str: return "Z001" def message(self) -> str: @@ -1963,7 +1700,7 @@ def message(self) -> str: class MainEncounteredError(ErrorLevel): - def code(self): + def code(self) -> str: return "Z002" def message(self) -> str: @@ -1971,7 +1708,7 @@ def message(self) -> str: class MainStackTrace(ErrorLevel): - def code(self): + def code(self) -> str: return "Z003" def message(self) -> str: @@ -1981,51 +1718,8 @@ def message(self) -> str: # Skipped Z004 -class SystemCouldNotWrite(DebugLevel): - def code(self): - return "Z005" - - def message(self) -> str: - return ( - f"Could not write to path {self.path}({len(self.path)} characters): " - f"{self.reason}\nexception: {self.exc}" - ) - - -class SystemExecutingCmd(DebugLevel): - def code(self): - return "Z006" - - def message(self) -> str: - return f'Executing "{" ".join(self.cmd)}"' - - -class SystemStdOut(DebugLevel): - def code(self): - return "Z007" - - def message(self) -> str: - return f'STDOUT: "{str(self.bmsg)}"' - - -class SystemStdErr(DebugLevel): - def code(self): - return "Z008" - - def message(self) -> str: - return f'STDERR: "{str(self.bmsg)}"' - - -class SystemReportReturnCode(DebugLevel): - def code(self): - return "Z009" - - def message(self) -> str: - return f"command return code={self.returncode}" - - class TimingInfoCollected(DebugLevel): - def code(self): + def code(self) -> str: return "Z010" def message(self) -> str: @@ -2039,7 +1733,7 @@ def message(self) -> str: class LogDebugStackTrace(DebugLevel): - def code(self): + def code(self) -> str: return "Z011" def message(self) -> str: @@ -2051,7 +1745,7 @@ def message(self) -> str: class CheckCleanPath(InfoLevel): - def code(self): + def code(self) -> str: return "Z012" def message(self) -> str: @@ -2059,7 +1753,7 @@ def message(self) -> str: class ConfirmCleanPath(InfoLevel): - def code(self): + def code(self) -> str: return "Z013" def message(self) -> str: @@ -2067,7 +1761,7 @@ def message(self) -> str: class ProtectedCleanPath(InfoLevel): - def code(self): + def code(self) -> str: return "Z014" def message(self) -> str: @@ -2075,7 +1769,7 @@ def 
message(self) -> str: class FinishedCleanPaths(InfoLevel): - def code(self): + def code(self) -> str: return "Z015" def message(self) -> str: @@ -2083,7 +1777,7 @@ def message(self) -> str: class OpenCommand(InfoLevel): - def code(self): + def code(self) -> str: return "Z016" def message(self) -> str: @@ -2094,23 +1788,8 @@ def message(self) -> str: return msg -# We use events to create console output, but also think of them as a sequence of important and -# meaningful occurrences to be used for debugging and monitoring. The Formatting event helps eases -# the tension between these two goals by allowing empty lines, heading separators, and other -# formatting to be written to the console, while they can be ignored for other purposes. For -# general information that isn't simple formatting, the Note event should be used instead. - - -class Formatting(InfoLevel): - def code(self): - return "Z017" - - def message(self) -> str: - return self.msg - - class RunResultWarning(WarnLevel): - def code(self): + def code(self) -> str: return "Z021" def message(self) -> str: @@ -2119,7 +1798,7 @@ def message(self) -> str: class RunResultFailure(ErrorLevel): - def code(self): + def code(self) -> str: return "Z022" def message(self) -> str: @@ -2128,7 +1807,7 @@ def message(self) -> str: class StatsLine(InfoLevel): - def code(self): + def code(self) -> str: return "Z023" def message(self) -> str: @@ -2137,7 +1816,7 @@ def message(self) -> str: class RunResultError(ErrorLevel): - def code(self): + def code(self) -> str: return "Z024" def message(self) -> str: @@ -2146,7 +1825,7 @@ def message(self) -> str: class RunResultErrorNoMessage(ErrorLevel): - def code(self): + def code(self) -> str: return "Z025" def message(self) -> str: @@ -2154,15 +1833,15 @@ def message(self) -> str: class SQLCompiledPath(InfoLevel): - def code(self): + def code(self) -> str: return "Z026" def message(self) -> str: - return f" compiled Code at {self.path}" + return f" compiled code at {self.path}" class CheckNodeTestFailure(InfoLevel): - def code(self): + def code(self) -> str: return "Z027" def message(self) -> str: @@ -2171,29 +1850,11 @@ def message(self) -> str: return f" See test failures:\n {border}\n {msg}\n {border}" -# FirstRunResultError and AfterFirstRunResultError are just splitting the message from the result -# object into multiple log lines -# TODO: is this reallly needed? See printer.py - - -class FirstRunResultError(ErrorLevel): - def code(self): - return "Z028" - - def message(self) -> str: - return yellow(self.msg) - - -class AfterFirstRunResultError(ErrorLevel): - def code(self): - return "Z029" - - def message(self) -> str: - return self.msg +# Skipped Z028, Z029 class EndOfRunSummary(InfoLevel): - def code(self): + def code(self) -> str: return "Z030" def message(self) -> str: @@ -2210,15 +1871,29 @@ def message(self) -> str: return message -# Skipped Z031, Z032, Z033 +# Skipped Z031, Z032 + + +class MarkSkippedChildren(DebugLevel): + def code(self) -> str: + return "Z033" + + def message(self) -> str: + msg = ( + f"Marking all children of '{self.unique_id}' to be skipped " + f"because of status '{self.status}'. " + ) + if self.run_result.message: + msg = msg + f" Reason: {self.run_result.message}." 
+        return msg


 class LogSkipBecauseError(ErrorLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z034"

     def message(self) -> str:
-        msg = f"SKIP relation {self.schema}.{self.relation} due to ephemeral model error"
+        msg = f"SKIP relation {self.schema}.{self.relation} due to ephemeral model status '{self.status}'"
         return format_fancy_output_line(
             msg=msg, status=red("ERROR SKIP"), index=self.index, total=self.total
         )
@@ -2228,7 +1903,7 @@ def message(self) -> str:


 class EnsureGitInstalled(ErrorLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z036"

     def message(self) -> str:
@@ -2240,7 +1915,7 @@ def message(self) -> str:


 class DepsCreatingLocalSymlink(DebugLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z037"

     def message(self) -> str:
@@ -2248,7 +1923,7 @@ def message(self) -> str:


 class DepsSymlinkNotAvailable(DebugLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z038"

     def message(self) -> str:
@@ -2256,7 +1931,7 @@ def message(self) -> str:


 class DisableTracking(DebugLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z039"

     def message(self) -> str:
@@ -2268,7 +1943,7 @@ def message(self) -> str:


 class SendingEvent(DebugLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z040"

     def message(self) -> str:
@@ -2276,7 +1951,7 @@ def message(self) -> str:


 class SendEventFailure(DebugLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z041"

     def message(self) -> str:
@@ -2284,7 +1959,7 @@ def message(self) -> str:


 class FlushEvents(DebugLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z042"

     def message(self) -> str:
@@ -2292,7 +1967,7 @@ def message(self) -> str:


 class FlushEventsFailure(DebugLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z043"

     def message(self) -> str:
@@ -2300,7 +1975,7 @@ def message(self) -> str:


 class TrackingInitializeFailure(DebugLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z044"

     def message(self) -> str:
@@ -2311,7 +1986,7 @@ def message(self) -> str:


 class RunResultWarningMessage(WarnLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z046"

     def message(self) -> str:
@@ -2320,7 +1995,7 @@ def message(self) -> str:


 class DebugCmdOut(InfoLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z047"

     def message(self) -> str:
@@ -2328,7 +2003,7 @@ def message(self) -> str:


 class DebugCmdResult(InfoLevel):
-    def code(self):
+    def code(self) -> str:
         return "Z048"

     def message(self) -> str:
@@ -2336,20 +2011,17 @@ def message(self) -> str:


 class ListCmdOut(InfoLevel):
-    def code(self):
+    # No longer in use, switching to Z051 PrintEvent in dbt-common
+    def code(self) -> str:
         return "Z049"

     def message(self) -> str:
         return self.msg


-# The Note event provides a way to log messages which aren't likely to be useful as more structured events.
-# For console formatting text like empty lines and separator bars, use the Formatting event instead.
-
-
-class Note(InfoLevel):
-    def code(self):
-        return "Z050"
+class ResourceReport(DebugLevel):
+    def code(self) -> str:
+        return "Z051"

     def message(self) -> str:
-        return self.msg
+        return f"Resource report: {self.to_json()}"
diff --git a/core/dbt/events/types_pb2.py b/core/dbt/events/types_pb2.py
deleted file mode 100644
index 80cfbba69a7..00000000000
--- a/core/dbt/events/types_pb2.py
+++ /dev/null
@@ -1,903 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: types.proto -"""Generated protocol buffer code.""" -from google.protobuf.internal import builder as _builder -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import symbol_database as _symbol_database -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0btypes.proto\x12\x0bproto_types\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1cgoogle/protobuf/struct.proto\"\x91\x02\n\tEventInfo\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04\x63ode\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\x12\r\n\x05level\x18\x04 \x01(\t\x12\x15\n\rinvocation_id\x18\x05 \x01(\t\x12\x0b\n\x03pid\x18\x06 \x01(\x05\x12\x0e\n\x06thread\x18\x07 \x01(\t\x12&\n\x02ts\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x05\x65xtra\x18\t \x03(\x0b\x32!.proto_types.EventInfo.ExtraEntry\x12\x10\n\x08\x63\x61tegory\x18\n \x01(\t\x1a,\n\nExtraEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\x7f\n\rTimingInfoMsg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12.\n\nstarted_at\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x0c\x63ompleted_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\"V\n\x0cNodeRelation\x12\x10\n\x08\x64\x61tabase\x18\n \x01(\t\x12\x0e\n\x06schema\x18\x0b \x01(\t\x12\r\n\x05\x61lias\x18\x0c \x01(\t\x12\x15\n\rrelation_name\x18\r \x01(\t\"\x91\x02\n\x08NodeInfo\x12\x11\n\tnode_path\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x11\n\tunique_id\x18\x03 \x01(\t\x12\x15\n\rresource_type\x18\x04 \x01(\t\x12\x14\n\x0cmaterialized\x18\x05 \x01(\t\x12\x13\n\x0bnode_status\x18\x06 \x01(\t\x12\x17\n\x0fnode_started_at\x18\x07 \x01(\t\x12\x18\n\x10node_finished_at\x18\x08 \x01(\t\x12%\n\x04meta\x18\t \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x30\n\rnode_relation\x18\n \x01(\x0b\x32\x19.proto_types.NodeRelation\"\xd1\x01\n\x0cRunResultMsg\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\x12/\n\x0btiming_info\x18\x03 \x03(\x0b\x32\x1a.proto_types.TimingInfoMsg\x12\x0e\n\x06thread\x18\x04 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x05 \x01(\x02\x12\x31\n\x10\x61\x64\x61pter_response\x18\x06 \x01(\x0b\x32\x17.google.protobuf.Struct\x12\x14\n\x0cnum_failures\x18\x07 \x01(\x05\"G\n\x0fReferenceKeyMsg\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12\x12\n\nidentifier\x18\x03 \x01(\t\"6\n\x0eGenericMessage\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\"9\n\x11MainReportVersion\x12\x0f\n\x07version\x18\x01 \x01(\t\x12\x13\n\x0blog_version\x18\x02 \x01(\x05\"j\n\x14MainReportVersionMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.MainReportVersion\"r\n\x0eMainReportArgs\x12\x33\n\x04\x61rgs\x18\x01 \x03(\x0b\x32%.proto_types.MainReportArgs.ArgsEntry\x1a+\n\tArgsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"d\n\x11MainReportArgsMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.MainReportArgs\"+\n\x15MainTrackingUserState\x12\x12\n\nuser_state\x18\x01 \x01(\t\"r\n\x18MainTrackingUserStateMsg\x12$\n\x04info\x18\x01 
\x01(\x0b\x32\x16.proto_types.EventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MainTrackingUserState\"5\n\x0fMergedFromState\x12\x12\n\nnum_merged\x18\x01 \x01(\x05\x12\x0e\n\x06sample\x18\x02 \x03(\t\"f\n\x12MergedFromStateMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.MergedFromState\"A\n\x14MissingProfileTarget\x12\x14\n\x0cprofile_name\x18\x01 \x01(\t\x12\x13\n\x0btarget_name\x18\x02 \x01(\t\"p\n\x17MissingProfileTargetMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.MissingProfileTarget\"(\n\x11InvalidOptionYAML\x12\x13\n\x0boption_name\x18\x01 \x01(\t\"j\n\x14InvalidOptionYAMLMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.InvalidOptionYAML\"!\n\x12LogDbtProjectError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"l\n\x15LogDbtProjectErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDbtProjectError\"3\n\x12LogDbtProfileError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\x12\x10\n\x08profiles\x18\x02 \x03(\t\"l\n\x15LogDbtProfileErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDbtProfileError\"!\n\x12StarterProjectPath\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"l\n\x15StarterProjectPathMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.StarterProjectPath\"$\n\x15\x43onfigFolderDirectory\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"r\n\x18\x43onfigFolderDirectoryMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConfigFolderDirectory\"\'\n\x14NoSampleProfileFound\x12\x0f\n\x07\x61\x64\x61pter\x18\x01 \x01(\t\"p\n\x17NoSampleProfileFoundMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NoSampleProfileFound\"6\n\x18ProfileWrittenWithSample\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"x\n\x1bProfileWrittenWithSampleMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ProfileWrittenWithSample\"B\n$ProfileWrittenWithTargetTemplateYAML\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"\x90\x01\n\'ProfileWrittenWithTargetTemplateYAMLMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12?\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x31.proto_types.ProfileWrittenWithTargetTemplateYAML\"C\n%ProfileWrittenWithProjectTemplateYAML\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\t\"\x92\x01\n(ProfileWrittenWithProjectTemplateYAMLMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12@\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x32.proto_types.ProfileWrittenWithProjectTemplateYAML\"\x12\n\x10SettingUpProfile\"h\n\x13SettingUpProfileMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.SettingUpProfile\"\x1c\n\x1aInvalidProfileTemplateYAML\"|\n\x1dInvalidProfileTemplateYAMLMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.InvalidProfileTemplateYAML\"(\n\x18ProjectNameAlreadyExists\x12\x0c\n\x04name\x18\x01 
\x01(\t\"x\n\x1bProjectNameAlreadyExistsMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ProjectNameAlreadyExists\"K\n\x0eProjectCreated\x12\x14\n\x0cproject_name\x18\x01 \x01(\t\x12\x10\n\x08\x64ocs_url\x18\x02 \x01(\t\x12\x11\n\tslack_url\x18\x03 \x01(\t\"d\n\x11ProjectCreatedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ProjectCreated\"@\n\x1aPackageRedirectDeprecation\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"|\n\x1dPackageRedirectDeprecationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.PackageRedirectDeprecation\"\x1f\n\x1dPackageInstallPathDeprecation\"\x82\x01\n PackageInstallPathDeprecationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.PackageInstallPathDeprecation\"H\n\x1b\x43onfigSourcePathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"~\n\x1e\x43onfigSourcePathDeprecationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConfigSourcePathDeprecation\"F\n\x19\x43onfigDataPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\x12\x10\n\x08\x65xp_path\x18\x02 \x01(\t\"z\n\x1c\x43onfigDataPathDeprecationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConfigDataPathDeprecation\"?\n\x19\x41\x64\x61pterDeprecationWarning\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"z\n\x1c\x41\x64\x61pterDeprecationWarningMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.AdapterDeprecationWarning\".\n\x17MetricAttributesRenamed\x12\x13\n\x0bmetric_name\x18\x01 \x01(\t\"v\n\x1aMetricAttributesRenamedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.MetricAttributesRenamed\"+\n\x17\x45xposureNameDeprecation\x12\x10\n\x08\x65xposure\x18\x01 \x01(\t\"v\n\x1a\x45xposureNameDeprecationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.ExposureNameDeprecation\"^\n\x13InternalDeprecation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06reason\x18\x02 \x01(\t\x12\x18\n\x10suggested_action\x18\x03 \x01(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"n\n\x16InternalDeprecationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.InternalDeprecation\"@\n\x1a\x45nvironmentVariableRenamed\x12\x10\n\x08old_name\x18\x01 \x01(\t\x12\x10\n\x08new_name\x18\x02 \x01(\t\"|\n\x1d\x45nvironmentVariableRenamedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.EnvironmentVariableRenamed\"3\n\x18\x43onfigLogPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 \x01(\t\"x\n\x1b\x43onfigLogPathDeprecationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.ConfigLogPathDeprecation\"6\n\x1b\x43onfigTargetPathDeprecation\x12\x17\n\x0f\x64\x65precated_path\x18\x01 
\x01(\t\"~\n\x1e\x43onfigTargetPathDeprecationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConfigTargetPathDeprecation\"!\n\x1f\x43ollectFreshnessReturnSignature\"\x86\x01\n\"CollectFreshnessReturnSignatureMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.CollectFreshnessReturnSignature\"\x87\x01\n\x11\x41\x64\x61pterEventDebug\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"j\n\x14\x41\x64\x61pterEventDebugMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventDebug\"\x86\x01\n\x10\x41\x64\x61pterEventInfo\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"h\n\x13\x41\x64\x61pterEventInfoMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.AdapterEventInfo\"\x89\x01\n\x13\x41\x64\x61pterEventWarning\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\"n\n\x16\x41\x64\x61pterEventWarningMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.AdapterEventWarning\"\x99\x01\n\x11\x41\x64\x61pterEventError\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x10\n\x08\x62\x61se_msg\x18\x03 \x01(\t\x12(\n\x04\x61rgs\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.ListValue\x12\x10\n\x08\x65xc_info\x18\x05 \x01(\t\"j\n\x14\x41\x64\x61pterEventErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterEventError\"_\n\rNewConnection\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"b\n\x10NewConnectionMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NewConnection\"=\n\x10\x43onnectionReused\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x16\n\x0eorig_conn_name\x18\x02 \x01(\t\"h\n\x13\x43onnectionReusedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionReused\"0\n\x1b\x43onnectionLeftOpenInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"~\n\x1e\x43onnectionLeftOpenInCleanupMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.ConnectionLeftOpenInCleanup\".\n\x19\x43onnectionClosedInCleanup\x12\x11\n\tconn_name\x18\x01 \x01(\t\"z\n\x1c\x43onnectionClosedInCleanupMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.ConnectionClosedInCleanup\"_\n\x0eRollbackFailed\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 
\x01(\t\"d\n\x11RollbackFailedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RollbackFailed\"O\n\x10\x43onnectionClosed\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"h\n\x13\x43onnectionClosedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConnectionClosed\"Q\n\x12\x43onnectionLeftOpen\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"l\n\x15\x43onnectionLeftOpenMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.ConnectionLeftOpen\"G\n\x08Rollback\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"X\n\x0bRollbackMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.Rollback\"@\n\tCacheMiss\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x10\n\x08\x64\x61tabase\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\"Z\n\x0c\x43\x61\x63heMissMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.CacheMiss\"b\n\rListRelations\x12\x10\n\x08\x64\x61tabase\x18\x01 \x01(\t\x12\x0e\n\x06schema\x18\x02 \x01(\t\x12/\n\trelations\x18\x03 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"b\n\x10ListRelationsMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ListRelations\"`\n\x0e\x43onnectionUsed\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tconn_type\x18\x02 \x01(\t\x12\x11\n\tconn_name\x18\x03 \x01(\t\"d\n\x11\x43onnectionUsedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.ConnectionUsed\"T\n\x08SQLQuery\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\x12\x0b\n\x03sql\x18\x03 \x01(\t\"X\n\x0bSQLQueryMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.SQLQuery\"[\n\x0eSQLQueryStatus\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x03 \x01(\x02\"d\n\x11SQLQueryStatusMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SQLQueryStatus\"H\n\tSQLCommit\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tconn_name\x18\x02 \x01(\t\"Z\n\x0cSQLCommitMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.SQLCommit\"a\n\rColTypeChange\x12\x11\n\torig_type\x18\x01 \x01(\t\x12\x10\n\x08new_type\x18\x02 \x01(\t\x12+\n\x05table\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"b\n\x10\x43olTypeChangeMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.ColTypeChange\"@\n\x0eSchemaCreation\x12.\n\x08relation\x18\x01 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"d\n\x11SchemaCreationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.SchemaCreation\"<\n\nSchemaDrop\x12.\n\x08relation\x18\x01 
\x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"\\\n\rSchemaDropMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SchemaDrop\"\xde\x01\n\x0b\x43\x61\x63heAction\x12\x0e\n\x06\x61\x63tion\x18\x01 \x01(\t\x12-\n\x07ref_key\x18\x02 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_2\x18\x03 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12/\n\tref_key_3\x18\x04 \x01(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\x12.\n\x08ref_list\x18\x05 \x03(\x0b\x32\x1c.proto_types.ReferenceKeyMsg\"^\n\x0e\x43\x61\x63heActionMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.CacheAction\"\x98\x01\n\x0e\x43\x61\x63heDumpGraph\x12\x33\n\x04\x64ump\x18\x01 \x03(\x0b\x32%.proto_types.CacheDumpGraph.DumpEntry\x12\x14\n\x0c\x62\x65\x66ore_after\x18\x02 \x01(\t\x12\x0e\n\x06\x61\x63tion\x18\x03 \x01(\t\x1a+\n\tDumpEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"d\n\x11\x43\x61\x63heDumpGraphMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CacheDumpGraph\"B\n\x11\x41\x64\x61pterRegistered\x12\x14\n\x0c\x61\x64\x61pter_name\x18\x01 \x01(\t\x12\x17\n\x0f\x61\x64\x61pter_version\x18\x02 \x01(\t\"j\n\x14\x41\x64\x61pterRegisteredMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.AdapterRegistered\"!\n\x12\x41\x64\x61pterImportError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"l\n\x15\x41\x64\x61pterImportErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.AdapterImportError\"#\n\x0fPluginLoadError\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"f\n\x12PluginLoadErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.PluginLoadError\"Z\n\x14NewConnectionOpening\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x18\n\x10\x63onnection_state\x18\x02 \x01(\t\"p\n\x17NewConnectionOpeningMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.NewConnectionOpening\"8\n\rCodeExecution\x12\x11\n\tconn_name\x18\x01 \x01(\t\x12\x14\n\x0c\x63ode_content\x18\x02 \x01(\t\"b\n\x10\x43odeExecutionMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.CodeExecution\"6\n\x13\x43odeExecutionStatus\x12\x0e\n\x06status\x18\x01 \x01(\t\x12\x0f\n\x07\x65lapsed\x18\x02 \x01(\x02\"n\n\x16\x43odeExecutionStatusMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.CodeExecutionStatus\"%\n\x16\x43\x61talogGenerationError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"t\n\x19\x43\x61talogGenerationErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.CatalogGenerationError\"-\n\x13WriteCatalogFailure\x12\x16\n\x0enum_exceptions\x18\x01 \x01(\x05\"n\n\x16WriteCatalogFailureMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.WriteCatalogFailure\"\x1e\n\x0e\x43\x61talogWritten\x12\x0c\n\x04path\x18\x01 \x01(\t\"d\n\x11\x43\x61talogWrittenMsg\x12$\n\x04info\x18\x01 
\x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CatalogWritten\"\x14\n\x12\x43\x61nnotGenerateDocs\"l\n\x15\x43\x61nnotGenerateDocsMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.CannotGenerateDocs\"\x11\n\x0f\x42uildingCatalog\"f\n\x12\x42uildingCatalogMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.BuildingCatalog\"-\n\x18\x44\x61tabaseErrorRunningHook\x12\x11\n\thook_type\x18\x01 \x01(\t\"x\n\x1b\x44\x61tabaseErrorRunningHookMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DatabaseErrorRunningHook\"4\n\x0cHooksRunning\x12\x11\n\tnum_hooks\x18\x01 \x01(\x05\x12\x11\n\thook_type\x18\x02 \x01(\t\"`\n\x0fHooksRunningMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.HooksRunning\"T\n\x14\x46inishedRunningStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\x12\x11\n\texecution\x18\x02 \x01(\t\x12\x16\n\x0e\x65xecution_time\x18\x03 \x01(\x02\"p\n\x17\x46inishedRunningStatsMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.FinishedRunningStats\"<\n\x15\x43onstraintNotEnforced\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"r\n\x18\x43onstraintNotEnforcedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ConstraintNotEnforced\"=\n\x16\x43onstraintNotSupported\x12\x12\n\nconstraint\x18\x01 \x01(\t\x12\x0f\n\x07\x61\x64\x61pter\x18\x02 \x01(\t\"t\n\x19\x43onstraintNotSupportedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.ConstraintNotSupported\"7\n\x12InputFileDiffError\x12\x10\n\x08\x63\x61tegory\x18\x01 \x01(\t\x12\x0f\n\x07\x66ile_id\x18\x02 \x01(\t\"l\n\x15InputFileDiffErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.InputFileDiffError\"?\n\x14InvalidValueForField\x12\x12\n\nfield_name\x18\x01 \x01(\t\x12\x13\n\x0b\x66ield_value\x18\x02 \x01(\t\"p\n\x17InvalidValueForFieldMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.InvalidValueForField\"Q\n\x11ValidationWarning\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x12\n\nfield_name\x18\x02 \x01(\t\x12\x11\n\tnode_name\x18\x03 \x01(\t\"j\n\x14ValidationWarningMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.ValidationWarning\"!\n\x11ParsePerfInfoPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"j\n\x14ParsePerfInfoPathMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.ParsePerfInfoPath\"1\n!PartialParsingErrorProcessingFile\x12\x0c\n\x04\x66ile\x18\x01 \x01(\t\"\x8a\x01\n$PartialParsingErrorProcessingFileMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.PartialParsingErrorProcessingFile\"\x86\x01\n\x13PartialParsingError\x12?\n\x08\x65xc_info\x18\x01 \x03(\x0b\x32-.proto_types.PartialParsingError.ExcInfoEntry\x1a.\n\x0c\x45xcInfoEntry\x12\x0b\n\x03key\x18\x01 
\x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"n\n\x16PartialParsingErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.PartialParsingError\"\x1b\n\x19PartialParsingSkipParsing\"z\n\x1cPartialParsingSkipParsingMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.PartialParsingSkipParsing\"&\n\x14UnableToPartialParse\x12\x0e\n\x06reason\x18\x01 \x01(\t\"p\n\x17UnableToPartialParseMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.UnableToPartialParse\"f\n\x12StateCheckVarsHash\x12\x10\n\x08\x63hecksum\x18\x01 \x01(\t\x12\x0c\n\x04vars\x18\x02 \x01(\t\x12\x0f\n\x07profile\x18\x03 \x01(\t\x12\x0e\n\x06target\x18\x04 \x01(\t\x12\x0f\n\x07version\x18\x05 \x01(\t\"l\n\x15StateCheckVarsHashMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.StateCheckVarsHash\"\x1a\n\x18PartialParsingNotEnabled\"x\n\x1bPartialParsingNotEnabledMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.PartialParsingNotEnabled\"C\n\x14ParsedFileLoadFailed\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"p\n\x17ParsedFileLoadFailedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.ParsedFileLoadFailed\"H\n\x15PartialParsingEnabled\x12\x0f\n\x07\x64\x65leted\x18\x01 \x01(\x05\x12\r\n\x05\x61\x64\x64\x65\x64\x18\x02 \x01(\x05\x12\x0f\n\x07\x63hanged\x18\x03 \x01(\x05\"r\n\x18PartialParsingEnabledMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.PartialParsingEnabled\"8\n\x12PartialParsingFile\x12\x0f\n\x07\x66ile_id\x18\x01 \x01(\t\x12\x11\n\toperation\x18\x02 \x01(\t\"l\n\x15PartialParsingFileMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.PartialParsingFile\"\xaf\x01\n\x1fInvalidDisabledTargetInTestNode\x12\x1b\n\x13resource_type_title\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x1a\n\x12original_file_path\x18\x03 \x01(\t\x12\x13\n\x0btarget_kind\x18\x04 \x01(\t\x12\x13\n\x0btarget_name\x18\x05 \x01(\t\x12\x16\n\x0etarget_package\x18\x06 \x01(\t\"\x86\x01\n\"InvalidDisabledTargetInTestNodeMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.InvalidDisabledTargetInTestNode\"7\n\x18UnusedResourceConfigPath\x12\x1b\n\x13unused_config_paths\x18\x01 \x03(\t\"x\n\x1bUnusedResourceConfigPathMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.UnusedResourceConfigPath\"3\n\rSeedIncreased\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"b\n\x10SeedIncreasedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.SeedIncreased\">\n\x18SeedExceedsLimitSamePath\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"x\n\x1bSeedExceedsLimitSamePathMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32%.proto_types.SeedExceedsLimitSamePath\"D\n\x1eSeedExceedsLimitAndPathChanged\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x84\x01\n!SeedExceedsLimitAndPathChangedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.SeedExceedsLimitAndPathChanged\"\\\n\x1fSeedExceedsLimitChecksumChanged\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x15\n\rchecksum_name\x18\x03 \x01(\t\"\x86\x01\n\"SeedExceedsLimitChecksumChangedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.SeedExceedsLimitChecksumChanged\"%\n\x0cUnusedTables\x12\x15\n\runused_tables\x18\x01 \x03(\t\"`\n\x0fUnusedTablesMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.UnusedTables\"\x87\x01\n\x17WrongResourceSchemaFile\x12\x12\n\npatch_name\x18\x01 \x01(\t\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x1c\n\x14plural_resource_type\x18\x03 \x01(\t\x12\x10\n\x08yaml_key\x18\x04 \x01(\t\x12\x11\n\tfile_path\x18\x05 \x01(\t\"v\n\x1aWrongResourceSchemaFileMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.WrongResourceSchemaFile\"K\n\x10NoNodeForYamlKey\x12\x12\n\npatch_name\x18\x01 \x01(\t\x12\x10\n\x08yaml_key\x18\x02 \x01(\t\x12\x11\n\tfile_path\x18\x03 \x01(\t\"h\n\x13NoNodeForYamlKeyMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.NoNodeForYamlKey\"+\n\x15MacroNotFoundForPatch\x12\x12\n\npatch_name\x18\x01 \x01(\t\"r\n\x18MacroNotFoundForPatchMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MacroNotFoundForPatch\"\xb8\x01\n\x16NodeNotFoundOrDisabled\x12\x1a\n\x12original_file_path\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x1b\n\x13resource_type_title\x18\x03 \x01(\t\x12\x13\n\x0btarget_name\x18\x04 \x01(\t\x12\x13\n\x0btarget_kind\x18\x05 \x01(\t\x12\x16\n\x0etarget_package\x18\x06 \x01(\t\x12\x10\n\x08\x64isabled\x18\x07 \x01(\t\"t\n\x19NodeNotFoundOrDisabledMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.NodeNotFoundOrDisabled\"H\n\x0fJinjaLogWarning\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"f\n\x12JinjaLogWarningMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.JinjaLogWarning\"E\n\x0cJinjaLogInfo\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"`\n\x0fJinjaLogInfoMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.JinjaLogInfo\"F\n\rJinjaLogDebug\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03msg\x18\x02 \x01(\t\"b\n\x10JinjaLogDebugMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.JinjaLogDebug\"\xae\x01\n\x1eUnpinnedRefNewVersionAvailable\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x15\n\rref_node_name\x18\x02 \x01(\t\x12\x18\n\x10ref_node_package\x18\x03 \x01(\t\x12\x18\n\x10ref_node_version\x18\x04 
\x01(\t\x12\x17\n\x0fref_max_version\x18\x05 \x01(\t\"\x84\x01\n!UnpinnedRefNewVersionAvailableMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.UnpinnedRefNewVersionAvailable\"V\n\x0f\x44\x65precatedModel\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x15\n\rmodel_version\x18\x02 \x01(\t\x12\x18\n\x10\x64\x65precation_date\x18\x03 \x01(\t\"f\n\x12\x44\x65precatedModelMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DeprecatedModel\"\xc6\x01\n\x1cUpcomingReferenceDeprecation\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x19\n\x11ref_model_package\x18\x02 \x01(\t\x12\x16\n\x0eref_model_name\x18\x03 \x01(\t\x12\x19\n\x11ref_model_version\x18\x04 \x01(\t\x12 \n\x18ref_model_latest_version\x18\x05 \x01(\t\x12\"\n\x1aref_model_deprecation_date\x18\x06 \x01(\t\"\x80\x01\n\x1fUpcomingReferenceDeprecationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x37\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32).proto_types.UpcomingReferenceDeprecation\"\xbd\x01\n\x13\x44\x65precatedReference\x12\x12\n\nmodel_name\x18\x01 \x01(\t\x12\x19\n\x11ref_model_package\x18\x02 \x01(\t\x12\x16\n\x0eref_model_name\x18\x03 \x01(\t\x12\x19\n\x11ref_model_version\x18\x04 \x01(\t\x12 \n\x18ref_model_latest_version\x18\x05 \x01(\t\x12\"\n\x1aref_model_deprecation_date\x18\x06 \x01(\t\"n\n\x16\x44\x65precatedReferenceMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DeprecatedReference\"<\n$UnsupportedConstraintMaterialization\x12\x14\n\x0cmaterialized\x18\x01 \x01(\t\"\x90\x01\n\'UnsupportedConstraintMaterializationMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12?\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x31.proto_types.UnsupportedConstraintMaterialization\"M\n\x14ParseInlineNodeError\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\"p\n\x17ParseInlineNodeErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.ParseInlineNodeError\"(\n\x19SemanticValidationFailure\x12\x0b\n\x03msg\x18\x02 \x01(\t\"z\n\x1cSemanticValidationFailureMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.SemanticValidationFailure\"/\n\x1dGitSparseCheckoutSubdirectory\x12\x0e\n\x06subdir\x18\x01 \x01(\t\"\x82\x01\n GitSparseCheckoutSubdirectoryMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.GitSparseCheckoutSubdirectory\"/\n\x1bGitProgressCheckoutRevision\x12\x10\n\x08revision\x18\x01 \x01(\t\"~\n\x1eGitProgressCheckoutRevisionMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.GitProgressCheckoutRevision\"4\n%GitProgressUpdatingExistingDependency\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"\x92\x01\n(GitProgressUpdatingExistingDependencyMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12@\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x32.proto_types.GitProgressUpdatingExistingDependency\".\n\x1fGitProgressPullingNewDependency\x12\x0b\n\x03\x64ir\x18\x01 \x01(\t\"\x86\x01\n\"GitProgressPullingNewDependencyMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12:\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32,.proto_types.GitProgressPullingNewDependency\"\x1d\n\x0eGitNothingToDo\x12\x0b\n\x03sha\x18\x01 \x01(\t\"d\n\x11GitNothingToDoMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.GitNothingToDo\"E\n\x1fGitProgressUpdatedCheckoutRange\x12\x11\n\tstart_sha\x18\x01 \x01(\t\x12\x0f\n\x07\x65nd_sha\x18\x02 \x01(\t\"\x86\x01\n\"GitProgressUpdatedCheckoutRangeMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.GitProgressUpdatedCheckoutRange\"*\n\x17GitProgressCheckedOutAt\x12\x0f\n\x07\x65nd_sha\x18\x01 \x01(\t\"v\n\x1aGitProgressCheckedOutAtMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.GitProgressCheckedOutAt\")\n\x1aRegistryProgressGETRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\"|\n\x1dRegistryProgressGETRequestMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.RegistryProgressGETRequest\"=\n\x1bRegistryProgressGETResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x11\n\tresp_code\x18\x02 \x01(\x05\"~\n\x1eRegistryProgressGETResponseMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.RegistryProgressGETResponse\"_\n\x1dSelectorReportInvalidSelector\x12\x17\n\x0fvalid_selectors\x18\x01 \x01(\t\x12\x13\n\x0bspec_method\x18\x02 \x01(\t\x12\x10\n\x08raw_spec\x18\x03 \x01(\t\"\x82\x01\n SelectorReportInvalidSelectorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.SelectorReportInvalidSelector\"\x15\n\x13\x44\x65psNoPackagesFound\"n\n\x16\x44\x65psNoPackagesFoundMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DepsNoPackagesFound\"/\n\x17\x44\x65psStartPackageInstall\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\"v\n\x1a\x44\x65psStartPackageInstallMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsStartPackageInstall\"\'\n\x0f\x44\x65psInstallInfo\x12\x14\n\x0cversion_name\x18\x01 \x01(\t\"f\n\x12\x44\x65psInstallInfoMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DepsInstallInfo\"-\n\x13\x44\x65psUpdateAvailable\x12\x16\n\x0eversion_latest\x18\x01 \x01(\t\"n\n\x16\x44\x65psUpdateAvailableMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.DepsUpdateAvailable\"\x0e\n\x0c\x44\x65psUpToDate\"`\n\x0f\x44\x65psUpToDateMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.DepsUpToDate\",\n\x14\x44\x65psListSubdirectory\x12\x14\n\x0csubdirectory\x18\x01 \x01(\t\"p\n\x17\x44\x65psListSubdirectoryMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.DepsListSubdirectory\".\n\x1a\x44\x65psNotifyUpdatesAvailable\x12\x10\n\x08packages\x18\x01 \x03(\t\"|\n\x1d\x44\x65psNotifyUpdatesAvailableMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.DepsNotifyUpdatesAvailable\"1\n\x11RetryExternalCall\x12\x0f\n\x07\x61ttempt\x18\x01 
\x01(\x05\x12\x0b\n\x03max\x18\x02 \x01(\x05\"j\n\x14RetryExternalCallMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.RetryExternalCall\"#\n\x14RecordRetryException\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"p\n\x17RecordRetryExceptionMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.RecordRetryException\".\n\x1fRegistryIndexProgressGETRequest\x12\x0b\n\x03url\x18\x01 \x01(\t\"\x86\x01\n\"RegistryIndexProgressGETRequestMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.RegistryIndexProgressGETRequest\"B\n RegistryIndexProgressGETResponse\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x11\n\tresp_code\x18\x02 \x01(\x05\"\x88\x01\n#RegistryIndexProgressGETResponseMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12;\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32-.proto_types.RegistryIndexProgressGETResponse\"2\n\x1eRegistryResponseUnexpectedType\x12\x10\n\x08response\x18\x01 \x01(\t\"\x84\x01\n!RegistryResponseUnexpectedTypeMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.RegistryResponseUnexpectedType\"2\n\x1eRegistryResponseMissingTopKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x84\x01\n!RegistryResponseMissingTopKeysMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x39\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32+.proto_types.RegistryResponseMissingTopKeys\"5\n!RegistryResponseMissingNestedKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x8a\x01\n$RegistryResponseMissingNestedKeysMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12<\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32..proto_types.RegistryResponseMissingNestedKeys\"3\n\x1fRegistryResponseExtraNestedKeys\x12\x10\n\x08response\x18\x01 \x01(\t\"\x86\x01\n\"RegistryResponseExtraNestedKeysMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12:\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32,.proto_types.RegistryResponseExtraNestedKeys\"(\n\x18\x44\x65psSetDownloadDirectory\x12\x0c\n\x04path\x18\x01 \x01(\t\"x\n\x1b\x44\x65psSetDownloadDirectoryMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DepsSetDownloadDirectory\"-\n\x0c\x44\x65psUnpinned\x12\x10\n\x08revision\x18\x01 \x01(\t\x12\x0b\n\x03git\x18\x02 \x01(\t\"`\n\x0f\x44\x65psUnpinnedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.DepsUnpinned\"/\n\x1bNoNodesForSelectionCriteria\x12\x10\n\x08spec_raw\x18\x01 \x01(\t\"~\n\x1eNoNodesForSelectionCriteriaMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.NoNodesForSelectionCriteria\"*\n\x1bRunningOperationCaughtError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"~\n\x1eRunningOperationCaughtErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.RunningOperationCaughtError\"\x11\n\x0f\x43ompileComplete\"f\n\x12\x43ompileCompleteMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.CompileComplete\"\x18\n\x16\x46reshnessCheckComplete\"t\n\x19\x46reshnessCheckCompleteMsg\x12$\n\x04info\x18\x01 
\x01(\x0b\x32\x16.proto_types.EventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.FreshnessCheckComplete\"\x1c\n\nSeedHeader\x12\x0e\n\x06header\x18\x01 \x01(\t\"\\\n\rSeedHeaderMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.SeedHeader\"3\n\x12SQLRunnerException\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x02 \x01(\t\"l\n\x15SQLRunnerExceptionMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.SQLRunnerException\"\xa8\x01\n\rLogTestResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\x12\n\nnum_models\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x14\n\x0cnum_failures\x18\x07 \x01(\x05\"b\n\x10LogTestResultMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogTestResult\"k\n\x0cLogStartLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"`\n\x0fLogStartLineMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.LogStartLine\"\x95\x01\n\x0eLogModelResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"d\n\x11LogModelResultMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.LogModelResult\"\xfa\x01\n\x11LogSnapshotResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x34\n\x03\x63\x66g\x18\x07 \x03(\x0b\x32\'.proto_types.LogSnapshotResult.CfgEntry\x1a*\n\x08\x43\x66gEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"j\n\x14LogSnapshotResultMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12,\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1e.proto_types.LogSnapshotResult\"\xb9\x01\n\rLogSeedResult\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0e\n\x06status\x18\x02 \x01(\t\x12\x16\n\x0eresult_message\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\x12\x0e\n\x06schema\x18\x07 \x01(\t\x12\x10\n\x08relation\x18\x08 \x01(\t\"b\n\x10LogSeedResultMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogSeedResult\"\xad\x01\n\x12LogFreshnessResult\x12\x0e\n\x06status\x18\x01 \x01(\t\x12(\n\tnode_info\x18\x02 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x05 \x01(\x02\x12\x13\n\x0bsource_name\x18\x06 \x01(\t\x12\x12\n\ntable_name\x18\x07 \x01(\t\"l\n\x15LogFreshnessResultMsg\x12$\n\x04info\x18\x01 
\x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogFreshnessResult\"\"\n\rLogCancelLine\x12\x11\n\tconn_name\x18\x01 \x01(\t\"b\n\x10LogCancelLineMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.LogCancelLine\"\x1f\n\x0f\x44\x65\x66\x61ultSelector\x12\x0c\n\x04name\x18\x01 \x01(\t\"f\n\x12\x44\x65\x66\x61ultSelectorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DefaultSelector\"5\n\tNodeStart\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"Z\n\x0cNodeStartMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.NodeStart\"g\n\x0cNodeFinished\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12-\n\nrun_result\x18\x02 \x01(\x0b\x32\x19.proto_types.RunResultMsg\"`\n\x0fNodeFinishedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.NodeFinished\"+\n\x1bQueryCancelationUnsupported\x12\x0c\n\x04type\x18\x01 \x01(\t\"~\n\x1eQueryCancelationUnsupportedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x36\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32(.proto_types.QueryCancelationUnsupported\"O\n\x0f\x43oncurrencyLine\x12\x13\n\x0bnum_threads\x18\x01 \x01(\x05\x12\x13\n\x0btarget_name\x18\x02 \x01(\t\x12\x12\n\nnode_count\x18\x03 \x01(\x05\"f\n\x12\x43oncurrencyLineMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.ConcurrencyLine\"E\n\x19WritingInjectedSQLForNode\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"z\n\x1cWritingInjectedSQLForNodeMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.WritingInjectedSQLForNode\"9\n\rNodeCompiling\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"b\n\x10NodeCompilingMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NodeCompiling\"9\n\rNodeExecuting\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\"b\n\x10NodeExecutingMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12(\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1a.proto_types.NodeExecuting\"m\n\x10LogHookStartLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tstatement\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"h\n\x13LogHookStartLineMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.LogHookStartLine\"\x93\x01\n\x0eLogHookEndLine\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x11\n\tstatement\x18\x02 \x01(\t\x12\x0e\n\x06status\x18\x03 \x01(\t\x12\r\n\x05index\x18\x04 \x01(\x05\x12\r\n\x05total\x18\x05 \x01(\x05\x12\x16\n\x0e\x65xecution_time\x18\x06 \x01(\x02\"d\n\x11LogHookEndLineMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.LogHookEndLine\"\x93\x01\n\x0fSkippingDetails\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x15\n\rresource_type\x18\x02 \x01(\t\x12\x0e\n\x06schema\x18\x03 \x01(\t\x12\x11\n\tnode_name\x18\x04 \x01(\t\x12\r\n\x05index\x18\x05 
\x01(\x05\x12\r\n\x05total\x18\x06 \x01(\x05\"f\n\x12SkippingDetailsMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.SkippingDetails\"\r\n\x0bNothingToDo\"^\n\x0eNothingToDoMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.NothingToDo\",\n\x1dRunningOperationUncaughtError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"\x82\x01\n RunningOperationUncaughtErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x38\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32*.proto_types.RunningOperationUncaughtError\"\x93\x01\n\x0c\x45ndRunResult\x12*\n\x07results\x18\x01 \x03(\x0b\x32\x19.proto_types.RunResultMsg\x12\x14\n\x0c\x65lapsed_time\x18\x02 \x01(\x02\x12\x30\n\x0cgenerated_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07success\x18\x04 \x01(\x08\"`\n\x0f\x45ndRunResultMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.EndRunResult\"\x11\n\x0fNoNodesSelected\"f\n\x12NoNodesSelectedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.NoNodesSelected\"w\n\x10\x43ommandCompleted\x12\x0f\n\x07\x63ommand\x18\x01 \x01(\t\x12\x0f\n\x07success\x18\x02 \x01(\x08\x12\x30\n\x0c\x63ompleted_at\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07\x65lapsed\x18\x04 \x01(\x02\"h\n\x13\x43ommandCompletedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.CommandCompleted\"k\n\x08ShowNode\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x0f\n\x07preview\x18\x02 \x01(\t\x12\x11\n\tis_inline\x18\x03 \x01(\x08\x12\x15\n\routput_format\x18\x04 \x01(\t\x12\x11\n\tunique_id\x18\x05 \x01(\t\"X\n\x0bShowNodeMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12#\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x15.proto_types.ShowNode\"p\n\x0c\x43ompiledNode\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x10\n\x08\x63ompiled\x18\x02 \x01(\t\x12\x11\n\tis_inline\x18\x03 \x01(\x08\x12\x15\n\routput_format\x18\x04 \x01(\t\x12\x11\n\tunique_id\x18\x05 \x01(\t\"`\n\x0f\x43ompiledNodeMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.CompiledNode\"b\n\x17\x43\x61tchableExceptionOnRun\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 \x01(\t\"v\n\x1a\x43\x61tchableExceptionOnRunMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.CatchableExceptionOnRun\"5\n\x12InternalErrorOnRun\x12\x12\n\nbuild_path\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\"l\n\x15InternalErrorOnRunMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.InternalErrorOnRun\"K\n\x15GenericExceptionOnRun\x12\x12\n\nbuild_path\x18\x01 \x01(\t\x12\x11\n\tunique_id\x18\x02 \x01(\t\x12\x0b\n\x03\x65xc\x18\x03 \x01(\t\"r\n\x18GenericExceptionOnRunMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.GenericExceptionOnRun\"N\n\x1aNodeConnectionReleaseError\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x0b\n\x03\x65xc\x18\x02 \x01(\t\x12\x10\n\x08\x65xc_info\x18\x03 
\x01(\t\"|\n\x1dNodeConnectionReleaseErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x35\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\'.proto_types.NodeConnectionReleaseError\"\x1f\n\nFoundStats\x12\x11\n\tstat_line\x18\x01 \x01(\t\"\\\n\rFoundStatsMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.FoundStats\"\x17\n\x15MainKeyboardInterrupt\"r\n\x18MainKeyboardInterruptMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.MainKeyboardInterrupt\"#\n\x14MainEncounteredError\x12\x0b\n\x03\x65xc\x18\x01 \x01(\t\"p\n\x17MainEncounteredErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.MainEncounteredError\"%\n\x0eMainStackTrace\x12\x13\n\x0bstack_trace\x18\x01 \x01(\t\"d\n\x11MainStackTraceMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.MainStackTrace\"@\n\x13SystemCouldNotWrite\x12\x0c\n\x04path\x18\x01 \x01(\t\x12\x0e\n\x06reason\x18\x02 \x01(\t\x12\x0b\n\x03\x65xc\x18\x03 \x01(\t\"n\n\x16SystemCouldNotWriteMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.SystemCouldNotWrite\"!\n\x12SystemExecutingCmd\x12\x0b\n\x03\x63md\x18\x01 \x03(\t\"l\n\x15SystemExecutingCmdMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.SystemExecutingCmd\"\x1c\n\x0cSystemStdOut\x12\x0c\n\x04\x62msg\x18\x01 \x01(\t\"`\n\x0fSystemStdOutMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.SystemStdOut\"\x1c\n\x0cSystemStdErr\x12\x0c\n\x04\x62msg\x18\x01 \x01(\t\"`\n\x0fSystemStdErrMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.SystemStdErr\",\n\x16SystemReportReturnCode\x12\x12\n\nreturncode\x18\x01 \x01(\x05\"t\n\x19SystemReportReturnCodeMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x31\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32#.proto_types.SystemReportReturnCode\"p\n\x13TimingInfoCollected\x12(\n\tnode_info\x18\x01 \x01(\x0b\x32\x15.proto_types.NodeInfo\x12/\n\x0btiming_info\x18\x02 \x01(\x0b\x32\x1a.proto_types.TimingInfoMsg\"n\n\x16TimingInfoCollectedMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.TimingInfoCollected\"&\n\x12LogDebugStackTrace\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"l\n\x15LogDebugStackTraceMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.LogDebugStackTrace\"\x1e\n\x0e\x43heckCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"d\n\x11\x43heckCleanPathMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.CheckCleanPath\" \n\x10\x43onfirmCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"h\n\x13\x43onfirmCleanPathMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.ConfirmCleanPath\"\"\n\x12ProtectedCleanPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"l\n\x15ProtectedCleanPathMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 
\x01(\x0b\x32\x1f.proto_types.ProtectedCleanPath\"\x14\n\x12\x46inishedCleanPaths\"l\n\x15\x46inishedCleanPathsMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.FinishedCleanPaths\"5\n\x0bOpenCommand\x12\x10\n\x08open_cmd\x18\x01 \x01(\t\x12\x14\n\x0cprofiles_dir\x18\x02 \x01(\t\"^\n\x0eOpenCommandMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.OpenCommand\"\x19\n\nFormatting\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\\\n\rFormattingMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.Formatting\"0\n\x0fServingDocsPort\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\"f\n\x12ServingDocsPortMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.ServingDocsPort\"%\n\x15ServingDocsAccessInfo\x12\x0c\n\x04port\x18\x01 \x01(\t\"r\n\x18ServingDocsAccessInfoMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x30\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\".proto_types.ServingDocsAccessInfo\"\x15\n\x13ServingDocsExitInfo\"n\n\x16ServingDocsExitInfoMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.ServingDocsExitInfo\"J\n\x10RunResultWarning\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\"h\n\x13RunResultWarningMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.RunResultWarning\"J\n\x10RunResultFailure\x12\x15\n\rresource_type\x18\x01 \x01(\t\x12\x11\n\tnode_name\x18\x02 \x01(\t\x12\x0c\n\x04path\x18\x03 \x01(\t\"h\n\x13RunResultFailureMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.RunResultFailure\"k\n\tStatsLine\x12\x30\n\x05stats\x18\x01 \x03(\x0b\x32!.proto_types.StatsLine.StatsEntry\x1a,\n\nStatsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"Z\n\x0cStatsLineMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12$\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x16.proto_types.StatsLine\"\x1d\n\x0eRunResultError\x12\x0b\n\x03msg\x18\x01 \x01(\t\"d\n\x11RunResultErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.RunResultError\")\n\x17RunResultErrorNoMessage\x12\x0e\n\x06status\x18\x01 \x01(\t\"v\n\x1aRunResultErrorNoMessageMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.RunResultErrorNoMessage\"\x1f\n\x0fSQLCompiledPath\x12\x0c\n\x04path\x18\x01 \x01(\t\"f\n\x12SQLCompiledPathMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.SQLCompiledPath\"-\n\x14\x43heckNodeTestFailure\x12\x15\n\rrelation_name\x18\x01 \x01(\t\"p\n\x17\x43heckNodeTestFailureMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12/\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32!.proto_types.CheckNodeTestFailure\"\"\n\x13\x46irstRunResultError\x12\x0b\n\x03msg\x18\x01 \x01(\t\"n\n\x16\x46irstRunResultErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 
.proto_types.FirstRunResultError\"\'\n\x18\x41\x66terFirstRunResultError\x12\x0b\n\x03msg\x18\x01 \x01(\t\"x\n\x1b\x41\x66terFirstRunResultErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.AfterFirstRunResultError\"W\n\x0f\x45ndOfRunSummary\x12\x12\n\nnum_errors\x18\x01 \x01(\x05\x12\x14\n\x0cnum_warnings\x18\x02 \x01(\x05\x12\x1a\n\x12keyboard_interrupt\x18\x03 \x01(\x08\"f\n\x12\x45ndOfRunSummaryMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.EndOfRunSummary\"U\n\x13LogSkipBecauseError\x12\x0e\n\x06schema\x18\x01 \x01(\t\x12\x10\n\x08relation\x18\x02 \x01(\t\x12\r\n\x05index\x18\x03 \x01(\x05\x12\r\n\x05total\x18\x04 \x01(\x05\"n\n\x16LogSkipBecauseErrorMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12.\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32 .proto_types.LogSkipBecauseError\"\x14\n\x12\x45nsureGitInstalled\"l\n\x15\x45nsureGitInstalledMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.EnsureGitInstalled\"\x1a\n\x18\x44\x65psCreatingLocalSymlink\"x\n\x1b\x44\x65psCreatingLocalSymlinkMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x33\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32%.proto_types.DepsCreatingLocalSymlink\"\x19\n\x17\x44\x65psSymlinkNotAvailable\"v\n\x1a\x44\x65psSymlinkNotAvailableMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.DepsSymlinkNotAvailable\"\x11\n\x0f\x44isableTracking\"f\n\x12\x44isableTrackingMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12*\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1c.proto_types.DisableTracking\"\x1e\n\x0cSendingEvent\x12\x0e\n\x06kwargs\x18\x01 \x01(\t\"`\n\x0fSendingEventMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\'\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x19.proto_types.SendingEvent\"\x12\n\x10SendEventFailure\"h\n\x13SendEventFailureMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12+\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1d.proto_types.SendEventFailure\"\r\n\x0b\x46lushEvents\"^\n\x0e\x46lushEventsMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.FlushEvents\"\x14\n\x12\x46lushEventsFailure\"l\n\x15\x46lushEventsFailureMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12-\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1f.proto_types.FlushEventsFailure\"-\n\x19TrackingInitializeFailure\x12\x10\n\x08\x65xc_info\x18\x01 \x01(\t\"z\n\x1cTrackingInitializeFailureMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x34\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32&.proto_types.TrackingInitializeFailure\"&\n\x17RunResultWarningMessage\x12\x0b\n\x03msg\x18\x01 \x01(\t\"v\n\x1aRunResultWarningMessageMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x32\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32$.proto_types.RunResultWarningMessage\"\x1a\n\x0b\x44\x65\x62ugCmdOut\x12\x0b\n\x03msg\x18\x01 \x01(\t\"^\n\x0e\x44\x65\x62ugCmdOutMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12&\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x18.proto_types.DebugCmdOut\"\x1d\n\x0e\x44\x65\x62ugCmdResult\x12\x0b\n\x03msg\x18\x01 \x01(\t\"d\n\x11\x44\x65\x62ugCmdResultMsg\x12$\n\x04info\x18\x01 
\x01(\x0b\x32\x16.proto_types.EventInfo\x12)\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x1b.proto_types.DebugCmdResult\"\x19\n\nListCmdOut\x12\x0b\n\x03msg\x18\x01 \x01(\t\"\\\n\rListCmdOutMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12%\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x17.proto_types.ListCmdOut\"\x13\n\x04Note\x12\x0b\n\x03msg\x18\x01 \x01(\t\"P\n\x07NoteMsg\x12$\n\x04info\x18\x01 \x01(\x0b\x32\x16.proto_types.EventInfo\x12\x1f\n\x04\x64\x61ta\x18\x02 \x01(\x0b\x32\x11.proto_types.Noteb\x06proto3') - -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'types_pb2', globals()) -if _descriptor._USE_C_DESCRIPTORS == False: - - DESCRIPTOR._options = None - _EVENTINFO_EXTRAENTRY._options = None - _EVENTINFO_EXTRAENTRY._serialized_options = b'8\001' - _MAINREPORTARGS_ARGSENTRY._options = None - _MAINREPORTARGS_ARGSENTRY._serialized_options = b'8\001' - _CACHEDUMPGRAPH_DUMPENTRY._options = None - _CACHEDUMPGRAPH_DUMPENTRY._serialized_options = b'8\001' - _PARTIALPARSINGERROR_EXCINFOENTRY._options = None - _PARTIALPARSINGERROR_EXCINFOENTRY._serialized_options = b'8\001' - _LOGSNAPSHOTRESULT_CFGENTRY._options = None - _LOGSNAPSHOTRESULT_CFGENTRY._serialized_options = b'8\001' - _STATSLINE_STATSENTRY._options = None - _STATSLINE_STATSENTRY._serialized_options = b'8\001' - _EVENTINFO._serialized_start=92 - _EVENTINFO._serialized_end=365 - _EVENTINFO_EXTRAENTRY._serialized_start=321 - _EVENTINFO_EXTRAENTRY._serialized_end=365 - _TIMINGINFOMSG._serialized_start=367 - _TIMINGINFOMSG._serialized_end=494 - _NODERELATION._serialized_start=496 - _NODERELATION._serialized_end=582 - _NODEINFO._serialized_start=585 - _NODEINFO._serialized_end=858 - _RUNRESULTMSG._serialized_start=861 - _RUNRESULTMSG._serialized_end=1070 - _REFERENCEKEYMSG._serialized_start=1072 - _REFERENCEKEYMSG._serialized_end=1143 - _GENERICMESSAGE._serialized_start=1145 - _GENERICMESSAGE._serialized_end=1199 - _MAINREPORTVERSION._serialized_start=1201 - _MAINREPORTVERSION._serialized_end=1258 - _MAINREPORTVERSIONMSG._serialized_start=1260 - _MAINREPORTVERSIONMSG._serialized_end=1366 - _MAINREPORTARGS._serialized_start=1368 - _MAINREPORTARGS._serialized_end=1482 - _MAINREPORTARGS_ARGSENTRY._serialized_start=1439 - _MAINREPORTARGS_ARGSENTRY._serialized_end=1482 - _MAINREPORTARGSMSG._serialized_start=1484 - _MAINREPORTARGSMSG._serialized_end=1584 - _MAINTRACKINGUSERSTATE._serialized_start=1586 - _MAINTRACKINGUSERSTATE._serialized_end=1629 - _MAINTRACKINGUSERSTATEMSG._serialized_start=1631 - _MAINTRACKINGUSERSTATEMSG._serialized_end=1745 - _MERGEDFROMSTATE._serialized_start=1747 - _MERGEDFROMSTATE._serialized_end=1800 - _MERGEDFROMSTATEMSG._serialized_start=1802 - _MERGEDFROMSTATEMSG._serialized_end=1904 - _MISSINGPROFILETARGET._serialized_start=1906 - _MISSINGPROFILETARGET._serialized_end=1971 - _MISSINGPROFILETARGETMSG._serialized_start=1973 - _MISSINGPROFILETARGETMSG._serialized_end=2085 - _INVALIDOPTIONYAML._serialized_start=2087 - _INVALIDOPTIONYAML._serialized_end=2127 - _INVALIDOPTIONYAMLMSG._serialized_start=2129 - _INVALIDOPTIONYAMLMSG._serialized_end=2235 - _LOGDBTPROJECTERROR._serialized_start=2237 - _LOGDBTPROJECTERROR._serialized_end=2270 - _LOGDBTPROJECTERRORMSG._serialized_start=2272 - _LOGDBTPROJECTERRORMSG._serialized_end=2380 - _LOGDBTPROFILEERROR._serialized_start=2382 - _LOGDBTPROFILEERROR._serialized_end=2433 - _LOGDBTPROFILEERRORMSG._serialized_start=2435 - _LOGDBTPROFILEERRORMSG._serialized_end=2543 - 
_STARTERPROJECTPATH._serialized_start=2545 - _STARTERPROJECTPATH._serialized_end=2578 - _STARTERPROJECTPATHMSG._serialized_start=2580 - _STARTERPROJECTPATHMSG._serialized_end=2688 - _CONFIGFOLDERDIRECTORY._serialized_start=2690 - _CONFIGFOLDERDIRECTORY._serialized_end=2726 - _CONFIGFOLDERDIRECTORYMSG._serialized_start=2728 - _CONFIGFOLDERDIRECTORYMSG._serialized_end=2842 - _NOSAMPLEPROFILEFOUND._serialized_start=2844 - _NOSAMPLEPROFILEFOUND._serialized_end=2883 - _NOSAMPLEPROFILEFOUNDMSG._serialized_start=2885 - _NOSAMPLEPROFILEFOUNDMSG._serialized_end=2997 - _PROFILEWRITTENWITHSAMPLE._serialized_start=2999 - _PROFILEWRITTENWITHSAMPLE._serialized_end=3053 - _PROFILEWRITTENWITHSAMPLEMSG._serialized_start=3055 - _PROFILEWRITTENWITHSAMPLEMSG._serialized_end=3175 - _PROFILEWRITTENWITHTARGETTEMPLATEYAML._serialized_start=3177 - _PROFILEWRITTENWITHTARGETTEMPLATEYAML._serialized_end=3243 - _PROFILEWRITTENWITHTARGETTEMPLATEYAMLMSG._serialized_start=3246 - _PROFILEWRITTENWITHTARGETTEMPLATEYAMLMSG._serialized_end=3390 - _PROFILEWRITTENWITHPROJECTTEMPLATEYAML._serialized_start=3392 - _PROFILEWRITTENWITHPROJECTTEMPLATEYAML._serialized_end=3459 - _PROFILEWRITTENWITHPROJECTTEMPLATEYAMLMSG._serialized_start=3462 - _PROFILEWRITTENWITHPROJECTTEMPLATEYAMLMSG._serialized_end=3608 - _SETTINGUPPROFILE._serialized_start=3610 - _SETTINGUPPROFILE._serialized_end=3628 - _SETTINGUPPROFILEMSG._serialized_start=3630 - _SETTINGUPPROFILEMSG._serialized_end=3734 - _INVALIDPROFILETEMPLATEYAML._serialized_start=3736 - _INVALIDPROFILETEMPLATEYAML._serialized_end=3764 - _INVALIDPROFILETEMPLATEYAMLMSG._serialized_start=3766 - _INVALIDPROFILETEMPLATEYAMLMSG._serialized_end=3890 - _PROJECTNAMEALREADYEXISTS._serialized_start=3892 - _PROJECTNAMEALREADYEXISTS._serialized_end=3932 - _PROJECTNAMEALREADYEXISTSMSG._serialized_start=3934 - _PROJECTNAMEALREADYEXISTSMSG._serialized_end=4054 - _PROJECTCREATED._serialized_start=4056 - _PROJECTCREATED._serialized_end=4131 - _PROJECTCREATEDMSG._serialized_start=4133 - _PROJECTCREATEDMSG._serialized_end=4233 - _PACKAGEREDIRECTDEPRECATION._serialized_start=4235 - _PACKAGEREDIRECTDEPRECATION._serialized_end=4299 - _PACKAGEREDIRECTDEPRECATIONMSG._serialized_start=4301 - _PACKAGEREDIRECTDEPRECATIONMSG._serialized_end=4425 - _PACKAGEINSTALLPATHDEPRECATION._serialized_start=4427 - _PACKAGEINSTALLPATHDEPRECATION._serialized_end=4458 - _PACKAGEINSTALLPATHDEPRECATIONMSG._serialized_start=4461 - _PACKAGEINSTALLPATHDEPRECATIONMSG._serialized_end=4591 - _CONFIGSOURCEPATHDEPRECATION._serialized_start=4593 - _CONFIGSOURCEPATHDEPRECATION._serialized_end=4665 - _CONFIGSOURCEPATHDEPRECATIONMSG._serialized_start=4667 - _CONFIGSOURCEPATHDEPRECATIONMSG._serialized_end=4793 - _CONFIGDATAPATHDEPRECATION._serialized_start=4795 - _CONFIGDATAPATHDEPRECATION._serialized_end=4865 - _CONFIGDATAPATHDEPRECATIONMSG._serialized_start=4867 - _CONFIGDATAPATHDEPRECATIONMSG._serialized_end=4989 - _ADAPTERDEPRECATIONWARNING._serialized_start=4991 - _ADAPTERDEPRECATIONWARNING._serialized_end=5054 - _ADAPTERDEPRECATIONWARNINGMSG._serialized_start=5056 - _ADAPTERDEPRECATIONWARNINGMSG._serialized_end=5178 - _METRICATTRIBUTESRENAMED._serialized_start=5180 - _METRICATTRIBUTESRENAMED._serialized_end=5226 - _METRICATTRIBUTESRENAMEDMSG._serialized_start=5228 - _METRICATTRIBUTESRENAMEDMSG._serialized_end=5346 - _EXPOSURENAMEDEPRECATION._serialized_start=5348 - _EXPOSURENAMEDEPRECATION._serialized_end=5391 - _EXPOSURENAMEDEPRECATIONMSG._serialized_start=5393 - _EXPOSURENAMEDEPRECATIONMSG._serialized_end=5511 - 
_INTERNALDEPRECATION._serialized_start=5513 - _INTERNALDEPRECATION._serialized_end=5607 - _INTERNALDEPRECATIONMSG._serialized_start=5609 - _INTERNALDEPRECATIONMSG._serialized_end=5719 - _ENVIRONMENTVARIABLERENAMED._serialized_start=5721 - _ENVIRONMENTVARIABLERENAMED._serialized_end=5785 - _ENVIRONMENTVARIABLERENAMEDMSG._serialized_start=5787 - _ENVIRONMENTVARIABLERENAMEDMSG._serialized_end=5911 - _CONFIGLOGPATHDEPRECATION._serialized_start=5913 - _CONFIGLOGPATHDEPRECATION._serialized_end=5964 - _CONFIGLOGPATHDEPRECATIONMSG._serialized_start=5966 - _CONFIGLOGPATHDEPRECATIONMSG._serialized_end=6086 - _CONFIGTARGETPATHDEPRECATION._serialized_start=6088 - _CONFIGTARGETPATHDEPRECATION._serialized_end=6142 - _CONFIGTARGETPATHDEPRECATIONMSG._serialized_start=6144 - _CONFIGTARGETPATHDEPRECATIONMSG._serialized_end=6270 - _COLLECTFRESHNESSRETURNSIGNATURE._serialized_start=6272 - _COLLECTFRESHNESSRETURNSIGNATURE._serialized_end=6305 - _COLLECTFRESHNESSRETURNSIGNATUREMSG._serialized_start=6308 - _COLLECTFRESHNESSRETURNSIGNATUREMSG._serialized_end=6442 - _ADAPTEREVENTDEBUG._serialized_start=6445 - _ADAPTEREVENTDEBUG._serialized_end=6580 - _ADAPTEREVENTDEBUGMSG._serialized_start=6582 - _ADAPTEREVENTDEBUGMSG._serialized_end=6688 - _ADAPTEREVENTINFO._serialized_start=6691 - _ADAPTEREVENTINFO._serialized_end=6825 - _ADAPTEREVENTINFOMSG._serialized_start=6827 - _ADAPTEREVENTINFOMSG._serialized_end=6931 - _ADAPTEREVENTWARNING._serialized_start=6934 - _ADAPTEREVENTWARNING._serialized_end=7071 - _ADAPTEREVENTWARNINGMSG._serialized_start=7073 - _ADAPTEREVENTWARNINGMSG._serialized_end=7183 - _ADAPTEREVENTERROR._serialized_start=7186 - _ADAPTEREVENTERROR._serialized_end=7339 - _ADAPTEREVENTERRORMSG._serialized_start=7341 - _ADAPTEREVENTERRORMSG._serialized_end=7447 - _NEWCONNECTION._serialized_start=7449 - _NEWCONNECTION._serialized_end=7544 - _NEWCONNECTIONMSG._serialized_start=7546 - _NEWCONNECTIONMSG._serialized_end=7644 - _CONNECTIONREUSED._serialized_start=7646 - _CONNECTIONREUSED._serialized_end=7707 - _CONNECTIONREUSEDMSG._serialized_start=7709 - _CONNECTIONREUSEDMSG._serialized_end=7813 - _CONNECTIONLEFTOPENINCLEANUP._serialized_start=7815 - _CONNECTIONLEFTOPENINCLEANUP._serialized_end=7863 - _CONNECTIONLEFTOPENINCLEANUPMSG._serialized_start=7865 - _CONNECTIONLEFTOPENINCLEANUPMSG._serialized_end=7991 - _CONNECTIONCLOSEDINCLEANUP._serialized_start=7993 - _CONNECTIONCLOSEDINCLEANUP._serialized_end=8039 - _CONNECTIONCLOSEDINCLEANUPMSG._serialized_start=8041 - _CONNECTIONCLOSEDINCLEANUPMSG._serialized_end=8163 - _ROLLBACKFAILED._serialized_start=8165 - _ROLLBACKFAILED._serialized_end=8260 - _ROLLBACKFAILEDMSG._serialized_start=8262 - _ROLLBACKFAILEDMSG._serialized_end=8362 - _CONNECTIONCLOSED._serialized_start=8364 - _CONNECTIONCLOSED._serialized_end=8443 - _CONNECTIONCLOSEDMSG._serialized_start=8445 - _CONNECTIONCLOSEDMSG._serialized_end=8549 - _CONNECTIONLEFTOPEN._serialized_start=8551 - _CONNECTIONLEFTOPEN._serialized_end=8632 - _CONNECTIONLEFTOPENMSG._serialized_start=8634 - _CONNECTIONLEFTOPENMSG._serialized_end=8742 - _ROLLBACK._serialized_start=8744 - _ROLLBACK._serialized_end=8815 - _ROLLBACKMSG._serialized_start=8817 - _ROLLBACKMSG._serialized_end=8905 - _CACHEMISS._serialized_start=8907 - _CACHEMISS._serialized_end=8971 - _CACHEMISSMSG._serialized_start=8973 - _CACHEMISSMSG._serialized_end=9063 - _LISTRELATIONS._serialized_start=9065 - _LISTRELATIONS._serialized_end=9163 - _LISTRELATIONSMSG._serialized_start=9165 - _LISTRELATIONSMSG._serialized_end=9263 - _CONNECTIONUSED._serialized_start=9265 - 
_CONNECTIONUSED._serialized_end=9361 - _CONNECTIONUSEDMSG._serialized_start=9363 - _CONNECTIONUSEDMSG._serialized_end=9463 - _SQLQUERY._serialized_start=9465 - _SQLQUERY._serialized_end=9549 - _SQLQUERYMSG._serialized_start=9551 - _SQLQUERYMSG._serialized_end=9639 - _SQLQUERYSTATUS._serialized_start=9641 - _SQLQUERYSTATUS._serialized_end=9732 - _SQLQUERYSTATUSMSG._serialized_start=9734 - _SQLQUERYSTATUSMSG._serialized_end=9834 - _SQLCOMMIT._serialized_start=9836 - _SQLCOMMIT._serialized_end=9908 - _SQLCOMMITMSG._serialized_start=9910 - _SQLCOMMITMSG._serialized_end=10000 - _COLTYPECHANGE._serialized_start=10002 - _COLTYPECHANGE._serialized_end=10099 - _COLTYPECHANGEMSG._serialized_start=10101 - _COLTYPECHANGEMSG._serialized_end=10199 - _SCHEMACREATION._serialized_start=10201 - _SCHEMACREATION._serialized_end=10265 - _SCHEMACREATIONMSG._serialized_start=10267 - _SCHEMACREATIONMSG._serialized_end=10367 - _SCHEMADROP._serialized_start=10369 - _SCHEMADROP._serialized_end=10429 - _SCHEMADROPMSG._serialized_start=10431 - _SCHEMADROPMSG._serialized_end=10523 - _CACHEACTION._serialized_start=10526 - _CACHEACTION._serialized_end=10748 - _CACHEACTIONMSG._serialized_start=10750 - _CACHEACTIONMSG._serialized_end=10844 - _CACHEDUMPGRAPH._serialized_start=10847 - _CACHEDUMPGRAPH._serialized_end=10999 - _CACHEDUMPGRAPH_DUMPENTRY._serialized_start=10956 - _CACHEDUMPGRAPH_DUMPENTRY._serialized_end=10999 - _CACHEDUMPGRAPHMSG._serialized_start=11001 - _CACHEDUMPGRAPHMSG._serialized_end=11101 - _ADAPTERREGISTERED._serialized_start=11103 - _ADAPTERREGISTERED._serialized_end=11169 - _ADAPTERREGISTEREDMSG._serialized_start=11171 - _ADAPTERREGISTEREDMSG._serialized_end=11277 - _ADAPTERIMPORTERROR._serialized_start=11279 - _ADAPTERIMPORTERROR._serialized_end=11312 - _ADAPTERIMPORTERRORMSG._serialized_start=11314 - _ADAPTERIMPORTERRORMSG._serialized_end=11422 - _PLUGINLOADERROR._serialized_start=11424 - _PLUGINLOADERROR._serialized_end=11459 - _PLUGINLOADERRORMSG._serialized_start=11461 - _PLUGINLOADERRORMSG._serialized_end=11563 - _NEWCONNECTIONOPENING._serialized_start=11565 - _NEWCONNECTIONOPENING._serialized_end=11655 - _NEWCONNECTIONOPENINGMSG._serialized_start=11657 - _NEWCONNECTIONOPENINGMSG._serialized_end=11769 - _CODEEXECUTION._serialized_start=11771 - _CODEEXECUTION._serialized_end=11827 - _CODEEXECUTIONMSG._serialized_start=11829 - _CODEEXECUTIONMSG._serialized_end=11927 - _CODEEXECUTIONSTATUS._serialized_start=11929 - _CODEEXECUTIONSTATUS._serialized_end=11983 - _CODEEXECUTIONSTATUSMSG._serialized_start=11985 - _CODEEXECUTIONSTATUSMSG._serialized_end=12095 - _CATALOGGENERATIONERROR._serialized_start=12097 - _CATALOGGENERATIONERROR._serialized_end=12134 - _CATALOGGENERATIONERRORMSG._serialized_start=12136 - _CATALOGGENERATIONERRORMSG._serialized_end=12252 - _WRITECATALOGFAILURE._serialized_start=12254 - _WRITECATALOGFAILURE._serialized_end=12299 - _WRITECATALOGFAILUREMSG._serialized_start=12301 - _WRITECATALOGFAILUREMSG._serialized_end=12411 - _CATALOGWRITTEN._serialized_start=12413 - _CATALOGWRITTEN._serialized_end=12443 - _CATALOGWRITTENMSG._serialized_start=12445 - _CATALOGWRITTENMSG._serialized_end=12545 - _CANNOTGENERATEDOCS._serialized_start=12547 - _CANNOTGENERATEDOCS._serialized_end=12567 - _CANNOTGENERATEDOCSMSG._serialized_start=12569 - _CANNOTGENERATEDOCSMSG._serialized_end=12677 - _BUILDINGCATALOG._serialized_start=12679 - _BUILDINGCATALOG._serialized_end=12696 - _BUILDINGCATALOGMSG._serialized_start=12698 - _BUILDINGCATALOGMSG._serialized_end=12800 - 
_DATABASEERRORRUNNINGHOOK._serialized_start=12802 - _DATABASEERRORRUNNINGHOOK._serialized_end=12847 - _DATABASEERRORRUNNINGHOOKMSG._serialized_start=12849 - _DATABASEERRORRUNNINGHOOKMSG._serialized_end=12969 - _HOOKSRUNNING._serialized_start=12971 - _HOOKSRUNNING._serialized_end=13023 - _HOOKSRUNNINGMSG._serialized_start=13025 - _HOOKSRUNNINGMSG._serialized_end=13121 - _FINISHEDRUNNINGSTATS._serialized_start=13123 - _FINISHEDRUNNINGSTATS._serialized_end=13207 - _FINISHEDRUNNINGSTATSMSG._serialized_start=13209 - _FINISHEDRUNNINGSTATSMSG._serialized_end=13321 - _CONSTRAINTNOTENFORCED._serialized_start=13323 - _CONSTRAINTNOTENFORCED._serialized_end=13383 - _CONSTRAINTNOTENFORCEDMSG._serialized_start=13385 - _CONSTRAINTNOTENFORCEDMSG._serialized_end=13499 - _CONSTRAINTNOTSUPPORTED._serialized_start=13501 - _CONSTRAINTNOTSUPPORTED._serialized_end=13562 - _CONSTRAINTNOTSUPPORTEDMSG._serialized_start=13564 - _CONSTRAINTNOTSUPPORTEDMSG._serialized_end=13680 - _INPUTFILEDIFFERROR._serialized_start=13682 - _INPUTFILEDIFFERROR._serialized_end=13737 - _INPUTFILEDIFFERRORMSG._serialized_start=13739 - _INPUTFILEDIFFERRORMSG._serialized_end=13847 - _INVALIDVALUEFORFIELD._serialized_start=13849 - _INVALIDVALUEFORFIELD._serialized_end=13912 - _INVALIDVALUEFORFIELDMSG._serialized_start=13914 - _INVALIDVALUEFORFIELDMSG._serialized_end=14026 - _VALIDATIONWARNING._serialized_start=14028 - _VALIDATIONWARNING._serialized_end=14109 - _VALIDATIONWARNINGMSG._serialized_start=14111 - _VALIDATIONWARNINGMSG._serialized_end=14217 - _PARSEPERFINFOPATH._serialized_start=14219 - _PARSEPERFINFOPATH._serialized_end=14252 - _PARSEPERFINFOPATHMSG._serialized_start=14254 - _PARSEPERFINFOPATHMSG._serialized_end=14360 - _PARTIALPARSINGERRORPROCESSINGFILE._serialized_start=14362 - _PARTIALPARSINGERRORPROCESSINGFILE._serialized_end=14411 - _PARTIALPARSINGERRORPROCESSINGFILEMSG._serialized_start=14414 - _PARTIALPARSINGERRORPROCESSINGFILEMSG._serialized_end=14552 - _PARTIALPARSINGERROR._serialized_start=14555 - _PARTIALPARSINGERROR._serialized_end=14689 - _PARTIALPARSINGERROR_EXCINFOENTRY._serialized_start=14643 - _PARTIALPARSINGERROR_EXCINFOENTRY._serialized_end=14689 - _PARTIALPARSINGERRORMSG._serialized_start=14691 - _PARTIALPARSINGERRORMSG._serialized_end=14801 - _PARTIALPARSINGSKIPPARSING._serialized_start=14803 - _PARTIALPARSINGSKIPPARSING._serialized_end=14830 - _PARTIALPARSINGSKIPPARSINGMSG._serialized_start=14832 - _PARTIALPARSINGSKIPPARSINGMSG._serialized_end=14954 - _UNABLETOPARTIALPARSE._serialized_start=14956 - _UNABLETOPARTIALPARSE._serialized_end=14994 - _UNABLETOPARTIALPARSEMSG._serialized_start=14996 - _UNABLETOPARTIALPARSEMSG._serialized_end=15108 - _STATECHECKVARSHASH._serialized_start=15110 - _STATECHECKVARSHASH._serialized_end=15212 - _STATECHECKVARSHASHMSG._serialized_start=15214 - _STATECHECKVARSHASHMSG._serialized_end=15322 - _PARTIALPARSINGNOTENABLED._serialized_start=15324 - _PARTIALPARSINGNOTENABLED._serialized_end=15350 - _PARTIALPARSINGNOTENABLEDMSG._serialized_start=15352 - _PARTIALPARSINGNOTENABLEDMSG._serialized_end=15472 - _PARSEDFILELOADFAILED._serialized_start=15474 - _PARSEDFILELOADFAILED._serialized_end=15541 - _PARSEDFILELOADFAILEDMSG._serialized_start=15543 - _PARSEDFILELOADFAILEDMSG._serialized_end=15655 - _PARTIALPARSINGENABLED._serialized_start=15657 - _PARTIALPARSINGENABLED._serialized_end=15729 - _PARTIALPARSINGENABLEDMSG._serialized_start=15731 - _PARTIALPARSINGENABLEDMSG._serialized_end=15845 - _PARTIALPARSINGFILE._serialized_start=15847 - _PARTIALPARSINGFILE._serialized_end=15903 - 
_PARTIALPARSINGFILEMSG._serialized_start=15905 - _PARTIALPARSINGFILEMSG._serialized_end=16013 - _INVALIDDISABLEDTARGETINTESTNODE._serialized_start=16016 - _INVALIDDISABLEDTARGETINTESTNODE._serialized_end=16191 - _INVALIDDISABLEDTARGETINTESTNODEMSG._serialized_start=16194 - _INVALIDDISABLEDTARGETINTESTNODEMSG._serialized_end=16328 - _UNUSEDRESOURCECONFIGPATH._serialized_start=16330 - _UNUSEDRESOURCECONFIGPATH._serialized_end=16385 - _UNUSEDRESOURCECONFIGPATHMSG._serialized_start=16387 - _UNUSEDRESOURCECONFIGPATHMSG._serialized_end=16507 - _SEEDINCREASED._serialized_start=16509 - _SEEDINCREASED._serialized_end=16560 - _SEEDINCREASEDMSG._serialized_start=16562 - _SEEDINCREASEDMSG._serialized_end=16660 - _SEEDEXCEEDSLIMITSAMEPATH._serialized_start=16662 - _SEEDEXCEEDSLIMITSAMEPATH._serialized_end=16724 - _SEEDEXCEEDSLIMITSAMEPATHMSG._serialized_start=16726 - _SEEDEXCEEDSLIMITSAMEPATHMSG._serialized_end=16846 - _SEEDEXCEEDSLIMITANDPATHCHANGED._serialized_start=16848 - _SEEDEXCEEDSLIMITANDPATHCHANGED._serialized_end=16916 - _SEEDEXCEEDSLIMITANDPATHCHANGEDMSG._serialized_start=16919 - _SEEDEXCEEDSLIMITANDPATHCHANGEDMSG._serialized_end=17051 - _SEEDEXCEEDSLIMITCHECKSUMCHANGED._serialized_start=17053 - _SEEDEXCEEDSLIMITCHECKSUMCHANGED._serialized_end=17145 - _SEEDEXCEEDSLIMITCHECKSUMCHANGEDMSG._serialized_start=17148 - _SEEDEXCEEDSLIMITCHECKSUMCHANGEDMSG._serialized_end=17282 - _UNUSEDTABLES._serialized_start=17284 - _UNUSEDTABLES._serialized_end=17321 - _UNUSEDTABLESMSG._serialized_start=17323 - _UNUSEDTABLESMSG._serialized_end=17419 - _WRONGRESOURCESCHEMAFILE._serialized_start=17422 - _WRONGRESOURCESCHEMAFILE._serialized_end=17557 - _WRONGRESOURCESCHEMAFILEMSG._serialized_start=17559 - _WRONGRESOURCESCHEMAFILEMSG._serialized_end=17677 - _NONODEFORYAMLKEY._serialized_start=17679 - _NONODEFORYAMLKEY._serialized_end=17754 - _NONODEFORYAMLKEYMSG._serialized_start=17756 - _NONODEFORYAMLKEYMSG._serialized_end=17860 - _MACRONOTFOUNDFORPATCH._serialized_start=17862 - _MACRONOTFOUNDFORPATCH._serialized_end=17905 - _MACRONOTFOUNDFORPATCHMSG._serialized_start=17907 - _MACRONOTFOUNDFORPATCHMSG._serialized_end=18021 - _NODENOTFOUNDORDISABLED._serialized_start=18024 - _NODENOTFOUNDORDISABLED._serialized_end=18208 - _NODENOTFOUNDORDISABLEDMSG._serialized_start=18210 - _NODENOTFOUNDORDISABLEDMSG._serialized_end=18326 - _JINJALOGWARNING._serialized_start=18328 - _JINJALOGWARNING._serialized_end=18400 - _JINJALOGWARNINGMSG._serialized_start=18402 - _JINJALOGWARNINGMSG._serialized_end=18504 - _JINJALOGINFO._serialized_start=18506 - _JINJALOGINFO._serialized_end=18575 - _JINJALOGINFOMSG._serialized_start=18577 - _JINJALOGINFOMSG._serialized_end=18673 - _JINJALOGDEBUG._serialized_start=18675 - _JINJALOGDEBUG._serialized_end=18745 - _JINJALOGDEBUGMSG._serialized_start=18747 - _JINJALOGDEBUGMSG._serialized_end=18845 - _UNPINNEDREFNEWVERSIONAVAILABLE._serialized_start=18848 - _UNPINNEDREFNEWVERSIONAVAILABLE._serialized_end=19022 - _UNPINNEDREFNEWVERSIONAVAILABLEMSG._serialized_start=19025 - _UNPINNEDREFNEWVERSIONAVAILABLEMSG._serialized_end=19157 - _DEPRECATEDMODEL._serialized_start=19159 - _DEPRECATEDMODEL._serialized_end=19245 - _DEPRECATEDMODELMSG._serialized_start=19247 - _DEPRECATEDMODELMSG._serialized_end=19349 - _UPCOMINGREFERENCEDEPRECATION._serialized_start=19352 - _UPCOMINGREFERENCEDEPRECATION._serialized_end=19550 - _UPCOMINGREFERENCEDEPRECATIONMSG._serialized_start=19553 - _UPCOMINGREFERENCEDEPRECATIONMSG._serialized_end=19681 - _DEPRECATEDREFERENCE._serialized_start=19684 - 
_DEPRECATEDREFERENCE._serialized_end=19873 - _DEPRECATEDREFERENCEMSG._serialized_start=19875 - _DEPRECATEDREFERENCEMSG._serialized_end=19985 - _UNSUPPORTEDCONSTRAINTMATERIALIZATION._serialized_start=19987 - _UNSUPPORTEDCONSTRAINTMATERIALIZATION._serialized_end=20047 - _UNSUPPORTEDCONSTRAINTMATERIALIZATIONMSG._serialized_start=20050 - _UNSUPPORTEDCONSTRAINTMATERIALIZATIONMSG._serialized_end=20194 - _PARSEINLINENODEERROR._serialized_start=20196 - _PARSEINLINENODEERROR._serialized_end=20273 - _PARSEINLINENODEERRORMSG._serialized_start=20275 - _PARSEINLINENODEERRORMSG._serialized_end=20387 - _SEMANTICVALIDATIONFAILURE._serialized_start=20389 - _SEMANTICVALIDATIONFAILURE._serialized_end=20429 - _SEMANTICVALIDATIONFAILUREMSG._serialized_start=20431 - _SEMANTICVALIDATIONFAILUREMSG._serialized_end=20553 - _GITSPARSECHECKOUTSUBDIRECTORY._serialized_start=20555 - _GITSPARSECHECKOUTSUBDIRECTORY._serialized_end=20602 - _GITSPARSECHECKOUTSUBDIRECTORYMSG._serialized_start=20605 - _GITSPARSECHECKOUTSUBDIRECTORYMSG._serialized_end=20735 - _GITPROGRESSCHECKOUTREVISION._serialized_start=20737 - _GITPROGRESSCHECKOUTREVISION._serialized_end=20784 - _GITPROGRESSCHECKOUTREVISIONMSG._serialized_start=20786 - _GITPROGRESSCHECKOUTREVISIONMSG._serialized_end=20912 - _GITPROGRESSUPDATINGEXISTINGDEPENDENCY._serialized_start=20914 - _GITPROGRESSUPDATINGEXISTINGDEPENDENCY._serialized_end=20966 - _GITPROGRESSUPDATINGEXISTINGDEPENDENCYMSG._serialized_start=20969 - _GITPROGRESSUPDATINGEXISTINGDEPENDENCYMSG._serialized_end=21115 - _GITPROGRESSPULLINGNEWDEPENDENCY._serialized_start=21117 - _GITPROGRESSPULLINGNEWDEPENDENCY._serialized_end=21163 - _GITPROGRESSPULLINGNEWDEPENDENCYMSG._serialized_start=21166 - _GITPROGRESSPULLINGNEWDEPENDENCYMSG._serialized_end=21300 - _GITNOTHINGTODO._serialized_start=21302 - _GITNOTHINGTODO._serialized_end=21331 - _GITNOTHINGTODOMSG._serialized_start=21333 - _GITNOTHINGTODOMSG._serialized_end=21433 - _GITPROGRESSUPDATEDCHECKOUTRANGE._serialized_start=21435 - _GITPROGRESSUPDATEDCHECKOUTRANGE._serialized_end=21504 - _GITPROGRESSUPDATEDCHECKOUTRANGEMSG._serialized_start=21507 - _GITPROGRESSUPDATEDCHECKOUTRANGEMSG._serialized_end=21641 - _GITPROGRESSCHECKEDOUTAT._serialized_start=21643 - _GITPROGRESSCHECKEDOUTAT._serialized_end=21685 - _GITPROGRESSCHECKEDOUTATMSG._serialized_start=21687 - _GITPROGRESSCHECKEDOUTATMSG._serialized_end=21805 - _REGISTRYPROGRESSGETREQUEST._serialized_start=21807 - _REGISTRYPROGRESSGETREQUEST._serialized_end=21848 - _REGISTRYPROGRESSGETREQUESTMSG._serialized_start=21850 - _REGISTRYPROGRESSGETREQUESTMSG._serialized_end=21974 - _REGISTRYPROGRESSGETRESPONSE._serialized_start=21976 - _REGISTRYPROGRESSGETRESPONSE._serialized_end=22037 - _REGISTRYPROGRESSGETRESPONSEMSG._serialized_start=22039 - _REGISTRYPROGRESSGETRESPONSEMSG._serialized_end=22165 - _SELECTORREPORTINVALIDSELECTOR._serialized_start=22167 - _SELECTORREPORTINVALIDSELECTOR._serialized_end=22262 - _SELECTORREPORTINVALIDSELECTORMSG._serialized_start=22265 - _SELECTORREPORTINVALIDSELECTORMSG._serialized_end=22395 - _DEPSNOPACKAGESFOUND._serialized_start=22397 - _DEPSNOPACKAGESFOUND._serialized_end=22418 - _DEPSNOPACKAGESFOUNDMSG._serialized_start=22420 - _DEPSNOPACKAGESFOUNDMSG._serialized_end=22530 - _DEPSSTARTPACKAGEINSTALL._serialized_start=22532 - _DEPSSTARTPACKAGEINSTALL._serialized_end=22579 - _DEPSSTARTPACKAGEINSTALLMSG._serialized_start=22581 - _DEPSSTARTPACKAGEINSTALLMSG._serialized_end=22699 - _DEPSINSTALLINFO._serialized_start=22701 - _DEPSINSTALLINFO._serialized_end=22740 - 
_DEPSINSTALLINFOMSG._serialized_start=22742 - _DEPSINSTALLINFOMSG._serialized_end=22844 - _DEPSUPDATEAVAILABLE._serialized_start=22846 - _DEPSUPDATEAVAILABLE._serialized_end=22891 - _DEPSUPDATEAVAILABLEMSG._serialized_start=22893 - _DEPSUPDATEAVAILABLEMSG._serialized_end=23003 - _DEPSUPTODATE._serialized_start=23005 - _DEPSUPTODATE._serialized_end=23019 - _DEPSUPTODATEMSG._serialized_start=23021 - _DEPSUPTODATEMSG._serialized_end=23117 - _DEPSLISTSUBDIRECTORY._serialized_start=23119 - _DEPSLISTSUBDIRECTORY._serialized_end=23163 - _DEPSLISTSUBDIRECTORYMSG._serialized_start=23165 - _DEPSLISTSUBDIRECTORYMSG._serialized_end=23277 - _DEPSNOTIFYUPDATESAVAILABLE._serialized_start=23279 - _DEPSNOTIFYUPDATESAVAILABLE._serialized_end=23325 - _DEPSNOTIFYUPDATESAVAILABLEMSG._serialized_start=23327 - _DEPSNOTIFYUPDATESAVAILABLEMSG._serialized_end=23451 - _RETRYEXTERNALCALL._serialized_start=23453 - _RETRYEXTERNALCALL._serialized_end=23502 - _RETRYEXTERNALCALLMSG._serialized_start=23504 - _RETRYEXTERNALCALLMSG._serialized_end=23610 - _RECORDRETRYEXCEPTION._serialized_start=23612 - _RECORDRETRYEXCEPTION._serialized_end=23647 - _RECORDRETRYEXCEPTIONMSG._serialized_start=23649 - _RECORDRETRYEXCEPTIONMSG._serialized_end=23761 - _REGISTRYINDEXPROGRESSGETREQUEST._serialized_start=23763 - _REGISTRYINDEXPROGRESSGETREQUEST._serialized_end=23809 - _REGISTRYINDEXPROGRESSGETREQUESTMSG._serialized_start=23812 - _REGISTRYINDEXPROGRESSGETREQUESTMSG._serialized_end=23946 - _REGISTRYINDEXPROGRESSGETRESPONSE._serialized_start=23948 - _REGISTRYINDEXPROGRESSGETRESPONSE._serialized_end=24014 - _REGISTRYINDEXPROGRESSGETRESPONSEMSG._serialized_start=24017 - _REGISTRYINDEXPROGRESSGETRESPONSEMSG._serialized_end=24153 - _REGISTRYRESPONSEUNEXPECTEDTYPE._serialized_start=24155 - _REGISTRYRESPONSEUNEXPECTEDTYPE._serialized_end=24205 - _REGISTRYRESPONSEUNEXPECTEDTYPEMSG._serialized_start=24208 - _REGISTRYRESPONSEUNEXPECTEDTYPEMSG._serialized_end=24340 - _REGISTRYRESPONSEMISSINGTOPKEYS._serialized_start=24342 - _REGISTRYRESPONSEMISSINGTOPKEYS._serialized_end=24392 - _REGISTRYRESPONSEMISSINGTOPKEYSMSG._serialized_start=24395 - _REGISTRYRESPONSEMISSINGTOPKEYSMSG._serialized_end=24527 - _REGISTRYRESPONSEMISSINGNESTEDKEYS._serialized_start=24529 - _REGISTRYRESPONSEMISSINGNESTEDKEYS._serialized_end=24582 - _REGISTRYRESPONSEMISSINGNESTEDKEYSMSG._serialized_start=24585 - _REGISTRYRESPONSEMISSINGNESTEDKEYSMSG._serialized_end=24723 - _REGISTRYRESPONSEEXTRANESTEDKEYS._serialized_start=24725 - _REGISTRYRESPONSEEXTRANESTEDKEYS._serialized_end=24776 - _REGISTRYRESPONSEEXTRANESTEDKEYSMSG._serialized_start=24779 - _REGISTRYRESPONSEEXTRANESTEDKEYSMSG._serialized_end=24913 - _DEPSSETDOWNLOADDIRECTORY._serialized_start=24915 - _DEPSSETDOWNLOADDIRECTORY._serialized_end=24955 - _DEPSSETDOWNLOADDIRECTORYMSG._serialized_start=24957 - _DEPSSETDOWNLOADDIRECTORYMSG._serialized_end=25077 - _DEPSUNPINNED._serialized_start=25079 - _DEPSUNPINNED._serialized_end=25124 - _DEPSUNPINNEDMSG._serialized_start=25126 - _DEPSUNPINNEDMSG._serialized_end=25222 - _NONODESFORSELECTIONCRITERIA._serialized_start=25224 - _NONODESFORSELECTIONCRITERIA._serialized_end=25271 - _NONODESFORSELECTIONCRITERIAMSG._serialized_start=25273 - _NONODESFORSELECTIONCRITERIAMSG._serialized_end=25399 - _RUNNINGOPERATIONCAUGHTERROR._serialized_start=25401 - _RUNNINGOPERATIONCAUGHTERROR._serialized_end=25443 - _RUNNINGOPERATIONCAUGHTERRORMSG._serialized_start=25445 - _RUNNINGOPERATIONCAUGHTERRORMSG._serialized_end=25571 - _COMPILECOMPLETE._serialized_start=25573 - 
_COMPILECOMPLETE._serialized_end=25590 - _COMPILECOMPLETEMSG._serialized_start=25592 - _COMPILECOMPLETEMSG._serialized_end=25694 - _FRESHNESSCHECKCOMPLETE._serialized_start=25696 - _FRESHNESSCHECKCOMPLETE._serialized_end=25720 - _FRESHNESSCHECKCOMPLETEMSG._serialized_start=25722 - _FRESHNESSCHECKCOMPLETEMSG._serialized_end=25838 - _SEEDHEADER._serialized_start=25840 - _SEEDHEADER._serialized_end=25868 - _SEEDHEADERMSG._serialized_start=25870 - _SEEDHEADERMSG._serialized_end=25962 - _SQLRUNNEREXCEPTION._serialized_start=25964 - _SQLRUNNEREXCEPTION._serialized_end=26015 - _SQLRUNNEREXCEPTIONMSG._serialized_start=26017 - _SQLRUNNEREXCEPTIONMSG._serialized_end=26125 - _LOGTESTRESULT._serialized_start=26128 - _LOGTESTRESULT._serialized_end=26296 - _LOGTESTRESULTMSG._serialized_start=26298 - _LOGTESTRESULTMSG._serialized_end=26396 - _LOGSTARTLINE._serialized_start=26398 - _LOGSTARTLINE._serialized_end=26505 - _LOGSTARTLINEMSG._serialized_start=26507 - _LOGSTARTLINEMSG._serialized_end=26603 - _LOGMODELRESULT._serialized_start=26606 - _LOGMODELRESULT._serialized_end=26755 - _LOGMODELRESULTMSG._serialized_start=26757 - _LOGMODELRESULTMSG._serialized_end=26857 - _LOGSNAPSHOTRESULT._serialized_start=26860 - _LOGSNAPSHOTRESULT._serialized_end=27110 - _LOGSNAPSHOTRESULT_CFGENTRY._serialized_start=27068 - _LOGSNAPSHOTRESULT_CFGENTRY._serialized_end=27110 - _LOGSNAPSHOTRESULTMSG._serialized_start=27112 - _LOGSNAPSHOTRESULTMSG._serialized_end=27218 - _LOGSEEDRESULT._serialized_start=27221 - _LOGSEEDRESULT._serialized_end=27406 - _LOGSEEDRESULTMSG._serialized_start=27408 - _LOGSEEDRESULTMSG._serialized_end=27506 - _LOGFRESHNESSRESULT._serialized_start=27509 - _LOGFRESHNESSRESULT._serialized_end=27682 - _LOGFRESHNESSRESULTMSG._serialized_start=27684 - _LOGFRESHNESSRESULTMSG._serialized_end=27792 - _LOGCANCELLINE._serialized_start=27794 - _LOGCANCELLINE._serialized_end=27828 - _LOGCANCELLINEMSG._serialized_start=27830 - _LOGCANCELLINEMSG._serialized_end=27928 - _DEFAULTSELECTOR._serialized_start=27930 - _DEFAULTSELECTOR._serialized_end=27961 - _DEFAULTSELECTORMSG._serialized_start=27963 - _DEFAULTSELECTORMSG._serialized_end=28065 - _NODESTART._serialized_start=28067 - _NODESTART._serialized_end=28120 - _NODESTARTMSG._serialized_start=28122 - _NODESTARTMSG._serialized_end=28212 - _NODEFINISHED._serialized_start=28214 - _NODEFINISHED._serialized_end=28317 - _NODEFINISHEDMSG._serialized_start=28319 - _NODEFINISHEDMSG._serialized_end=28415 - _QUERYCANCELATIONUNSUPPORTED._serialized_start=28417 - _QUERYCANCELATIONUNSUPPORTED._serialized_end=28460 - _QUERYCANCELATIONUNSUPPORTEDMSG._serialized_start=28462 - _QUERYCANCELATIONUNSUPPORTEDMSG._serialized_end=28588 - _CONCURRENCYLINE._serialized_start=28590 - _CONCURRENCYLINE._serialized_end=28669 - _CONCURRENCYLINEMSG._serialized_start=28671 - _CONCURRENCYLINEMSG._serialized_end=28773 - _WRITINGINJECTEDSQLFORNODE._serialized_start=28775 - _WRITINGINJECTEDSQLFORNODE._serialized_end=28844 - _WRITINGINJECTEDSQLFORNODEMSG._serialized_start=28846 - _WRITINGINJECTEDSQLFORNODEMSG._serialized_end=28968 - _NODECOMPILING._serialized_start=28970 - _NODECOMPILING._serialized_end=29027 - _NODECOMPILINGMSG._serialized_start=29029 - _NODECOMPILINGMSG._serialized_end=29127 - _NODEEXECUTING._serialized_start=29129 - _NODEEXECUTING._serialized_end=29186 - _NODEEXECUTINGMSG._serialized_start=29188 - _NODEEXECUTINGMSG._serialized_end=29286 - _LOGHOOKSTARTLINE._serialized_start=29288 - _LOGHOOKSTARTLINE._serialized_end=29397 - _LOGHOOKSTARTLINEMSG._serialized_start=29399 - 
_LOGHOOKSTARTLINEMSG._serialized_end=29503 - _LOGHOOKENDLINE._serialized_start=29506 - _LOGHOOKENDLINE._serialized_end=29653 - _LOGHOOKENDLINEMSG._serialized_start=29655 - _LOGHOOKENDLINEMSG._serialized_end=29755 - _SKIPPINGDETAILS._serialized_start=29758 - _SKIPPINGDETAILS._serialized_end=29905 - _SKIPPINGDETAILSMSG._serialized_start=29907 - _SKIPPINGDETAILSMSG._serialized_end=30009 - _NOTHINGTODO._serialized_start=30011 - _NOTHINGTODO._serialized_end=30024 - _NOTHINGTODOMSG._serialized_start=30026 - _NOTHINGTODOMSG._serialized_end=30120 - _RUNNINGOPERATIONUNCAUGHTERROR._serialized_start=30122 - _RUNNINGOPERATIONUNCAUGHTERROR._serialized_end=30166 - _RUNNINGOPERATIONUNCAUGHTERRORMSG._serialized_start=30169 - _RUNNINGOPERATIONUNCAUGHTERRORMSG._serialized_end=30299 - _ENDRUNRESULT._serialized_start=30302 - _ENDRUNRESULT._serialized_end=30449 - _ENDRUNRESULTMSG._serialized_start=30451 - _ENDRUNRESULTMSG._serialized_end=30547 - _NONODESSELECTED._serialized_start=30549 - _NONODESSELECTED._serialized_end=30566 - _NONODESSELECTEDMSG._serialized_start=30568 - _NONODESSELECTEDMSG._serialized_end=30670 - _COMMANDCOMPLETED._serialized_start=30672 - _COMMANDCOMPLETED._serialized_end=30791 - _COMMANDCOMPLETEDMSG._serialized_start=30793 - _COMMANDCOMPLETEDMSG._serialized_end=30897 - _SHOWNODE._serialized_start=30899 - _SHOWNODE._serialized_end=31006 - _SHOWNODEMSG._serialized_start=31008 - _SHOWNODEMSG._serialized_end=31096 - _COMPILEDNODE._serialized_start=31098 - _COMPILEDNODE._serialized_end=31210 - _COMPILEDNODEMSG._serialized_start=31212 - _COMPILEDNODEMSG._serialized_end=31308 - _CATCHABLEEXCEPTIONONRUN._serialized_start=31310 - _CATCHABLEEXCEPTIONONRUN._serialized_end=31408 - _CATCHABLEEXCEPTIONONRUNMSG._serialized_start=31410 - _CATCHABLEEXCEPTIONONRUNMSG._serialized_end=31528 - _INTERNALERRORONRUN._serialized_start=31530 - _INTERNALERRORONRUN._serialized_end=31583 - _INTERNALERRORONRUNMSG._serialized_start=31585 - _INTERNALERRORONRUNMSG._serialized_end=31693 - _GENERICEXCEPTIONONRUN._serialized_start=31695 - _GENERICEXCEPTIONONRUN._serialized_end=31770 - _GENERICEXCEPTIONONRUNMSG._serialized_start=31772 - _GENERICEXCEPTIONONRUNMSG._serialized_end=31886 - _NODECONNECTIONRELEASEERROR._serialized_start=31888 - _NODECONNECTIONRELEASEERROR._serialized_end=31966 - _NODECONNECTIONRELEASEERRORMSG._serialized_start=31968 - _NODECONNECTIONRELEASEERRORMSG._serialized_end=32092 - _FOUNDSTATS._serialized_start=32094 - _FOUNDSTATS._serialized_end=32125 - _FOUNDSTATSMSG._serialized_start=32127 - _FOUNDSTATSMSG._serialized_end=32219 - _MAINKEYBOARDINTERRUPT._serialized_start=32221 - _MAINKEYBOARDINTERRUPT._serialized_end=32244 - _MAINKEYBOARDINTERRUPTMSG._serialized_start=32246 - _MAINKEYBOARDINTERRUPTMSG._serialized_end=32360 - _MAINENCOUNTEREDERROR._serialized_start=32362 - _MAINENCOUNTEREDERROR._serialized_end=32397 - _MAINENCOUNTEREDERRORMSG._serialized_start=32399 - _MAINENCOUNTEREDERRORMSG._serialized_end=32511 - _MAINSTACKTRACE._serialized_start=32513 - _MAINSTACKTRACE._serialized_end=32550 - _MAINSTACKTRACEMSG._serialized_start=32552 - _MAINSTACKTRACEMSG._serialized_end=32652 - _SYSTEMCOULDNOTWRITE._serialized_start=32654 - _SYSTEMCOULDNOTWRITE._serialized_end=32718 - _SYSTEMCOULDNOTWRITEMSG._serialized_start=32720 - _SYSTEMCOULDNOTWRITEMSG._serialized_end=32830 - _SYSTEMEXECUTINGCMD._serialized_start=32832 - _SYSTEMEXECUTINGCMD._serialized_end=32865 - _SYSTEMEXECUTINGCMDMSG._serialized_start=32867 - _SYSTEMEXECUTINGCMDMSG._serialized_end=32975 - _SYSTEMSTDOUT._serialized_start=32977 - 
_SYSTEMSTDOUT._serialized_end=33005 - _SYSTEMSTDOUTMSG._serialized_start=33007 - _SYSTEMSTDOUTMSG._serialized_end=33103 - _SYSTEMSTDERR._serialized_start=33105 - _SYSTEMSTDERR._serialized_end=33133 - _SYSTEMSTDERRMSG._serialized_start=33135 - _SYSTEMSTDERRMSG._serialized_end=33231 - _SYSTEMREPORTRETURNCODE._serialized_start=33233 - _SYSTEMREPORTRETURNCODE._serialized_end=33277 - _SYSTEMREPORTRETURNCODEMSG._serialized_start=33279 - _SYSTEMREPORTRETURNCODEMSG._serialized_end=33395 - _TIMINGINFOCOLLECTED._serialized_start=33397 - _TIMINGINFOCOLLECTED._serialized_end=33509 - _TIMINGINFOCOLLECTEDMSG._serialized_start=33511 - _TIMINGINFOCOLLECTEDMSG._serialized_end=33621 - _LOGDEBUGSTACKTRACE._serialized_start=33623 - _LOGDEBUGSTACKTRACE._serialized_end=33661 - _LOGDEBUGSTACKTRACEMSG._serialized_start=33663 - _LOGDEBUGSTACKTRACEMSG._serialized_end=33771 - _CHECKCLEANPATH._serialized_start=33773 - _CHECKCLEANPATH._serialized_end=33803 - _CHECKCLEANPATHMSG._serialized_start=33805 - _CHECKCLEANPATHMSG._serialized_end=33905 - _CONFIRMCLEANPATH._serialized_start=33907 - _CONFIRMCLEANPATH._serialized_end=33939 - _CONFIRMCLEANPATHMSG._serialized_start=33941 - _CONFIRMCLEANPATHMSG._serialized_end=34045 - _PROTECTEDCLEANPATH._serialized_start=34047 - _PROTECTEDCLEANPATH._serialized_end=34081 - _PROTECTEDCLEANPATHMSG._serialized_start=34083 - _PROTECTEDCLEANPATHMSG._serialized_end=34191 - _FINISHEDCLEANPATHS._serialized_start=34193 - _FINISHEDCLEANPATHS._serialized_end=34213 - _FINISHEDCLEANPATHSMSG._serialized_start=34215 - _FINISHEDCLEANPATHSMSG._serialized_end=34323 - _OPENCOMMAND._serialized_start=34325 - _OPENCOMMAND._serialized_end=34378 - _OPENCOMMANDMSG._serialized_start=34380 - _OPENCOMMANDMSG._serialized_end=34474 - _FORMATTING._serialized_start=34476 - _FORMATTING._serialized_end=34501 - _FORMATTINGMSG._serialized_start=34503 - _FORMATTINGMSG._serialized_end=34595 - _SERVINGDOCSPORT._serialized_start=34597 - _SERVINGDOCSPORT._serialized_end=34645 - _SERVINGDOCSPORTMSG._serialized_start=34647 - _SERVINGDOCSPORTMSG._serialized_end=34749 - _SERVINGDOCSACCESSINFO._serialized_start=34751 - _SERVINGDOCSACCESSINFO._serialized_end=34788 - _SERVINGDOCSACCESSINFOMSG._serialized_start=34790 - _SERVINGDOCSACCESSINFOMSG._serialized_end=34904 - _SERVINGDOCSEXITINFO._serialized_start=34906 - _SERVINGDOCSEXITINFO._serialized_end=34927 - _SERVINGDOCSEXITINFOMSG._serialized_start=34929 - _SERVINGDOCSEXITINFOMSG._serialized_end=35039 - _RUNRESULTWARNING._serialized_start=35041 - _RUNRESULTWARNING._serialized_end=35115 - _RUNRESULTWARNINGMSG._serialized_start=35117 - _RUNRESULTWARNINGMSG._serialized_end=35221 - _RUNRESULTFAILURE._serialized_start=35223 - _RUNRESULTFAILURE._serialized_end=35297 - _RUNRESULTFAILUREMSG._serialized_start=35299 - _RUNRESULTFAILUREMSG._serialized_end=35403 - _STATSLINE._serialized_start=35405 - _STATSLINE._serialized_end=35512 - _STATSLINE_STATSENTRY._serialized_start=35468 - _STATSLINE_STATSENTRY._serialized_end=35512 - _STATSLINEMSG._serialized_start=35514 - _STATSLINEMSG._serialized_end=35604 - _RUNRESULTERROR._serialized_start=35606 - _RUNRESULTERROR._serialized_end=35635 - _RUNRESULTERRORMSG._serialized_start=35637 - _RUNRESULTERRORMSG._serialized_end=35737 - _RUNRESULTERRORNOMESSAGE._serialized_start=35739 - _RUNRESULTERRORNOMESSAGE._serialized_end=35780 - _RUNRESULTERRORNOMESSAGEMSG._serialized_start=35782 - _RUNRESULTERRORNOMESSAGEMSG._serialized_end=35900 - _SQLCOMPILEDPATH._serialized_start=35902 - _SQLCOMPILEDPATH._serialized_end=35933 - 
_SQLCOMPILEDPATHMSG._serialized_start=35935 - _SQLCOMPILEDPATHMSG._serialized_end=36037 - _CHECKNODETESTFAILURE._serialized_start=36039 - _CHECKNODETESTFAILURE._serialized_end=36084 - _CHECKNODETESTFAILUREMSG._serialized_start=36086 - _CHECKNODETESTFAILUREMSG._serialized_end=36198 - _FIRSTRUNRESULTERROR._serialized_start=36200 - _FIRSTRUNRESULTERROR._serialized_end=36234 - _FIRSTRUNRESULTERRORMSG._serialized_start=36236 - _FIRSTRUNRESULTERRORMSG._serialized_end=36346 - _AFTERFIRSTRUNRESULTERROR._serialized_start=36348 - _AFTERFIRSTRUNRESULTERROR._serialized_end=36387 - _AFTERFIRSTRUNRESULTERRORMSG._serialized_start=36389 - _AFTERFIRSTRUNRESULTERRORMSG._serialized_end=36509 - _ENDOFRUNSUMMARY._serialized_start=36511 - _ENDOFRUNSUMMARY._serialized_end=36598 - _ENDOFRUNSUMMARYMSG._serialized_start=36600 - _ENDOFRUNSUMMARYMSG._serialized_end=36702 - _LOGSKIPBECAUSEERROR._serialized_start=36704 - _LOGSKIPBECAUSEERROR._serialized_end=36789 - _LOGSKIPBECAUSEERRORMSG._serialized_start=36791 - _LOGSKIPBECAUSEERRORMSG._serialized_end=36901 - _ENSUREGITINSTALLED._serialized_start=36903 - _ENSUREGITINSTALLED._serialized_end=36923 - _ENSUREGITINSTALLEDMSG._serialized_start=36925 - _ENSUREGITINSTALLEDMSG._serialized_end=37033 - _DEPSCREATINGLOCALSYMLINK._serialized_start=37035 - _DEPSCREATINGLOCALSYMLINK._serialized_end=37061 - _DEPSCREATINGLOCALSYMLINKMSG._serialized_start=37063 - _DEPSCREATINGLOCALSYMLINKMSG._serialized_end=37183 - _DEPSSYMLINKNOTAVAILABLE._serialized_start=37185 - _DEPSSYMLINKNOTAVAILABLE._serialized_end=37210 - _DEPSSYMLINKNOTAVAILABLEMSG._serialized_start=37212 - _DEPSSYMLINKNOTAVAILABLEMSG._serialized_end=37330 - _DISABLETRACKING._serialized_start=37332 - _DISABLETRACKING._serialized_end=37349 - _DISABLETRACKINGMSG._serialized_start=37351 - _DISABLETRACKINGMSG._serialized_end=37453 - _SENDINGEVENT._serialized_start=37455 - _SENDINGEVENT._serialized_end=37485 - _SENDINGEVENTMSG._serialized_start=37487 - _SENDINGEVENTMSG._serialized_end=37583 - _SENDEVENTFAILURE._serialized_start=37585 - _SENDEVENTFAILURE._serialized_end=37603 - _SENDEVENTFAILUREMSG._serialized_start=37605 - _SENDEVENTFAILUREMSG._serialized_end=37709 - _FLUSHEVENTS._serialized_start=37711 - _FLUSHEVENTS._serialized_end=37724 - _FLUSHEVENTSMSG._serialized_start=37726 - _FLUSHEVENTSMSG._serialized_end=37820 - _FLUSHEVENTSFAILURE._serialized_start=37822 - _FLUSHEVENTSFAILURE._serialized_end=37842 - _FLUSHEVENTSFAILUREMSG._serialized_start=37844 - _FLUSHEVENTSFAILUREMSG._serialized_end=37952 - _TRACKINGINITIALIZEFAILURE._serialized_start=37954 - _TRACKINGINITIALIZEFAILURE._serialized_end=37999 - _TRACKINGINITIALIZEFAILUREMSG._serialized_start=38001 - _TRACKINGINITIALIZEFAILUREMSG._serialized_end=38123 - _RUNRESULTWARNINGMESSAGE._serialized_start=38125 - _RUNRESULTWARNINGMESSAGE._serialized_end=38163 - _RUNRESULTWARNINGMESSAGEMSG._serialized_start=38165 - _RUNRESULTWARNINGMESSAGEMSG._serialized_end=38283 - _DEBUGCMDOUT._serialized_start=38285 - _DEBUGCMDOUT._serialized_end=38311 - _DEBUGCMDOUTMSG._serialized_start=38313 - _DEBUGCMDOUTMSG._serialized_end=38407 - _DEBUGCMDRESULT._serialized_start=38409 - _DEBUGCMDRESULT._serialized_end=38438 - _DEBUGCMDRESULTMSG._serialized_start=38440 - _DEBUGCMDRESULTMSG._serialized_end=38540 - _LISTCMDOUT._serialized_start=38542 - _LISTCMDOUT._serialized_end=38567 - _LISTCMDOUTMSG._serialized_start=38569 - _LISTCMDOUTMSG._serialized_end=38661 - _NOTE._serialized_start=38663 - _NOTE._serialized_end=38682 - _NOTEMSG._serialized_start=38684 - _NOTEMSG._serialized_end=38764 -# 
@@protoc_insertion_point(module_scope) diff --git a/core/dbt/exceptions.py b/core/dbt/exceptions.py index 339c1c744bd..27aa863fd17 100644 --- a/core/dbt/exceptions.py +++ b/core/dbt/exceptions.py @@ -1,210 +1,24 @@ -import builtins +import io import json import re -import io -import agate -from typing import Any, Dict, List, Mapping, Optional, Tuple, Union - -from dbt.dataclass_schema import ValidationError -from dbt.events.helpers import env_secrets, scrub_secrets -from dbt.node_types import NodeType, AccessType -from dbt.ui import line_wrap_message - -import dbt.dataclass_schema - - -class MacroReturn(builtins.BaseException): - """ - Hack of all hacks - This is not actually an exception. - It's how we return a value from a macro. - """ - - def __init__(self, value): - self.value = value - - -class Exception(builtins.Exception): - CODE = -32000 - MESSAGE = "Server Error" - - def data(self): - # if overriding, make sure the result is json-serializable. - return { - "type": self.__class__.__name__, - "message": str(self), - } - - -class DbtInternalError(Exception): - def __init__(self, msg: str): - self.stack: List = [] - self.msg = scrub_secrets(msg, env_secrets()) - - @property - def type(self): - return "Internal" - - def process_stack(self): - lines = [] - stack = self.stack - first = True - - if len(stack) > 1: - lines.append("") - - for item in stack: - msg = "called by" - - if first: - msg = "in" - first = False - - lines.append(f"> {msg}") - - return lines - - def __str__(self): - if hasattr(self.msg, "split"): - split_msg = self.msg.split("\n") - else: - split_msg = str(self.msg).split("\n") - - lines = ["{}".format(self.type + " Error")] + split_msg - - lines += self.process_stack() - - return lines[0] + "\n" + "\n".join([" " + line for line in lines[1:]]) - - -class DbtRuntimeError(RuntimeError, Exception): - CODE = 10001 - MESSAGE = "Runtime error" - - def __init__(self, msg: str, node=None): - self.stack: List = [] - self.node = node - self.msg = scrub_secrets(msg, env_secrets()) - - def add_node(self, node=None): - if node is not None and node is not self.node: - if self.node is not None: - self.stack.append(self.node) - self.node = node - - @property - def type(self): - return "Runtime" - - def node_to_string(self, node): - if node is None: - return "<Unknown>" - if not hasattr(node, "name"): - # we probably failed to parse a block, so we can't know the name - return f"{node.resource_type} ({node.original_file_path})" - - if hasattr(node, "contents"): - # handle FileBlocks. They aren't really nodes but we want to render - # out the path we know at least. This indicates an error during - # block parsing. - return f"{node.path.original_file_path}" - return f"{node.resource_type} {node.name} ({node.original_file_path})" - - def process_stack(self): - lines = [] - stack = self.stack + [self.node] - first = True - - if len(stack) > 1: - lines.append("") - - for item in stack: - msg = "called by" - - if first: - msg = "in" - first = False - - lines.append(f"> {msg} {self.node_to_string(item)}") - - return lines - - def validator_error_message(self, exc: builtins.Exception): - """Given a dbt.dataclass_schema.ValidationError (which is basically a - jsonschema.ValidationError), return the relevant parts as a string - """ - if not isinstance(exc, dbt.dataclass_schema.ValidationError): - return str(exc) - path = "[%s]" % "][".join(map(repr, exc.relative_path)) - return f"at path {path}: {exc.message}" - - def __str__(self, prefix: str = "! 
"): - node_string = "" - - if self.node is not None: - node_string = f" in {self.node_to_string(self.node)}" - - if hasattr(self.msg, "split"): - split_msg = self.msg.split("\n") - else: - split_msg = str(self.msg).split("\n") - - lines = ["{}{}".format(self.type + " Error", node_string)] + split_msg - - lines += self.process_stack() - - return lines[0] + "\n" + "\n".join([" " + line for line in lines[1:]]) - - def data(self): - result = Exception.data(self) - if self.node is None: - return result - - result.update( - { - "raw_code": self.node.raw_code, - # the node isn't always compiled, but if it is, include that! - "compiled_code": getattr(self.node, "compiled_code", None), - } - ) - return result - - -class DbtDatabaseError(DbtRuntimeError): - CODE = 10003 - MESSAGE = "Database Error" - - def process_stack(self): - lines = [] - - if hasattr(self.node, "build_path") and self.node.build_path: - lines.append(f"compiled Code at {self.node.build_path}") - - return lines + DbtRuntimeError.process_stack(self) - - @property - def type(self): - return "Database" - - -class CompilationError(DbtRuntimeError): - CODE = 10004 - MESSAGE = "Compilation Error" - - @property - def type(self): - return "Compilation" - - def _fix_dupe_msg(self, path_1: str, path_2: str, name: str, type_name: str) -> str: - if path_1 == path_2: - return ( - f"remove one of the {type_name} entries for {name} in this file:\n - {path_1!s}\n" - ) - else: - return ( - f"remove the {type_name} entry for {name} in one of these files:\n" - f" - {path_1!s}\n{path_2!s}" - ) +from typing import TYPE_CHECKING, Any, Dict, List, Mapping, Optional, Union + +from dbt.node_types import REFABLE_NODE_TYPES, AccessType, NodeType +from dbt_common.constants import SECRET_ENV_PREFIX +from dbt_common.dataclass_schema import ValidationError +from dbt_common.exceptions import ( + CommandResultError, + CompilationError, + DbtConfigError, + DbtInternalError, + DbtRuntimeError, + DbtValidationError, + env_secrets, + scrub_secrets, +) + +if TYPE_CHECKING: + import agate class ContractBreakingChangeError(DbtRuntimeError): @@ -213,81 +27,27 @@ class ContractBreakingChangeError(DbtRuntimeError): def __init__( self, - contract_enforced_disabled: bool, - columns_removed: List[str], - column_type_changes: List[Tuple[str, str, str]], - enforced_column_constraint_removed: List[Tuple[str, str]], - enforced_model_constraint_removed: List[Tuple[str, List[str]]], - materialization_changed: List[str], + breaking_changes: List[str], node=None, - ): - self.contract_enforced_disabled = contract_enforced_disabled - self.columns_removed = columns_removed - self.column_type_changes = column_type_changes - self.enforced_column_constraint_removed = enforced_column_constraint_removed - self.enforced_model_constraint_removed = enforced_model_constraint_removed - self.materialization_changed = materialization_changed + ) -> None: + self.breaking_changes = breaking_changes super().__init__(self.message(), node) @property def type(self): - return "Breaking Change to Contract" + return "Breaking change to contract" def message(self): - breaking_changes = [] - if self.contract_enforced_disabled: - breaking_changes.append("The contract's enforcement has been disabled.") - if self.columns_removed: - columns_removed_str = "\n - ".join(self.columns_removed) - breaking_changes.append(f"Columns were removed: \n - {columns_removed_str}") - if self.column_type_changes: - column_type_changes_str = "\n - ".join( - [f"{c[0]} ({c[1]} -> {c[2]})" for c in self.column_type_changes] - ) - 
breaking_changes.append( - f"Columns with data_type changes: \n - {column_type_changes_str}" - ) - if self.enforced_column_constraint_removed: - column_constraint_changes_str = "\n - ".join( - [f"{c[0]} ({c[1]})" for c in self.enforced_column_constraint_removed] - ) - breaking_changes.append( - f"Enforced column level constraints were removed: \n - {column_constraint_changes_str}" - ) - if self.enforced_model_constraint_removed: - model_constraint_changes_str = "\n - ".join( - [f"{c[0]} -> {c[1]}" for c in self.enforced_model_constraint_removed] - ) - breaking_changes.append( - f"Enforced model level constraints were removed: \n - {model_constraint_changes_str}" - ) - if self.materialization_changed: - materialization_changes_str = "\n - ".join( - f"{self.materialization_changed[0]} -> {self.materialization_changed[1]}" - ) - breaking_changes.append( - f"Materialization changed with enforced constraints: \n - {materialization_changes_str}" - ) - - reasons = "\n\n".join(breaking_changes) + reasons = "\n - ".join(self.breaking_changes) return ( "While comparing to previous project state, dbt detected a breaking change to an enforced contract." - f"\n\n{reasons}\n\n" + f"\n - {reasons}\n" "Consider making an additive (non-breaking) change instead, if possible.\n" "Otherwise, create a new model version: https://docs.getdbt.com/docs/collaborate/govern/model-versions" ) -class RecursionError(DbtRuntimeError): - pass - - -class DbtValidationError(DbtRuntimeError): - CODE = 10005 - MESSAGE = "Validation Error" - - class ParsingError(DbtRuntimeError): CODE = 10015 MESSAGE = "Parsing Error" @@ -304,7 +64,7 @@ class dbtPluginError(DbtRuntimeError): # TODO: this isn't raised in the core codebase. Is it raised elsewhere? class JSONValidationError(DbtValidationError): - def __init__(self, typename, errors): + def __init__(self, typename, errors) -> None: self.typename = typename self.errors = errors self.errors_message = ", ".join(errors) @@ -316,48 +76,6 @@ def __reduce__(self): return (JSONValidationError, (self.typename, self.errors)) -class IncompatibleSchemaError(DbtRuntimeError): - def __init__(self, expected: str, found: Optional[str] = None): - self.expected = expected - self.found = found - self.filename = "input file" - - super().__init__(msg=self.get_message()) - - def add_filename(self, filename: str): - self.filename = filename - self.msg = self.get_message() - - def get_message(self) -> str: - found_str = "nothing" - if self.found is not None: - found_str = f'"{self.found}"' - - msg = ( - f'Expected a schema version of "{self.expected}" in ' - f"{self.filename}, but found {found_str}. Are you running with a " - f"different version of dbt?" - ) - return msg - - CODE = 10014 - MESSAGE = "Incompatible Schema" - - -class JinjaRenderingError(CompilationError): - pass - - -class UndefinedMacroError(CompilationError): - def __str__(self, prefix: str = "! ") -> str: - msg = super().__str__(prefix) - return ( - f"{msg}. This can happen when calling a macro that does " - "not exist. Check for typos and/or install package dependencies " - 'with "dbt deps".' - ) - - class AliasError(DbtValidationError): pass @@ -367,29 +85,11 @@ class DependencyError(Exception): MESSAGE = "Dependency Error" -class DbtConfigError(DbtRuntimeError): - CODE = 10007 - MESSAGE = "DBT Configuration Error" - - def __init__(self, msg: str, project=None, result_type="invalid_project", path=None): - self.project = project - super().__init__(msg) - self.result_type = result_type - self.path = path - - def __str__(self, prefix="! 
") -> str: - msg = super().__str__(prefix) - if self.path is None: - return msg - else: - return f"{msg}\n\nError encountered in {self.path}" - - class FailFastError(DbtRuntimeError): CODE = 10013 MESSAGE = "FailFast Error" - def __init__(self, msg: str, result=None, node=None): + def __init__(self, msg: str, result=None, node=None) -> None: super().__init__(msg=msg, node=node) self.result = result @@ -410,88 +110,12 @@ class DbtProfileError(DbtConfigError): pass -class SemverError(Exception): - def __init__(self, msg: Optional[str] = None): - self.msg = msg - if msg is not None: - super().__init__(msg) - else: - super().__init__() - - -class VersionsNotCompatibleError(SemverError): - pass - - -class NotImplementedError(Exception): - def __init__(self, msg: str): - self.msg = msg - self.formatted_msg = f"ERROR: {self.msg}" - super().__init__(self.formatted_msg) - - -class FailedToConnectError(DbtDatabaseError): +class DbtExclusivePropertyUseError(DbtConfigError): pass -class CommandError(DbtRuntimeError): - def __init__(self, cwd: str, cmd: List[str], msg: str = "Error running command"): - cmd_scrubbed = list(scrub_secrets(cmd_txt, env_secrets()) for cmd_txt in cmd) - super().__init__(msg) - self.cwd = cwd - self.cmd = cmd_scrubbed - self.args = (cwd, cmd_scrubbed, msg) - - def __str__(self): - if len(self.cmd) == 0: - return f"{self.msg}: No arguments given" - return f'{self.msg}: "{self.cmd[0]}"' - - -class ExecutableError(CommandError): - def __init__(self, cwd: str, cmd: List[str], msg: str): - super().__init__(cwd, cmd, msg) - - -class WorkingDirectoryError(CommandError): - def __init__(self, cwd: str, cmd: List[str], msg: str): - super().__init__(cwd, cmd, msg) - - def __str__(self): - return f'{self.msg}: "{self.cwd}"' - - -class CommandResultError(CommandError): - def __init__( - self, - cwd: str, - cmd: List[str], - returncode: Union[int, Any], - stdout: bytes, - stderr: bytes, - msg: str = "Got a non-zero returncode", - ): - super().__init__(cwd, cmd, msg) - self.returncode = returncode - self.stdout = scrub_secrets(stdout.decode("utf-8"), env_secrets()) - self.stderr = scrub_secrets(stderr.decode("utf-8"), env_secrets()) - self.args = (cwd, self.cmd, returncode, self.stdout, self.stderr, msg) - - def __str__(self): - return f"{self.msg} running: {self.cmd}" - - -class InvalidConnectionError(DbtRuntimeError): - def __init__(self, thread_id, known: List): - self.thread_id = thread_id - self.known = known - super().__init__( - msg="connection never acquired for thread {self.thread_id}, have {self.known}" - ) - - class InvalidSelectorError(DbtRuntimeError): - def __init__(self, name: str): + def __init__(self, name: str) -> None: self.name = name super().__init__(name) @@ -500,26 +124,9 @@ class DuplicateYamlKeyError(CompilationError): pass -class ConnectionError(Exception): - """ - There was a problem with the connection that returned a bad response, - timed out, or resulted in a file that is corrupt. 
- """ - - pass - - -# event level exception -class EventCompilationError(CompilationError): - def __init__(self, msg: str, node): - self.msg = scrub_secrets(msg, env_secrets()) - self.node = node - super().__init__(msg=self.msg) - - # compilation level exceptions class GraphDependencyNotFoundError(CompilationError): - def __init__(self, node, dependency: str): + def __init__(self, node, dependency: str) -> None: self.node = node self.dependency = dependency super().__init__(msg=self.get_message()) @@ -529,102 +136,37 @@ def get_message(self) -> str: return msg +class ForeignKeyConstraintToSyntaxError(CompilationError): + def __init__(self, node, expression: str) -> None: + self.expression = expression + self.node = node + super().__init__(msg=self.get_message()) + + def get_message(self) -> str: + msg = f"'{self.node.unique_id}' defines a foreign key constraint 'to' expression which is not valid 'ref' or 'source' syntax: {self.expression}." + + return msg + + # client level exceptions class NoSupportedLanguagesFoundError(CompilationError): - def __init__(self, node): + def __init__(self, node) -> None: self.node = node self.msg = f"No supported_languages found in materialization macro {self.node.name}" super().__init__(msg=self.msg) class MaterializtionMacroNotUsedError(CompilationError): - def __init__(self, node): + def __init__(self, node) -> None: self.node = node self.msg = "Only materialization macros can be used with this function" super().__init__(msg=self.msg) -class UndefinedCompilationError(CompilationError): - def __init__(self, name: str, node): - self.name = name - self.node = node - self.msg = f"{self.name} is undefined" - super().__init__(msg=self.msg) - - -class CaughtMacroErrorWithNodeError(CompilationError): - def __init__(self, exc, node): - self.exc = exc - self.node = node - super().__init__(msg=str(exc)) - - -class CaughtMacroError(CompilationError): - def __init__(self, exc): - self.exc = exc - super().__init__(msg=str(exc)) - - -class MacroNameNotStringError(CompilationError): - def __init__(self, kwarg_value): - self.kwarg_value = kwarg_value - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - f"The macro_name parameter ({self.kwarg_value}) " - "to adapter.dispatch was not a string" - ) - return msg - - -class MissingControlFlowStartTagError(CompilationError): - def __init__(self, tag, expected_tag: str, tag_parser): - self.tag = tag - self.expected_tag = expected_tag - self.tag_parser = tag_parser - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - linepos = self.tag_parser.linepos(self.tag.start) - msg = ( - f"Got an unexpected control flow end tag, got {self.tag.block_type_name} but " - f"expected {self.expected_tag} next (@ {linepos})" - ) - return msg - - -class UnexpectedControlFlowEndTagError(CompilationError): - def __init__(self, tag, expected_tag: str, tag_parser): - self.tag = tag - self.expected_tag = expected_tag - self.tag_parser = tag_parser - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - linepos = self.tag_parser.linepos(self.tag.start) - msg = ( - f"Got an unexpected control flow end tag, got {self.tag.block_type_name} but " - f"never saw a preceeding {self.expected_tag} (@ {linepos})" - ) - return msg - - -class UnexpectedMacroEOFError(CompilationError): - def __init__(self, expected_name: str, actual_name: str): - self.expected_name = expected_name - self.actual_name = actual_name - super().__init__(msg=self.get_message()) - - def get_message(self) -> 
str: - msg = f'unexpected EOF, expected {self.expected_name}, got "{self.actual_name}"' - return msg - - class MacroNamespaceNotStringError(CompilationError): - def __init__(self, kwarg_type: Any): + def __init__(self, kwarg_type: Any) -> None: self.kwarg_type = kwarg_type super().__init__(msg=self.get_message()) @@ -636,49 +178,8 @@ def get_message(self) -> str: return msg -class NestedTagsError(CompilationError): - def __init__(self, outer, inner): - self.outer = outer - self.inner = inner - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - f"Got nested tags: {self.outer.block_type_name} (started at {self.outer.start}) did " - f"not have a matching {{{{% end{self.outer.block_type_name} %}}}} before a " - f"subsequent {self.inner.block_type_name} was found (started at {self.inner.start})" - ) - return msg - - -class BlockDefinitionNotAtTopError(CompilationError): - def __init__(self, tag_parser, tag_start): - self.tag_parser = tag_parser - self.tag_start = tag_start - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - position = self.tag_parser.linepos(self.tag_start) - msg = ( - f"Got a block definition inside control flow at {position}. " - "All dbt block definitions must be at the top level" - ) - return msg - - -class MissingCloseTagError(CompilationError): - def __init__(self, block_type_name: str, linecount: int): - self.block_type_name = block_type_name - self.linecount = linecount - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = f"Reached EOF without finding a close tag for {self.block_type_name} (searched from line {self.linecount})" - return msg - - class UnknownGitCloningProblemError(DbtRuntimeError): - def __init__(self, repo: str): + def __init__(self, repo: str) -> None: self.repo = scrub_secrets(repo, env_secrets()) super().__init__(msg=self.get_message()) @@ -691,7 +192,7 @@ def get_message(self) -> str: class NoAdaptersAvailableError(DbtRuntimeError): - def __init__(self): + def __init__(self) -> None: super().__init__(msg=self.get_message()) def get_message(self) -> str: @@ -700,7 +201,7 @@ def get_message(self) -> str: class BadSpecError(DbtInternalError): - def __init__(self, repo, revision, error): + def __init__(self, repo, revision, error) -> None: self.repo = repo self.revision = revision self.stderr = scrub_secrets(error.stderr.strip(), env_secrets()) @@ -712,7 +213,7 @@ def get_message(self) -> str: class GitCloningError(DbtInternalError): - def __init__(self, repo: str, revision: str, error: CommandResultError): + def __init__(self, repo: str, revision: str, error: CommandResultError) -> None: self.repo = repo self.revision = revision self.error = error @@ -734,19 +235,8 @@ class GitCheckoutError(BadSpecError): pass -class MaterializationArgError(CompilationError): - def __init__(self, name: str, argument: str): - self.name = name - self.argument = argument - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = f"materialization '{self.name}' received unknown argument '{self.argument}'." 
- return msg - - class OperationError(CompilationError): - def __init__(self, operation_name): + def __init__(self, operation_name) -> None: self.operation_name = operation_name super().__init__(msg=self.get_message()) @@ -760,37 +250,23 @@ def get_message(self) -> str: return msg -class SymbolicLinkError(CompilationError): - def __init__(self): - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - "dbt encountered an error when attempting to create a symbolic link. " - "If this error persists, please create an issue at: \n\n" - "https://github.com/dbt-labs/dbt-core" - ) - - return msg - - # context level exceptions class ZipStrictWrongTypeError(CompilationError): - def __init__(self, exc): + def __init__(self, exc) -> None: self.exc = exc msg = str(self.exc) super().__init__(msg=msg) class SetStrictWrongTypeError(CompilationError): - def __init__(self, exc): + def __init__(self, exc) -> None: self.exc = exc msg = str(self.exc) super().__init__(msg=msg) class LoadAgateTableValueError(CompilationError): - def __init__(self, exc: ValueError, node): + def __init__(self, exc: ValueError, node) -> None: self.exc = exc self.node = node msg = str(self.exc) @@ -798,22 +274,15 @@ def __init__(self, exc: ValueError, node): class LoadAgateTableNotSeedError(CompilationError): - def __init__(self, resource_type, node): + def __init__(self, resource_type, node) -> None: self.resource_type = resource_type self.node = node msg = f"can only load_agate_table for seeds (got a {self.resource_type})" super().__init__(msg=msg) -class MacrosSourcesUnWriteableError(CompilationError): - def __init__(self, node): - self.node = node - msg = 'cannot "write" macros or sources' - super().__init__(msg=msg) - - class PackageNotInDepsError(CompilationError): - def __init__(self, package_name: str, node): + def __init__(self, package_name: str, node) -> None: self.package_name = package_name self.node = node msg = f"Node package named {self.package_name} not found!" @@ -821,7 +290,7 @@ def __init__(self, package_name: str, node): class OperationsCannotRefEphemeralNodesError(CompilationError): - def __init__(self, target_name: str, node): + def __init__(self, target_name: str, node) -> None: self.target_name = target_name self.node = node msg = f"Operations can not ref() ephemeral nodes, but {target_name} is ephemeral" @@ -829,7 +298,7 @@ def __init__(self, target_name: str, node): class PersistDocsValueTypeError(CompilationError): - def __init__(self, persist_docs: Any): + def __init__(self, persist_docs: Any) -> None: self.persist_docs = persist_docs msg = ( "Invalid value provided for 'persist_docs'. 
Expected dict " @@ -839,14 +308,14 @@ def __init__(self, persist_docs: Any): class InlineModelConfigError(CompilationError): - def __init__(self, node): + def __init__(self, node) -> None: self.node = node msg = "Invalid inline model config" super().__init__(msg=msg) class ConflictingConfigKeysError(CompilationError): - def __init__(self, oldkey: str, newkey: str, node): + def __init__(self, oldkey: str, newkey: str, node) -> None: self.oldkey = oldkey self.newkey = newkey self.node = node @@ -855,7 +324,7 @@ def __init__(self, oldkey: str, newkey: str, node): class NumberSourceArgsError(CompilationError): - def __init__(self, args, node): + def __init__(self, args, node) -> None: self.args = args self.node = node msg = f"source() takes exactly two arguments ({len(self.args)} given)" @@ -863,7 +332,7 @@ def __init__(self, args, node): class RequiredVarNotFoundError(CompilationError): - def __init__(self, var_name: str, merged: Dict, node): + def __init__(self, var_name: str, merged: Dict, node) -> None: self.var_name = var_name self.merged = merged self.node = node @@ -879,18 +348,21 @@ def get_message(self) -> str: pretty_vars = json.dumps(dct, sort_keys=True, indent=4) msg = f"Required var '{self.var_name}' not found in config:\nVars supplied to {node_name} = {pretty_vars}" - return msg + return scrub_secrets(msg, self.var_secrets()) + + def var_secrets(self) -> List[str]: + return [v for k, v in self.merged.items() if k.startswith(SECRET_ENV_PREFIX) and v.strip()] class PackageNotFoundForMacroError(CompilationError): - def __init__(self, package_name: str): + def __init__(self, package_name: str) -> None: self.package_name = package_name msg = f"Could not find package '{self.package_name}'" super().__init__(msg=msg) class SecretEnvVarLocationError(ParsingError): - def __init__(self, env_var_name: str): + def __init__(self, env_var_name: str) -> None: self.env_var_name = env_var_name super().__init__(msg=self.get_message()) @@ -902,26 +374,8 @@ def get_message(self) -> str: return msg -class MacroArgTypeError(CompilationError): - def __init__(self, method_name: str, arg_name: str, got_value: Any, expected_type): - self.method_name = method_name - self.arg_name = arg_name - self.got_value = got_value - self.expected_type = expected_type - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - got_type = type(self.got_value) - msg = ( - f"'adapter.{self.method_name}' expects argument " - f"'{self.arg_name}' to be of type '{self.expected_type}', instead got " - f"{self.got_value} ({got_type})" - ) - return msg - - class BooleanError(CompilationError): - def __init__(self, return_value: Any, macro_name: str): + def __init__(self, return_value: Any, macro_name: str) -> None: self.return_value = return_value self.macro_name = macro_name super().__init__(msg=self.get_message()) @@ -935,7 +389,7 @@ def get_message(self) -> str: class RefArgsError(CompilationError): - def __init__(self, node, args): + def __init__(self, node, args) -> None: self.node = node self.args = args super().__init__(msg=self.get_message()) @@ -946,7 +400,7 @@ def get_message(self) -> str: class MetricArgsError(CompilationError): - def __init__(self, node, args): + def __init__(self, node, args) -> None: self.node = node self.args = args super().__init__(msg=self.get_message()) @@ -957,7 +411,7 @@ def get_message(self) -> str: class RefBadContextError(CompilationError): - def __init__(self, node, args): + def __init__(self, node, args) -> None: self.node = node self.args = args.positional_args # type: 
ignore self.kwargs = args.keyword_args # type: ignore @@ -995,7 +449,7 @@ def get_message(self) -> str: class DocArgsError(CompilationError): - def __init__(self, node, args): + def __init__(self, node, args) -> None: self.node = node self.args = args super().__init__(msg=self.get_message()) @@ -1006,7 +460,9 @@ def get_message(self) -> str: class DocTargetNotFoundError(CompilationError): - def __init__(self, node, target_doc_name: str, target_doc_package: Optional[str] = None): + def __init__( + self, node, target_doc_name: str, target_doc_package: Optional[str] = None + ) -> None: self.node = node self.target_doc_name = target_doc_name self.target_doc_package = target_doc_package @@ -1015,13 +471,13 @@ def __init__(self, node, target_doc_name: str, target_doc_package: Optional[str] def get_message(self) -> str: target_package_string = "" if self.target_doc_package is not None: - target_package_string = f"in package '{self. target_doc_package}' " + target_package_string = f"in package '{self.target_doc_package}' " msg = f"Documentation for '{self.node.unique_id}' depends on doc '{self.target_doc_name}' {target_package_string} which was not found" return msg class MacroDispatchArgError(CompilationError): - def __init__(self, macro_name: str): + def __init__(self, macro_name: str) -> None: self.macro_name = macro_name super().__init__(msg=self.get_message()) @@ -1040,7 +496,7 @@ def get_message(self) -> str: class DuplicateMacroNameError(CompilationError): - def __init__(self, node_1, node_2, namespace: str): + def __init__(self, node_1, node_2, namespace: str) -> None: self.node_1 = node_1 self.node_2 = node_2 self.namespace = namespace @@ -1065,7 +521,7 @@ def get_message(self) -> str: class MacroResultAlreadyLoadedError(CompilationError): - def __init__(self, result_name): + def __init__(self, result_name) -> None: self.result_name = result_name super().__init__(msg=self.get_message()) @@ -1077,7 +533,7 @@ def get_message(self) -> str: # parser level exceptions class DictParseError(ParsingError): - def __init__(self, exc: ValidationError, node): + def __init__(self, exc: ValidationError, node) -> None: self.exc = exc self.node = node msg = self.validator_error_message(exc) @@ -1085,7 +541,7 @@ def __init__(self, exc: ValidationError, node): class ConfigUpdateError(ParsingError): - def __init__(self, exc: ValidationError, node): + def __init__(self, exc: ValidationError, node) -> None: self.exc = exc self.node = node msg = self.validator_error_message(exc) @@ -1093,7 +549,7 @@ def __init__(self, exc: ValidationError, node): class PythonParsingError(ParsingError): - def __init__(self, exc: SyntaxError, node): + def __init__(self, exc: SyntaxError, node) -> None: self.exc = exc self.node = node super().__init__(msg=self.get_message()) @@ -1105,7 +561,7 @@ def get_message(self) -> str: class PythonLiteralEvalError(ParsingError): - def __init__(self, exc: Exception, node): + def __init__(self, exc: Exception, node) -> None: self.exc = exc self.node = node super().__init__(msg=self.get_message()) @@ -1121,7 +577,7 @@ def get_message(self) -> str: class ModelConfigError(ParsingError): - def __init__(self, exc: ValidationError, node): + def __init__(self, exc: ValidationError, node) -> None: self.msg = self.validator_error_message(exc) self.node = node super().__init__(msg=self.msg) @@ -1134,7 +590,7 @@ def __init__( key: str, yaml_data: List, cause, - ): + ) -> None: self.path = path self.key = key self.yaml_data = yaml_data @@ -1159,7 +615,7 @@ def __init__( key: str, yaml_data: Dict[str, 
Any], cause, - ): + ) -> None: self.path = path self.key = key self.yaml_data = yaml_data @@ -1183,7 +639,7 @@ def __init__( path: str, exc: DbtValidationError, project_name: Optional[str] = None, - ): + ) -> None: self.project_name = project_name self.path = path self.exc = exc @@ -1198,28 +654,28 @@ def get_message(self) -> str: class TestConfigError(ParsingError): - def __init__(self, exc: ValidationError, node): + def __init__(self, exc: ValidationError, node) -> None: self.msg = self.validator_error_message(exc) self.node = node super().__init__(msg=self.msg) class SchemaConfigError(ParsingError): - def __init__(self, exc: ValidationError, node): + def __init__(self, exc: ValidationError, node) -> None: self.msg = self.validator_error_message(exc) self.node = node super().__init__(msg=self.msg) class SnapshopConfigError(ParsingError): - def __init__(self, exc: ValidationError, node): + def __init__(self, exc: ValidationError, node) -> None: self.msg = self.validator_error_message(exc) self.node = node super().__init__(msg=self.msg) class DbtReferenceError(ParsingError): - def __init__(self, unique_id: str, ref_unique_id: str, access: AccessType, scope: str): + def __init__(self, unique_id: str, ref_unique_id: str, access: AccessType, scope: str) -> None: self.unique_id = unique_id self.ref_unique_id = ref_unique_id self.access = access @@ -1235,7 +691,9 @@ def get_message(self) -> str: class InvalidAccessTypeError(ParsingError): - def __init__(self, unique_id: str, field_value: str, materialization: Optional[str] = None): + def __init__( + self, unique_id: str, field_value: str, materialization: Optional[str] = None + ) -> None: self.unique_id = unique_id self.field_value = field_value self.materialization = materialization @@ -1247,20 +705,26 @@ def __init__(self, unique_id: str, field_value: str, materialization: Optional[s super().__init__(msg=msg) +class InvalidUnitTestGivenInput(ParsingError): + def __init__(self, input: str) -> None: + msg = f"Unit test given inputs must be either a 'ref', 'source' or 'this' call. Got: '{input}'." 
+ super().__init__(msg=msg) + + class SameKeyNestedError(CompilationError): - def __init__(self): + def __init__(self) -> None: msg = "Test cannot have the same key at the top-level and in config" super().__init__(msg=msg) class TestArgIncludesModelError(CompilationError): - def __init__(self): + def __init__(self) -> None: msg = 'Test arguments include "model", which is a reserved argument' super().__init__(msg=msg) class UnexpectedTestNamePatternError(CompilationError): - def __init__(self, test_name: str): + def __init__(self, test_name: str) -> None: self.test_name = test_name msg = f"Test name string did not match expected pattern: {self.test_name}" super().__init__(msg=msg) @@ -1274,7 +738,7 @@ def __init__( key: str, err_msg: str, column_name: Optional[str] = None, - ): + ) -> None: self.target_name = target_name self.column_name = column_name self.name = name @@ -1304,37 +768,35 @@ def get_message(self) -> str: class TagsNotListOfStringsError(CompilationError): - def __init__(self, tags: Any): + def __init__(self, tags: Any) -> None: self.tags = tags msg = f"got {self.tags} ({type(self.tags)}) for tags, expected a list of strings" super().__init__(msg=msg) class TagNotStringError(CompilationError): - def __init__(self, tag: Any): + def __init__(self, tag: Any) -> None: self.tag = tag msg = f"got {self.tag} ({type(self.tag)}) for tag, expected a str" super().__init__(msg=msg) class TestNameNotStringError(ParsingError): - def __init__(self, test_name: Any): + def __init__(self, test_name: Any) -> None: self.test_name = test_name super().__init__(msg=self.get_message()) def get_message(self) -> str: - msg = f"test name must be a str, got {type(self.test_name)} (value {self.test_name})" return msg class TestArgsNotDictError(ParsingError): - def __init__(self, test_args: Any): + def __init__(self, test_args: Any) -> None: self.test_args = test_args super().__init__(msg=self.get_message()) def get_message(self) -> str: - msg = f"test arguments must be a dict, got {type(self.test_args)} (value {self.test_args})" return msg @@ -1345,7 +807,6 @@ def __init__(self, test): super().__init__(msg=self.get_message()) def get_message(self) -> str: - msg = ( "test definition dictionary must have exactly one key, got" f" {self.test} instead ({len(self.test)} keys)" @@ -1478,216 +939,6 @@ def get_message(self) -> str: return msg -# Postgres Exceptions -class UnexpectedDbReferenceError(NotImplementedError): - def __init__(self, adapter, database, expected): - self.adapter = adapter - self.database = database - self.expected = expected - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = f"Cross-db references not allowed in {self.adapter} ({self.database} vs {self.expected})" - return msg - - -class CrossDbReferenceProhibitedError(CompilationError): - def __init__(self, adapter, exc_msg: str): - self.adapter = adapter - self.exc_msg = exc_msg - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = f"Cross-db references not allowed in adapter {self.adapter}: Got {self.exc_msg}" - return msg - - -class IndexConfigNotDictError(CompilationError): - def __init__(self, raw_index: Any): - self.raw_index = raw_index - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - f"Invalid index config:\n" - f" Got: {self.raw_index}\n" - f' Expected a dictionary with at minimum a "columns" key' - ) - return msg - - -class IndexConfigError(CompilationError): - def __init__(self, exc: TypeError): - self.exc = exc - 
super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - validator_msg = self.validator_error_message(self.exc) - msg = f"Could not parse index config: {validator_msg}" - return msg - - -# adapters exceptions -class MacroResultError(CompilationError): - def __init__(self, freshness_macro_name: str, table): - self.freshness_macro_name = freshness_macro_name - self.table = table - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = f'Got an invalid result from "{self.freshness_macro_name}" macro: {[tuple(r) for r in self.table]}' - - return msg - - -class SnapshotTargetNotSnapshotTableError(CompilationError): - def __init__(self, missing: List): - self.missing = missing - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = 'Snapshot target is not a snapshot table (missing "{}")'.format( - '", "'.join(self.missing) - ) - return msg - - -class SnapshotTargetIncompleteError(CompilationError): - def __init__(self, extra: List, missing: List): - self.extra = extra - self.missing = missing - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - 'Snapshot target has ("{}") but not ("{}") - is it an ' - "unmigrated previous version archive?".format( - '", "'.join(self.extra), '", "'.join(self.missing) - ) - ) - return msg - - -class RenameToNoneAttemptedError(CompilationError): - def __init__(self, src_name: str, dst_name: str, name: str): - self.src_name = src_name - self.dst_name = dst_name - self.name = name - self.msg = f"Attempted to rename {self.src_name} to {self.dst_name} for {self.name}" - super().__init__(msg=self.msg) - - -class NullRelationDropAttemptedError(CompilationError): - def __init__(self, name: str): - self.name = name - self.msg = f"Attempted to drop a null relation for {self.name}" - super().__init__(msg=self.msg) - - -class NullRelationCacheAttemptedError(CompilationError): - def __init__(self, name: str): - self.name = name - self.msg = f"Attempted to cache a null relation for {self.name}" - super().__init__(msg=self.msg) - - -class QuoteConfigTypeError(CompilationError): - def __init__(self, quote_config: Any): - self.quote_config = quote_config - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - 'The seed configuration value of "quote_columns" has an ' - f"invalid type {type(self.quote_config)}" - ) - return msg - - -class MultipleDatabasesNotAllowedError(CompilationError): - def __init__(self, databases): - self.databases = databases - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = str(self.databases) - return msg - - -class RelationTypeNullError(CompilationError): - def __init__(self, relation): - self.relation = relation - self.msg = f"Tried to drop relation {self.relation}, but its type is null." - super().__init__(msg=self.msg) - - -class MaterializationNotAvailableError(CompilationError): - def __init__(self, materialization, adapter_type: str): - self.materialization = materialization - self.adapter_type = adapter_type - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = f"Materialization '{self.materialization}' is not available for {self.adapter_type}!" 
- return msg - - -class RelationReturnedMultipleResultsError(CompilationError): - def __init__(self, kwargs: Mapping[str, Any], matches: List): - self.kwargs = kwargs - self.matches = matches - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - "get_relation returned more than one relation with the given args. " - "Please specify a database or schema to narrow down the result set." - f"\n{self.kwargs}\n\n{self.matches}" - ) - return msg - - -class ApproximateMatchError(CompilationError): - def __init__(self, target, relation): - self.target = target - self.relation = relation - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - - msg = ( - "When searching for a relation, dbt found an approximate match. " - "Instead of guessing \nwhich relation to use, dbt will move on. " - f"Please delete {self.relation}, or rename it to be less ambiguous." - f"\nSearched for: {self.target}\nFound: {self.relation}" - ) - - return msg - - -class UnexpectedNullError(DbtDatabaseError): - def __init__(self, field_name: str, source): - self.field_name = field_name - self.source = source - msg = ( - f"Expected a non-null value when querying field '{self.field_name}' of table " - f" {self.source} but received value 'null' instead" - ) - super().__init__(msg) - - -class UnexpectedNonTimestampError(DbtDatabaseError): - def __init__(self, field_name: str, source, dt: Any): - self.field_name = field_name - self.source = source - self.type_name = type(dt).__name__ - msg = ( - f"Expected a timestamp value when querying field '{self.field_name}' of table " - f"{self.source} but received value of type '{self.type_name}' instead" - ) - super().__init__(msg) - - # deps exceptions class MultipleVersionGitDepsError(DependencyError): def __init__(self, git: str, requested): @@ -1871,67 +1122,7 @@ def get_message(self) -> str: return msg -class DuplicateMacroInPackageError(CompilationError): - def __init__(self, macro, macro_mapping: Mapping): - self.macro = macro - self.macro_mapping = macro_mapping - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - other_path = self.macro_mapping[self.macro.unique_id].original_file_path - # subtract 2 for the "Compilation Error" indent - # note that the line wrap eats newlines, so if you want newlines, - # this is the result :( - msg = line_wrap_message( - f"""\ - dbt found two macros named "{self.macro.name}" in the project - "{self.macro.package_name}". - - - To fix this error, rename or remove one of the following - macros: - - - {self.macro.original_file_path} - - - {other_path} - """, - subtract=2, - ) - return msg - - -class DuplicateMaterializationNameError(CompilationError): - def __init__(self, macro, other_macro): - self.macro = macro - self.other_macro = other_macro - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - macro_name = self.macro.name - macro_package_name = self.macro.package_name - other_package_name = self.other_macro.macro.package_name - - msg = ( - f"Found two materializations with the name {macro_name} (packages " - f"{macro_package_name} and {other_package_name}). dbt cannot resolve " - "this ambiguity" - ) - return msg - - # jinja exceptions -class ColumnTypeMissingError(CompilationError): - def __init__(self, column_names: List): - self.column_names = column_names - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - "Contracted models require data_type to be defined for each column. 
" - "Please ensure that the column name and data_type are defined within " - f"the YAML configuration for the {self.column_names} column(s)." - ) - return msg class PatchTargetNotFoundError(CompilationError): @@ -1948,42 +1139,6 @@ def get_message(self) -> str: return msg -class MacroNotFoundError(CompilationError): - def __init__(self, node, target_macro_id: str): - self.node = node - self.target_macro_id = target_macro_id - msg = f"'{self.node.unique_id}' references macro '{self.target_macro_id}' which is not defined!" - - super().__init__(msg=msg) - - -class MissingConfigError(CompilationError): - def __init__(self, unique_id: str, name: str): - self.unique_id = unique_id - self.name = name - msg = ( - f"Model '{self.unique_id}' does not define a required config parameter '{self.name}'." - ) - super().__init__(msg=msg) - - -class MissingMaterializationError(CompilationError): - def __init__(self, materialization, adapter_type): - self.materialization = materialization - self.adapter_type = adapter_type - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - - valid_types = "'default'" - - if self.adapter_type != "default": - valid_types = f"'default' and '{self.adapter_type}'" - - msg = f"No materialization '{self.materialization}' was found for adapter {self.adapter_type}! (searched types {valid_types})" - return msg - - class MissingRelationError(CompilationError): def __init__(self, relation, model=None): self.relation = relation @@ -2054,85 +1209,6 @@ def get_message(self) -> str: return msg -class CacheInconsistencyError(DbtInternalError): - def __init__(self, msg: str): - self.msg = msg - formatted_msg = f"Cache inconsistency detected: {self.msg}" - super().__init__(msg=formatted_msg) - - -class NewNameAlreadyInCacheError(CacheInconsistencyError): - def __init__(self, old_key: str, new_key: str): - self.old_key = old_key - self.new_key = new_key - msg = ( - f'in rename of "{self.old_key}" -> "{self.new_key}", new name is in the cache already' - ) - super().__init__(msg) - - -class ReferencedLinkNotCachedError(CacheInconsistencyError): - def __init__(self, referenced_key: str): - self.referenced_key = referenced_key - msg = f"in add_link, referenced link key {self.referenced_key} not in cache!" - super().__init__(msg) - - -class DependentLinkNotCachedError(CacheInconsistencyError): - def __init__(self, dependent_key: str): - self.dependent_key = dependent_key - msg = f"in add_link, dependent link key {self.dependent_key} not in cache!" - super().__init__(msg) - - -class TruncatedModelNameCausedCollisionError(CacheInconsistencyError): - def __init__(self, new_key, relations: Dict): - self.new_key = new_key - self.relations = relations - super().__init__(self.get_message()) - - def get_message(self) -> str: - # Tell user when collision caused by model names truncated during - # materialization. - match = re.search("__dbt_backup|__dbt_tmp$", self.new_key.identifier) - if match: - truncated_model_name_prefix = self.new_key.identifier[: match.start()] - message_addendum = ( - "\n\nName collisions can occur when the length of two " - "models' names approach your database's builtin limit. " - "Try restructuring your project such that no two models " - f"share the prefix '{truncated_model_name_prefix}'. " - "Then, clean your warehouse of any removed models." 
- ) - else: - message_addendum = "" - - msg = f"in rename, new key {self.new_key} already in cache: {list(self.relations.keys())}{message_addendum}" - - return msg - - -class NoneRelationFoundError(CacheInconsistencyError): - def __init__(self): - msg = "in get_relations, a None relation was found in the cache!" - super().__init__(msg) - - -# this is part of the context and also raised in dbt.contracts.relation.py -class DataclassNotDictError(CompilationError): - def __init__(self, obj: Any): - self.obj = obj - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - f'The object ("{self.obj}") was used as a dictionary. This ' - "capability has been removed from objects of this type." - ) - - return msg - - class DependencyNotFoundError(CompilationError): def __init__(self, node, node_description, required_pkg): self.node = node @@ -2190,7 +1266,7 @@ def get_message(self) -> str: action = "looking for" # duplicate 'ref' targets - if node_type in NodeType.refable(): + if node_type in REFABLE_NODE_TYPES: formatted_name = f'ref("{duped_name}")' # duplicate sources elif node_type == NodeType.Source: @@ -2260,33 +1336,15 @@ def get_message(self) -> str: return msg -class RelationWrongTypeError(CompilationError): - def __init__(self, relation, expected_type, model=None): - self.relation = relation - self.expected_type = expected_type - self.model = model - super().__init__(msg=self.get_message()) - - def get_message(self) -> str: - msg = ( - f"Trying to create {self.expected_type} {self.relation}, " - f"but it currently exists as a {self.relation.type}. Either " - f"drop {self.relation} manually, or run dbt with " - "`--full-refresh` and dbt will drop it for you." - ) - - return msg - - class ContractError(CompilationError): def __init__(self, yaml_columns, sql_columns): self.yaml_columns = yaml_columns self.sql_columns = sql_columns super().__init__(msg=self.get_message()) - def get_mismatches(self) -> agate.Table: + def get_mismatches(self) -> "agate.Table": # avoid a circular import - from dbt.clients.agate_helper import table_from_data_flat + from dbt_common.clients.agate_helper import table_from_data_flat column_names = ["column_name", "definition_type", "contract_type", "mismatch_reason"] # list of mismatches @@ -2335,7 +1393,7 @@ def get_message(self) -> str: "This model has an enforced contract, and its 'columns' specification is missing" ) - table: agate.Table = self.get_mismatches() + table: "agate.Table" = self.get_mismatches() # Hack to get Agate table output as string output = io.StringIO() table.print_table(output=output, max_rows=None, max_column_width=50) # type: ignore diff --git a/core/dbt/flags.py b/core/dbt/flags.py index 891d510f2e1..97ee9598f2f 100644 --- a/core/dbt/flags.py +++ b/core/dbt/flags.py @@ -1,30 +1,8 @@ # Do not import the os package because we expose this package in jinja -from os import getenv as os_getenv from argparse import Namespace -from multiprocessing import get_context -from typing import Optional from pathlib import Path - -# for setting up logger for legacy logger -def env_set_truthy(key: str) -> Optional[str]: - """Return the value if it was set to a "truthy" string value or None - otherwise. 
- """ - value = os_getenv(key) - if not value or value.lower() in ("0", "false", "f"): - return None - return value - - -# for setting up logger for legacy logger -ENABLE_LEGACY_LOGGER = env_set_truthy("DBT_ENABLE_LEGACY_LOGGER") - -# This is not a flag, it's a place to store the lock -MP_CONTEXT = get_context() - - -# this roughly follows the patten of EVENT_MANAGER in dbt/events/functions.py +# this roughly follows the patten of EVENT_MANAGER in dbt/common/events/functions.py # During de-globlization, we'll need to handle both similarly # Match USE_COLORS default with default in dbt.cli.params.use_colors for use in --version GLOBAL_FLAGS = Namespace(USE_COLORS=True) # type: ignore @@ -39,27 +17,29 @@ def get_flags(): return GLOBAL_FLAGS -def set_from_args(args: Namespace, user_config): +def set_from_args(args: Namespace, project_flags): global GLOBAL_FLAGS - from dbt.cli.main import cli from dbt.cli.flags import Flags, convert_config + from dbt.cli.main import cli - # we set attributes of args after initialize the flags, but user_config + # we set attributes of args after initialize the flags, but project_flags # is being read in the Flags constructor, so we need to read it here and pass in - # to make sure we use the correct user_config - if (hasattr(args, "PROFILES_DIR") or hasattr(args, "profiles_dir")) and not user_config: - from dbt.config.profile import read_user_config + # to make sure we use the correct project_flags + profiles_dir = getattr(args, "PROFILES_DIR", None) or getattr(args, "profiles_dir", None) + project_dir = getattr(args, "PROJECT_DIR", None) or getattr(args, "project_dir", None) + if profiles_dir and project_dir: + from dbt.config.project import read_project_flags - profiles_dir = getattr(args, "PROFILES_DIR", None) or getattr(args, "profiles_dir") - user_config = read_user_config(profiles_dir) + project_flags = read_project_flags(project_dir, profiles_dir) # make a dummy context to get the flags, totally arbitrary ctx = cli.make_context("run", ["run"]) - flags = Flags(ctx, user_config) + flags = Flags(ctx, project_flags) for arg_name, args_param_value in vars(args).items(): args_param_value = convert_config(arg_name, args_param_value) object.__setattr__(flags, arg_name.upper(), args_param_value) object.__setattr__(flags, arg_name.lower(), args_param_value) + flags.set_common_global_flags() GLOBAL_FLAGS = flags # type: ignore @@ -88,6 +68,7 @@ def get_flag_dict(): "target_path", "log_path", "invocation_command", + "empty", } return {key: getattr(GLOBAL_FLAGS, key.upper(), None) for key in flag_attr} diff --git a/core/dbt/graph/README.md b/core/dbt/graph/README.md index 61bfd614a18..1daa9d9fce8 100644 --- a/core/dbt/graph/README.md +++ b/core/dbt/graph/README.md @@ -1 +1,9 @@ # Graph README + +## Graph Selector Creation + +### Selector Loading +During dbt execution, the `@requires.project` decorator creates the final selector objects used in the graph. The `SelectorConfig` class loads selectors from the project configuration, while the `selector_config_from_data` function parses these selectors. + +#### Indirect Selection Default Value +In `@requires.preflight`, dbt reads CLI flags, environment variables, and the parameter's default value. It resolves these inputs based on their precedence order and stores the resolved value in global flags. 
When loading selectors, the [`selection_criteria_from_dict`](https://github.com/dbt-labs/dbt-core/blob/b316c5f18021fef3d7fd6ec255427054b7d2205e/core/dbt/graph/selector_spec.py#L111) function resolves the indirect selection value to the global flags value if not set. This ensures correct resolution of the indirect selection value. diff --git a/core/dbt/graph/__init__.py b/core/dbt/graph/__init__.py index 67d979cc0fb..f89b1edfc69 100644 --- a/core/dbt/graph/__init__.py +++ b/core/dbt/graph/__init__.py @@ -1,17 +1,11 @@ +from .cli import parse_difference, parse_from_selectors_definition # noqa: F401 +from .graph import Graph, UniqueId # noqa: F401 +from .queue import GraphQueue # noqa: F401 +from .selector import NodeSelector, ResourceTypeSelector # noqa: F401 from .selector_spec import ( # noqa: F401 - SelectionUnion, - SelectionSpec, - SelectionIntersection, - SelectionDifference, SelectionCriteria, + SelectionDifference, + SelectionIntersection, + SelectionSpec, + SelectionUnion, ) -from .selector import ( # noqa: F401 - ResourceTypeSelector, - NodeSelector, -) -from .cli import ( # noqa: F401 - parse_difference, - parse_from_selectors_definition, -) -from .queue import GraphQueue # noqa: F401 -from .graph import Graph, UniqueId # noqa: F401 diff --git a/core/dbt/graph/cli.py b/core/dbt/graph/cli.py index 2950e88415e..412ad54caae 100644 --- a/core/dbt/graph/cli.py +++ b/core/dbt/graph/cli.py @@ -1,34 +1,32 @@ # special support for CLI argument parsing. # TODO: Remove as part of https://github.com/dbt-labs/dbt-core/issues/6701 -from dbt.flags import get_flags -from copy import deepcopy import itertools -from dbt.clients.yaml_helper import yaml, Loader, Dumper # noqa: F401 - -from typing import Dict, List, Optional, Tuple, Any, Union +from copy import deepcopy +from typing import Any, Dict, List, Optional, Tuple, Union +from dbt.clients.yaml_helper import Dumper, Loader, yaml # noqa: F401 from dbt.contracts.selection import SelectorDefinition, SelectorFile -from dbt.exceptions import DbtInternalError, DbtValidationError +from dbt.flags import get_flags +from dbt_common.exceptions import DbtInternalError, DbtValidationError from .selector_spec import ( - SelectionUnion, - SelectionSpec, - SelectionIntersection, - SelectionDifference, - SelectionCriteria, IndirectSelection, + SelectionCriteria, + SelectionDifference, + SelectionIntersection, + SelectionSpec, + SelectionUnion, ) INTERSECTION_DELIMITER = "," -DEFAULT_INCLUDES: List[str] = ["fqn:*", "source:*", "exposure:*", "metric:*"] +DEFAULT_INCLUDES: List[str] = ["fqn:*", "source:*", "exposure:*", "metric:*", "semantic_model:*"] DEFAULT_EXCLUDES: List[str] = [] def parse_union( components: List[str], expect_exists: bool, - indirect_selection: IndirectSelection = IndirectSelection.Eager, ) -> SelectionUnion: # turn ['a b', 'c'] -> ['a', 'b', 'c'] raw_specs = itertools.chain.from_iterable(r.split(" ") for r in components) @@ -37,7 +35,7 @@ def parse_union( # ['a', 'b', 'c,d'] -> union('a', 'b', intersection('c', 'd')) for raw_spec in raw_specs: intersection_components: List[SelectionSpec] = [ - SelectionCriteria.from_single_spec(part, indirect_selection=indirect_selection) + SelectionCriteria.from_single_spec(part) for part in raw_spec.split(INTERSECTION_DELIMITER) ] union_components.append( @@ -56,41 +54,25 @@ def parse_union( ) -def parse_union_from_default( - raw: Optional[List[str]], - default: List[str], - indirect_selection: IndirectSelection = IndirectSelection.Eager, -) -> SelectionUnion: +def parse_union_from_default(raw: 
Optional[List[str]], default: List[str]) -> SelectionUnion: components: List[str] expect_exists: bool if raw is None: - return parse_union( - components=default, expect_exists=False, indirect_selection=indirect_selection - ) + return parse_union(components=default, expect_exists=False) else: - return parse_union( - components=raw, expect_exists=True, indirect_selection=indirect_selection - ) + return parse_union(components=raw, expect_exists=True) def parse_difference( - include: Optional[List[str]], exclude: Optional[List[str]], indirect_selection: Any + include: Optional[List[str]], exclude: Optional[List[str]] ) -> SelectionDifference: if include == (): include = None - included = parse_union_from_default( - include, DEFAULT_INCLUDES, indirect_selection=IndirectSelection(indirect_selection) - ) - flags = get_flags() - excluded = parse_union_from_default( - exclude, DEFAULT_EXCLUDES, indirect_selection=IndirectSelection(flags.INDIRECT_SELECTION) - ) - return SelectionDifference( - components=[included, excluded], - indirect_selection=IndirectSelection(flags.INDIRECT_SELECTION), - ) + included = parse_union_from_default(include, DEFAULT_INCLUDES) + excluded = parse_union_from_default(exclude, DEFAULT_EXCLUDES) + return SelectionDifference(components=[included, excluded]) RawDefinition = Union[str, Dict[str, Any]] diff --git a/core/dbt/graph/graph.py b/core/dbt/graph/graph.py index 69a2f21258a..cf569f3547d 100644 --- a/core/dbt/graph/graph.py +++ b/core/dbt/graph/graph.py @@ -1,8 +1,10 @@ -from typing import Set, Iterable, Iterator, Optional, NewType +from functools import partial from itertools import product +from typing import Iterable, Iterator, NewType, Optional, Set + import networkx as nx # type: ignore -from dbt.exceptions import DbtInternalError +from dbt_common.exceptions import DbtInternalError UniqueId = NewType("UniqueId", str) @@ -12,8 +14,8 @@ class Graph: and how they interact with the graph. """ - def __init__(self, graph): - self.graph = graph + def __init__(self, graph) -> None: + self.graph: nx.DiGraph = graph def nodes(self) -> Set[UniqueId]: return set(self.graph.nodes()) @@ -42,16 +44,14 @@ def descendants(self, node: UniqueId, max_depth: Optional[int] = None) -> Set[Un return {child for _, child in nx.bfs_edges(filtered_graph, node, depth_limit=max_depth)} def exclude_edge_type(self, edge_type_to_exclude): - return nx.restricted_view( + return nx.subgraph_view( self.graph, - nodes=[], - edges=( - (a, b) - for a, b in self.graph.edges - if self.graph[a][b].get("edge_type") == edge_type_to_exclude - ), + filter_edge=partial(self.filter_edges_by_type, edge_type=edge_type_to_exclude), ) + def filter_edges_by_type(self, first_node, second_node, edge_type): + return self.graph.get_edge_data(first_node, second_node).get("edge_type") != edge_type + def select_childrens_parents(self, selected: Set[UniqueId]) -> Set[UniqueId]: ancestors_for = self.select_children(selected) | selected return self.select_parents(ancestors_for) | ancestors_for @@ -59,18 +59,40 @@ def select_childrens_parents(self, selected: Set[UniqueId]) -> Set[UniqueId]: def select_children( self, selected: Set[UniqueId], max_depth: Optional[int] = None ) -> Set[UniqueId]: - descendants: Set[UniqueId] = set() - for node in selected: - descendants.update(self.descendants(node, max_depth)) - return descendants + """Returns all nodes which are descendants of the 'selected' set. 
+ Nodes in the 'selected' set are counted as children only if + they are descendants of other nodes in the 'selected' set.""" + children: Set[UniqueId] = set() + i = 0 + while len(selected) > 0 and (max_depth is None or i < max_depth): + next_layer: Set[UniqueId] = set() + for node in selected: + next_layer.update(self.descendants(node, 1)) + next_layer = next_layer - children # Avoid re-searching + children.update(next_layer) + selected = next_layer + i += 1 + + return children def select_parents( self, selected: Set[UniqueId], max_depth: Optional[int] = None ) -> Set[UniqueId]: - ancestors: Set[UniqueId] = set() - for node in selected: - ancestors.update(self.ancestors(node, max_depth)) - return ancestors + """Returns all nodes which are ancestors of the 'selected' set. + Nodes in the 'selected' set are counted as parents only if + they are ancestors of other nodes in the 'selected' set.""" + parents: Set[UniqueId] = set() + i = 0 + while len(selected) > 0 and (max_depth is None or i < max_depth): + next_layer: Set[UniqueId] = set() + for node in selected: + next_layer.update(self.ancestors(node, 1)) + next_layer = next_layer - parents # Avoid re-searching + parents.update(next_layer) + selected = next_layer + i += 1 + + return parents def select_successors(self, selected: Set[UniqueId]) -> Set[UniqueId]: successors: Set[UniqueId] = set() @@ -84,10 +106,10 @@ def get_subset_graph(self, selected: Iterable[UniqueId]) -> "Graph": removed nodes are preserved as explicit new edges. """ - new_graph = self.graph.copy() - include_nodes = set(selected) + new_graph: nx.DiGraph = self.graph.copy() + include_nodes: Set[UniqueId] = set(selected) - still_removing = True + still_removing: bool = True while still_removing: nodes_to_remove = list( node @@ -130,6 +152,8 @@ def get_subset_graph(self, selected: Iterable[UniqueId]) -> "Graph": return Graph(new_graph) def subgraph(self, nodes: Iterable[UniqueId]) -> "Graph": + # Take the original networkx graph and return a subgraph containing only + # the selected unique_id nodes. return Graph(self.graph.subgraph(nodes)) def get_dependent_nodes(self, node: UniqueId): diff --git a/core/dbt/graph/queue.py b/core/dbt/graph/queue.py index a21a9afc630..18ea15ac773 100644 --- a/core/dbt/graph/queue.py +++ b/core/dbt/graph/queue.py @@ -1,19 +1,20 @@ -import networkx as nx # type: ignore import threading - from queue import PriorityQueue -from typing import Dict, Set, List, Generator, Optional +from typing import Dict, Generator, List, Optional, Set -from .graph import UniqueId +import networkx as nx # type: ignore + +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( - SourceDefinition, Exposure, - Metric, GraphMemberNode, + Metric, + SourceDefinition, ) -from dbt.contracts.graph.manifest import Manifest from dbt.node_types import NodeType +from .graph import UniqueId + class GraphQueue: """A fancy queue that is backed by the dependency graph. @@ -24,8 +25,15 @@ class GraphQueue: the same time, as there is an unlocked race! """ - def __init__(self, graph: nx.DiGraph, manifest: Manifest, selected: Set[UniqueId]): - self.graph = graph + def __init__( + self, + graph: nx.DiGraph, + manifest: Manifest, + selected: Set[UniqueId], + preserve_edges: bool = True, + ) -> None: + # 'create_empty_copy' returns a copy of the graph G with all of the edges removed, and leaves nodes intact. 
+ self.graph = graph if preserve_edges else nx.classes.function.create_empty_copy(graph) self.manifest = manifest self._selected = selected # store the queue as a priority queue. diff --git a/core/dbt/graph/selector.py b/core/dbt/graph/selector.py index edb865ce7fd..cc0b4ebe9fc 100644 --- a/core/dbt/graph/selector.py +++ b/core/dbt/graph/selector.py @@ -1,22 +1,18 @@ -from typing import Set, List, Optional, Tuple +from typing import List, Optional, Set, Tuple -from .graph import Graph, UniqueId -from .queue import GraphQueue -from .selector_methods import MethodManager -from .selector_spec import SelectionCriteria, SelectionSpec, IndirectSelection - -from dbt.events.functions import fire_event, warn_or_error -from dbt.events.types import SelectorReportInvalidSelector, NoNodesForSelectionCriteria -from dbt.node_types import NodeType -from dbt.exceptions import ( - DbtInternalError, - InvalidSelectorError, -) -from dbt.contracts.graph.nodes import GraphMemberNode +from dbt import selected_resources from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import GraphMemberNode from dbt.contracts.state import PreviousState +from dbt.events.types import NoNodesForSelectionCriteria, SelectorReportInvalidSelector +from dbt.exceptions import DbtInternalError, InvalidSelectorError +from dbt.node_types import NodeType +from dbt_common.events.functions import fire_event, warn_or_error -from dbt import selected_resources +from .graph import Graph, UniqueId +from .queue import GraphQueue +from .selector_methods import MethodManager +from .selector_spec import IndirectSelection, SelectionCriteria, SelectionSpec def get_package_names(nodes): @@ -31,6 +27,8 @@ def can_select_indirectly(node): """ if node.resource_type == NodeType.Test: return True + elif node.resource_type == NodeType.Unit: + return True else: return False @@ -44,10 +42,10 @@ def __init__( manifest: Manifest, previous_state: Optional[PreviousState] = None, include_empty_nodes: bool = False, - ): + ) -> None: super().__init__(manifest, previous_state) - self.full_graph = graph - self.include_empty_nodes = include_empty_nodes + self.full_graph: Graph = graph + self.include_empty_nodes: bool = include_empty_nodes # build a subgraph containing only non-empty, enabled nodes and enabled # sources. 
@@ -89,12 +87,15 @@ def get_nodes_from_criteria( ) return set(), set() + neighbors = self.collect_specified_neighbors(spec, collected) + selected = collected | neighbors + + # if --indirect-selection EMPTY, do not expand to adjacent tests if spec.indirect_selection == IndirectSelection.Empty: - return collected, set() + return selected, set() else: - neighbors = self.collect_specified_neighbors(spec, collected) direct_nodes, indirect_nodes = self.expand_selection( - selected=(collected | neighbors), indirect_selection=spec.indirect_selection + selected=selected, indirect_selection=spec.indirect_selection ) return direct_nodes, indirect_nodes @@ -169,13 +170,24 @@ def _is_graph_member(self, unique_id: UniqueId) -> bool: metric = self.manifest.metrics[unique_id] return metric.config.enabled elif unique_id in self.manifest.semantic_models: + semantic_model = self.manifest.semantic_models[unique_id] + return semantic_model.config.enabled + elif unique_id in self.manifest.unit_tests: return True + elif unique_id in self.manifest.saved_queries: + saved_query = self.manifest.saved_queries[unique_id] + return saved_query.config.enabled + node = self.manifest.nodes[unique_id] - if self.include_empty_nodes: - return node.config.enabled + return node.config.enabled + + def _is_empty_node(self, unique_id: UniqueId) -> bool: + if unique_id in self.manifest.nodes: + node = self.manifest.nodes[unique_id] + return node.empty else: - return not node.empty and node.config.enabled + return False def node_is_match(self, node: GraphMemberNode) -> bool: """Determine if a node is a match for the selector. Non-match nodes @@ -195,6 +207,10 @@ def _is_match(self, unique_id: UniqueId) -> bool: node = self.manifest.metrics[unique_id] elif unique_id in self.manifest.semantic_models: node = self.manifest.semantic_models[unique_id] + elif unique_id in self.manifest.unit_tests: + node = self.manifest.unit_tests[unique_id] + elif unique_id in self.manifest.saved_queries: + node = self.manifest.saved_queries[unique_id] else: raise DbtInternalError(f"Node {unique_id} not found in the manifest!") return self.node_is_match(node) @@ -203,7 +219,12 @@ def filter_selection(self, selected: Set[UniqueId]) -> Set[UniqueId]: """Return the subset of selected nodes that is a match for this selector. """ - return {unique_id for unique_id in selected if self._is_match(unique_id)} + return { + unique_id + for unique_id in selected + if self._is_match(unique_id) + and (self.include_empty_nodes or not self._is_empty_node(unique_id)) + } def expand_selection( self, @@ -240,8 +261,13 @@ def expand_selection( ) for unique_id in self.graph.select_successors(selected): - if unique_id in self.manifest.nodes: - node = self.manifest.nodes[unique_id] + if unique_id in self.manifest.nodes or unique_id in self.manifest.unit_tests: + if unique_id in self.manifest.nodes: + node = self.manifest.nodes[unique_id] + elif unique_id in self.manifest.unit_tests: + node = self.manifest.unit_tests[unique_id] # type: ignore + # Test nodes that are not selected themselves, but whose parents are selected. + # (Does not include unit tests because they can only have one parent.) if can_select_indirectly(node): # should we add it in directly? 
if indirect_selection == IndirectSelection.Eager or set( @@ -305,15 +331,18 @@ def get_selected(self, spec: SelectionSpec) -> Set[UniqueId]: return filtered_nodes - def get_graph_queue(self, spec: SelectionSpec) -> GraphQueue: + def get_graph_queue(self, spec: SelectionSpec, preserve_edges: bool = True) -> GraphQueue: """Returns a queue over nodes in the graph that tracks progress of - dependecies. + dependencies. """ + # Filtering happens in get_selected selected_nodes = self.get_selected(spec) + # Save to global variable selected_resources.set_selected_resources(selected_nodes) + # Construct a new graph using the selected_nodes new_graph = self.full_graph.get_subset_graph(selected_nodes) # should we give a way here for consumers to mutate the graph? - return GraphQueue(new_graph.graph, self.manifest, selected_nodes) + return GraphQueue(new_graph.graph, self.manifest, selected_nodes, preserve_edges) class ResourceTypeSelector(NodeSelector): @@ -324,7 +353,7 @@ def __init__( previous_state: Optional[PreviousState], resource_types: List[NodeType], include_empty_nodes: bool = False, - ): + ) -> None: super().__init__( graph=graph, manifest=manifest, diff --git a/core/dbt/graph/selector_methods.py b/core/dbt/graph/selector_methods.py index c73be57e0d2..dbeaf7ed4c3 100644 --- a/core/dbt/graph/selector_methods.py +++ b/core/dbt/graph/selector_methods.py @@ -2,32 +2,41 @@ from fnmatch import fnmatch from itertools import chain from pathlib import Path -from typing import Set, List, Dict, Iterator, Tuple, Any, Union, Type, Optional, Callable - -from dbt.dataclass_schema import StrEnum - -from .graph import UniqueId +from typing import ( + Any, + Callable, + Dict, + Iterator, + List, + Optional, + Set, + Tuple, + Type, + Union, +) -from dbt.contracts.graph.manifest import Manifest, WritableManifest +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( - SingularTestNode, Exposure, - Metric, GenericTestNode, - SourceDefinition, - ResultNode, ManifestNode, + Metric, ModelNode, + ResultNode, + SavedQuery, + SemanticModel, + SingularTestNode, + SourceDefinition, + UnitTestDefinition, ) from dbt.contracts.graph.unparsed import UnparsedVersion from dbt.contracts.state import PreviousState -from dbt.exceptions import ( - DbtInternalError, - DbtRuntimeError, -) from dbt.node_types import NodeType -from dbt.events.contextvars import get_project_root +from dbt_common.dataclass_schema import StrEnum +from dbt_common.events.contextvars import get_project_root +from dbt_common.exceptions import DbtInternalError, DbtRuntimeError +from .graph import UniqueId SELECTOR_GLOB = "*" SELECTOR_DELIMITER = ":" @@ -51,8 +60,10 @@ class MethodName(StrEnum): Metric = "metric" Result = "result" SourceStatus = "source_status" - Wildcard = "wildcard" Version = "version" + SemanticModel = "semantic_model" + SavedQuery = "saved_query" + UnitTest = "unit_test" def is_selected_node(fqn: List[str], node_selector: str, is_versioned: bool) -> bool: @@ -97,13 +108,15 @@ def is_selected_node(fqn: List[str], node_selector: str, is_versioned: bool) -> return True -SelectorTarget = Union[SourceDefinition, ManifestNode, Exposure, Metric] +SelectorTarget = Union[ + SourceDefinition, ManifestNode, Exposure, Metric, SemanticModel, UnitTestDefinition, SavedQuery +] class SelectorMethod(metaclass=abc.ABCMeta): def __init__( self, manifest: Manifest, previous_state: Optional[PreviousState], arguments: List[str] - ): + ) -> None: self.manifest: Manifest = manifest self.previous_state = previous_state 
self.arguments: List[str] = arguments @@ -144,6 +157,41 @@ def metric_nodes(self, included_nodes: Set[UniqueId]) -> Iterator[Tuple[UniqueId continue yield unique_id, metric + def unit_tests( + self, included_nodes: Set[UniqueId] + ) -> Iterator[Tuple[UniqueId, UnitTestDefinition]]: + for unique_id, unit_test in self.manifest.unit_tests.items(): + unique_id = UniqueId(unique_id) + if unique_id not in included_nodes: + continue + yield unique_id, unit_test + + def parsed_and_unit_nodes(self, included_nodes: Set[UniqueId]): + yield from chain( + self.parsed_nodes(included_nodes), + self.unit_tests(included_nodes), + ) + + def semantic_model_nodes( + self, included_nodes: Set[UniqueId] + ) -> Iterator[Tuple[UniqueId, SemanticModel]]: + + for key, semantic_model in self.manifest.semantic_models.items(): + unique_id = UniqueId(key) + if unique_id not in included_nodes: + continue + yield unique_id, semantic_model + + def saved_query_nodes( + self, included_nodes: Set[UniqueId] + ) -> Iterator[Tuple[UniqueId, SavedQuery]]: + + for key, saved_query in self.manifest.saved_queries.items(): + unique_id = UniqueId(key) + if unique_id not in included_nodes: + continue + yield unique_id, saved_query + def all_nodes( self, included_nodes: Set[UniqueId] ) -> Iterator[Tuple[UniqueId, SelectorTarget]]: @@ -152,6 +200,9 @@ def all_nodes( self.source_nodes(included_nodes), self.exposure_nodes(included_nodes), self.metric_nodes(included_nodes), + self.unit_tests(included_nodes), + self.semantic_model_nodes(included_nodes), + self.saved_query_nodes(included_nodes), ) def configurable_nodes( @@ -167,6 +218,9 @@ def non_source_nodes( self.parsed_nodes(included_nodes), self.exposure_nodes(included_nodes), self.metric_nodes(included_nodes), + self.unit_tests(included_nodes), + self.semantic_model_nodes(included_nodes), + self.saved_query_nodes(included_nodes), ) def groupable_nodes( @@ -210,36 +264,37 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu :param str selector: The selector or node name """ - parsed_nodes = list(self.parsed_nodes(included_nodes)) - for node, real_node in parsed_nodes: - if self.node_is_match(selector, real_node.fqn, real_node.is_versioned): - yield node + non_source_nodes = list(self.non_source_nodes(included_nodes)) + for unique_id, node in non_source_nodes: + if self.node_is_match(selector, node.fqn, node.is_versioned): + yield unique_id class TagSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: """yields nodes from included that have the specified tag""" - for node, real_node in self.all_nodes(included_nodes): - if any(fnmatch(tag, selector) for tag in real_node.tags): - yield node + for unique_id, node in self.all_nodes(included_nodes): + if hasattr(node, "tags") and any(fnmatch(tag, selector) for tag in node.tags): + yield unique_id class GroupSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: """yields nodes from included in the specified group""" - for node, real_node in self.groupable_nodes(included_nodes): - if selector == real_node.config.get("group"): - yield node + for unique_id, node in self.groupable_nodes(included_nodes): + node_group = node.config.get("group") + if node_group and fnmatch(node_group, selector): + yield unique_id class AccessSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: """yields model nodes matching the specified access 
level""" - for node, real_node in self.parsed_nodes(included_nodes): - if not isinstance(real_node, ModelNode): + for unique_id, node in self.parsed_nodes(included_nodes): + if not isinstance(node, ModelNode): continue - if selector == real_node.access: - yield node + if selector == node.access: + yield unique_id class SourceSelectorMethod(SelectorMethod): @@ -262,14 +317,14 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu ).format(selector) raise DbtRuntimeError(msg) - for node, real_node in self.source_nodes(included_nodes): - if not fnmatch(real_node.package_name, target_package): + for unique_id, node in self.source_nodes(included_nodes): + if not fnmatch(node.package_name, target_package): continue - if not fnmatch(real_node.source_name, target_source): + if not fnmatch(node.source_name, target_source): continue - if not fnmatch(real_node.name, target_table): + if not fnmatch(node.name, target_table): continue - yield node + yield unique_id class ExposureSelectorMethod(SelectorMethod): @@ -288,13 +343,13 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu ).format(selector) raise DbtRuntimeError(msg) - for node, real_node in self.exposure_nodes(included_nodes): - if not fnmatch(real_node.package_name, target_package): + for unique_id, node in self.exposure_nodes(included_nodes): + if not fnmatch(node.package_name, target_package): continue - if not fnmatch(real_node.name, target_name): + if not fnmatch(node.name, target_name): continue - yield node + yield unique_id class MetricSelectorMethod(SelectorMethod): @@ -313,13 +368,88 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu ).format(selector) raise DbtRuntimeError(msg) - for node, real_node in self.metric_nodes(included_nodes): - if not fnmatch(real_node.package_name, target_package): + for unique_id, node in self.metric_nodes(included_nodes): + if not fnmatch(node.package_name, target_package): + continue + if not fnmatch(node.name, target_name): + continue + + yield unique_id + + +class SemanticModelSelectorMethod(SelectorMethod): + def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: + parts = selector.split(".") + target_package = SELECTOR_GLOB + if len(parts) == 1: + target_name = parts[0] + elif len(parts) == 2: + target_package, target_name = parts + else: + msg = ( + 'Invalid semantic model selector value "{}". Semantic models must be of ' + "the form ${{semantic_model_name}} or " + "${{semantic_model_package.semantic_model_name}}" + ).format(selector) + raise DbtRuntimeError(msg) + + for unique_id, node in self.semantic_model_nodes(included_nodes): + if not fnmatch(node.package_name, target_package): + continue + if not fnmatch(node.name, target_name): + continue + + yield unique_id + + +class SavedQuerySelectorMethod(SelectorMethod): + def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: + parts = selector.split(".") + target_package = SELECTOR_GLOB + if len(parts) == 1: + target_name = parts[0] + elif len(parts) == 2: + target_package, target_name = parts + else: + msg = ( + 'Invalid saved query selector value "{}". 
Saved queries must be of ' + "the form ${{saved_query_name}} or " + "${{saved_query_package.saved_query_name}}" + ).format(selector) + raise DbtRuntimeError(msg) + + for unique_id, node in self.saved_query_nodes(included_nodes): + if not fnmatch(node.package_name, target_package): continue - if not fnmatch(real_node.name, target_name): + if not fnmatch(node.name, target_name): continue - yield node + yield unique_id + + +class UnitTestSelectorMethod(SelectorMethod): + def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: + parts = selector.split(".") + target_package = SELECTOR_GLOB + if len(parts) == 1: + target_name = parts[0] + elif len(parts) == 2: + target_package, target_name = parts + else: + msg = ( + 'Invalid unit test selector value "{}". Saved queries must be of ' + "the form ${{unit_test_name}} or " + "${{unit_test_package_name.unit_test_name}}" + ).format(selector) + raise DbtRuntimeError(msg) + + for unique_id, node in self.unit_tests(included_nodes): + if not fnmatch(node.package_name, target_package): + continue + if not fnmatch(node.name, target_name): + continue + + yield unique_id class PathSelectorMethod(SelectorMethod): @@ -332,35 +462,39 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu else: root = Path.cwd() paths = set(p.relative_to(root) for p in root.glob(selector)) - for node, real_node in self.all_nodes(included_nodes): - ofp = Path(real_node.original_file_path) + for unique_id, node in self.all_nodes(included_nodes): + ofp = Path(node.original_file_path) if ofp in paths: - yield node - if hasattr(real_node, "patch_path") and real_node.patch_path: # type: ignore - pfp = real_node.patch_path.split("://")[1] # type: ignore + yield unique_id + if hasattr(node, "patch_path") and node.patch_path: # type: ignore + pfp = node.patch_path.split("://")[1] # type: ignore ymlfp = Path(pfp) if ymlfp in paths: - yield node + yield unique_id if any(parent in paths for parent in ofp.parents): - yield node + yield unique_id class FileSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: """Yields nodes from included that match the given file name.""" - for node, real_node in self.all_nodes(included_nodes): - if fnmatch(Path(real_node.original_file_path).name, selector): - yield node - elif fnmatch(Path(real_node.original_file_path).stem, selector): - yield node + for unique_id, node in self.all_nodes(included_nodes): + if fnmatch(Path(node.original_file_path).name, selector): + yield unique_id + elif fnmatch(Path(node.original_file_path).stem, selector): + yield unique_id class PackageSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: """Yields nodes from included that have the specified package""" - for node, real_node in self.all_nodes(included_nodes): - if fnmatch(real_node.package_name, selector): - yield node + # `this` is an alias for the current dbt project name + if selector == "this" and self.manifest.metadata.project_name is not None: + selector = self.manifest.metadata.project_name + + for unique_id, node in self.all_nodes(included_nodes): + if fnmatch(node.package_name, selector): + yield unique_id def _getattr_descend(obj: Any, attrs: List[str]) -> Any: @@ -402,9 +536,9 @@ def search( # search sources is kind of useless now source configs only have # 'enabled', which you can't really filter on anyway, but maybe we'll # add more someday, so search them anyway. 
- for node, real_node in self.configurable_nodes(included_nodes): + for unique_id, node in self.configurable_nodes(included_nodes): try: - value = _getattr_descend(real_node.config, parts) + value = _getattr_descend(node.config, parts) except AttributeError: continue else: @@ -414,7 +548,7 @@ def search( or (CaseInsensitive(selector) == "true" and True in value) or (CaseInsensitive(selector) == "false" and False in value) ): - yield node + yield unique_id else: if ( (selector == value) @@ -422,7 +556,7 @@ def search( or (CaseInsensitive(selector) == "false") and value is False ): - yield node + yield unique_id class ResourceTypeSelectorMethod(SelectorMethod): @@ -431,43 +565,50 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu resource_type = NodeType(selector) except ValueError as exc: raise DbtRuntimeError(f'Invalid resource_type selector "{selector}"') from exc - for node, real_node in self.parsed_nodes(included_nodes): - if real_node.resource_type == resource_type: - yield node + for unique_id, node in self.all_nodes(included_nodes): + if node.resource_type == resource_type: + yield unique_id class TestNameSelectorMethod(SelectorMethod): __test__ = False def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: - for node, real_node in self.parsed_nodes(included_nodes): - if real_node.resource_type == NodeType.Test and hasattr(real_node, "test_metadata"): - if fnmatch(real_node.test_metadata.name, selector): # type: ignore[union-attr] - yield node + for unique_id, node in self.parsed_and_unit_nodes(included_nodes): + if node.resource_type == NodeType.Test and hasattr(node, "test_metadata"): + if fnmatch(node.test_metadata.name, selector): # type: ignore[union-attr] + yield unique_id + elif node.resource_type == NodeType.Unit: + if fnmatch(node.name, selector): + yield unique_id class TestTypeSelectorMethod(SelectorMethod): __test__ = False def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: - search_type: Type + search_types: List[Any] # continue supporting 'schema' + 'data' for backwards compatibility if selector in ("generic", "schema"): - search_type = GenericTestNode - elif selector in ("singular", "data"): - search_type = SingularTestNode + search_types = [GenericTestNode] + elif selector in ("data"): + search_types = [GenericTestNode, SingularTestNode] + elif selector in ("singular"): + search_types = [SingularTestNode] + elif selector in ("unit"): + search_types = [UnitTestDefinition] else: raise DbtRuntimeError( - f'Invalid test type selector {selector}: expected "generic" or ' '"singular"' + f'Invalid test type selector {selector}: expected "generic", "singular", "unit", or "data"' ) - for node, real_node in self.parsed_nodes(included_nodes): - if isinstance(real_node, search_type): - yield node + for unique_id, node in self.parsed_and_unit_nodes(included_nodes): + if isinstance(node, tuple(search_types)): + yield unique_id class StateSelectorMethod(SelectorMethod): - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super().__init__(*args, **kwargs) self.modified_macros: Optional[List[str]] = None @@ -487,7 +628,7 @@ def _macros_modified(self) -> List[str]: else: modified.append(uid) - for uid, macro in old_macros.items(): + for uid, _ in old_macros.items(): if uid not in new_macros: modified.append(uid) @@ -539,7 +680,10 @@ def check_macros_modified(self, node): def check_modified_content( self, old: Optional[SelectorTarget], new: SelectorTarget, 
adapter_type: str ) -> bool: - if isinstance(new, (SourceDefinition, Exposure, Metric)): + if isinstance( + new, + (SourceDefinition, Exposure, Metric, SemanticModel, UnitTestDefinition, SavedQuery), + ): # these all overwrite `same_contents` different_contents = not new.same_contents(old) # type: ignore else: @@ -577,7 +721,9 @@ def check_modified_contract( ) -> Callable[[Optional[SelectorTarget], SelectorTarget], bool]: # get a function that compares two selector target based on compare method provided def check_modified_contract(old: Optional[SelectorTarget], new: SelectorTarget) -> bool: - if hasattr(new, compare_method): + if new is None and hasattr(old, compare_method + "_removed"): + return getattr(old, compare_method + "_removed")() + elif hasattr(new, compare_method): # when old body does not exist or old and new are not the same return not old or not getattr(new, compare_method)(old, adapter_type) # type: ignore else: @@ -585,9 +731,6 @@ def check_modified_contract(old: Optional[SelectorTarget], new: SelectorTarget) return check_modified_contract - def check_new(self, old: Optional[SelectorTarget], new: SelectorTarget) -> bool: - return old is None - def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: if self.previous_state is None or self.previous_state.manifest is None: raise DbtRuntimeError("Got a state selector method, but no comparison manifest") @@ -617,19 +760,25 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu f'Got an invalid selector "{selector}", expected one of ' f'"{list(state_checks)}"' ) - manifest: WritableManifest = self.previous_state.manifest + manifest: Manifest = self.previous_state.manifest - for node, real_node in self.all_nodes(included_nodes): + for unique_id, node in self.all_nodes(included_nodes): previous_node: Optional[SelectorTarget] = None - if node in manifest.nodes: - previous_node = manifest.nodes[node] - elif node in manifest.sources: - previous_node = manifest.sources[node] - elif node in manifest.exposures: - previous_node = manifest.exposures[node] - elif node in manifest.metrics: - previous_node = manifest.metrics[node] + if unique_id in manifest.nodes: + previous_node = manifest.nodes[unique_id] + elif unique_id in manifest.sources: + previous_node = SourceDefinition.from_resource(manifest.sources[unique_id]) + elif unique_id in manifest.exposures: + previous_node = Exposure.from_resource(manifest.exposures[unique_id]) + elif unique_id in manifest.metrics: + previous_node = Metric.from_resource(manifest.metrics[unique_id]) + elif unique_id in manifest.semantic_models: + previous_node = SemanticModel.from_resource(manifest.semantic_models[unique_id]) + elif unique_id in manifest.unit_tests: + previous_node = UnitTestDefinition.from_resource(manifest.unit_tests[unique_id]) + elif unique_id in manifest.saved_queries: + previous_node = SavedQuery.from_resource(manifest.saved_queries[unique_id]) keyword_args = {} if checker.__name__ in [ @@ -639,8 +788,24 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu ]: keyword_args["adapter_type"] = adapter_type # type: ignore - if checker(previous_node, real_node, **keyword_args): # type: ignore - yield node + if checker(previous_node, node, **keyword_args): # type: ignore + yield unique_id + + # checkers that can handle removed nodes + if checker.__name__ in ["check_modified_contract"]: + # ignore included_nodes, since those cannot contain removed nodes + for previous_unique_id, previous_node in 
manifest.nodes.items(): + # detect removed (deleted, renamed, or disabled) nodes + removed_node = None + if previous_unique_id in self.manifest.disabled.keys(): + removed_node = self.manifest.disabled[previous_unique_id][0] + elif previous_unique_id not in self.manifest.nodes.keys(): + removed_node = previous_node + + if removed_node: + # do not yield -- removed nodes should never be selected for downstream execution + # as they are not part of the current project's manifest.nodes + checker(removed_node, None, **keyword_args) # type: ignore class ResultSelectorMethod(SelectorMethod): @@ -650,9 +815,9 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu matches = set( result.unique_id for result in self.previous_state.results if result.status == selector ) - for node, real_node in self.all_nodes(included_nodes): - if node in matches: - yield node + for unique_id, node in self.all_nodes(included_nodes): + if unique_id in matches: + yield unique_id class SourceStatusSelectorMethod(SelectorMethod): @@ -704,37 +869,37 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu ): matches.remove(unique_id) - for node, real_node in self.all_nodes(included_nodes): - if node in matches: - yield node + for unique_id, node in self.all_nodes(included_nodes): + if unique_id in matches: + yield unique_id class VersionSelectorMethod(SelectorMethod): def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[UniqueId]: - for node, real_node in self.parsed_nodes(included_nodes): - if isinstance(real_node, ModelNode): + for unique_id, node in self.parsed_nodes(included_nodes): + if isinstance(node, ModelNode): if selector == "latest": - if real_node.is_latest_version: - yield node + if node.is_latest_version: + yield unique_id elif selector == "prerelease": if ( - real_node.version - and real_node.latest_version - and UnparsedVersion(v=real_node.version) - > UnparsedVersion(v=real_node.latest_version) + node.version + and node.latest_version + and UnparsedVersion(v=node.version) + > UnparsedVersion(v=node.latest_version) ): - yield node + yield unique_id elif selector == "old": if ( - real_node.version - and real_node.latest_version - and UnparsedVersion(v=real_node.version) - < UnparsedVersion(v=real_node.latest_version) + node.version + and node.latest_version + and UnparsedVersion(v=node.version) + < UnparsedVersion(v=node.latest_version) ): - yield node + yield unique_id elif selector == "none": - if real_node.version is None: - yield node + if node.version is None: + yield unique_id else: raise DbtRuntimeError( f'Invalid version type selector {selector}: expected one of: "latest", "prerelease", "old", or "none"' @@ -761,13 +926,16 @@ class MethodManager: MethodName.Result: ResultSelectorMethod, MethodName.SourceStatus: SourceStatusSelectorMethod, MethodName.Version: VersionSelectorMethod, + MethodName.SemanticModel: SemanticModelSelectorMethod, + MethodName.SavedQuery: SavedQuerySelectorMethod, + MethodName.UnitTest: UnitTestSelectorMethod, } def __init__( self, manifest: Manifest, previous_state: Optional[PreviousState], - ): + ) -> None: self.manifest = manifest self.previous_state = previous_state diff --git a/core/dbt/graph/selector_spec.py b/core/dbt/graph/selector_spec.py index cf8481ccf65..e801aef7396 100644 --- a/core/dbt/graph/selector_spec.py +++ b/core/dbt/graph/selector_spec.py @@ -2,13 +2,15 @@ import re from abc import ABCMeta, abstractmethod from dataclasses import dataclass -from dbt.dataclass_schema import StrEnum, 
dbtClassMixin +from typing import Any, Dict, Iterable, Iterator, List, Optional, Set, Tuple, Union + +from dbt.exceptions import InvalidSelectorError +from dbt.flags import get_flags +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin +from dbt_common.exceptions import DbtRuntimeError -from typing import Set, Iterator, List, Optional, Dict, Union, Any, Iterable, Tuple from .graph import UniqueId from .selector_methods import MethodName -from dbt.exceptions import DbtRuntimeError, InvalidSelectorError - RAW_SELECTOR_PATTERN = re.compile( r"\A" @@ -99,6 +101,7 @@ def parse_method(cls, groupdict: Dict[str, Any]) -> Tuple[MethodName, List[str]] except ValueError as exc: raise InvalidSelectorError(f"'{method_parts[0]}' is not a valid method name") from exc + # Following is for cases like config.severity and config.materialized method_arguments: List[str] = method_parts[1:] return method_name, method_arguments @@ -108,7 +111,6 @@ def selection_criteria_from_dict( cls, raw: Any, dct: Dict[str, Any], - indirect_selection: IndirectSelection = IndirectSelection.Eager, ) -> "SelectionCriteria": if "value" not in dct: raise DbtRuntimeError(f'Invalid node spec "{raw}" - no search value!') @@ -119,7 +121,7 @@ def selection_criteria_from_dict( # If defined field in selector, override CLI flag indirect_selection = IndirectSelection( - dct.get("indirect_selection", None) or indirect_selection + dct.get("indirect_selection", get_flags().INDIRECT_SELECTION) ) return cls( @@ -156,17 +158,13 @@ def dict_from_single_spec(cls, raw: str): return dct @classmethod - def from_single_spec( - cls, raw: str, indirect_selection: IndirectSelection = IndirectSelection.Eager - ) -> "SelectionCriteria": + def from_single_spec(cls, raw: str) -> "SelectionCriteria": result = RAW_SELECTOR_PATTERN.match(raw) if result is None: # bad spec! 
raise DbtRuntimeError(f'Invalid selector spec "{raw}"') - return cls.selection_criteria_from_dict( - raw, result.groupdict(), indirect_selection=indirect_selection - ) + return cls.selection_criteria_from_dict(raw, result.groupdict()) class BaseSelectionGroup(dbtClassMixin, Iterable[SelectionSpec], metaclass=ABCMeta): @@ -176,7 +174,7 @@ def __init__( indirect_selection: IndirectSelection = IndirectSelection.Eager, expect_exists: bool = False, raw: Any = None, - ): + ) -> None: self.components: List[SelectionSpec] = list(components) self.expect_exists = expect_exists self.raw = raw diff --git a/core/dbt/helper_types.py b/core/dbt/helper_types.py deleted file mode 100644 index 77e25c68ce8..00000000000 --- a/core/dbt/helper_types.py +++ /dev/null @@ -1,184 +0,0 @@ -# never name this package "types", or mypy will crash in ugly ways - -# necessary for annotating constructors -from __future__ import annotations - -from dataclasses import dataclass, field -from datetime import timedelta -from pathlib import Path -from typing import Tuple, AbstractSet, Union -from hologram import FieldEncoder, JsonDict -from mashumaro.types import SerializableType -from typing import Callable, cast, Generic, Optional, TypeVar, List - -from dbt.dataclass_schema import ( - dbtClassMixin, - ValidationError, - StrEnum, -) -import dbt.events.types as dbt_event_types - - -class Port(int, SerializableType): - @classmethod - def _deserialize(cls, value: Union[int, str]) -> "Port": - try: - value = int(value) - except ValueError: - raise ValidationError(f"Cannot encode {value} into port number") - - return Port(value) - - def _serialize(self) -> int: - return self - - -class PortEncoder(FieldEncoder): - @property - def json_schema(self): - return {"type": "integer", "minimum": 0, "maximum": 65535} - - -class TimeDeltaFieldEncoder(FieldEncoder[timedelta]): - """Encodes timedeltas to dictionaries""" - - def to_wire(self, value: timedelta) -> float: - return value.total_seconds() - - def to_python(self, value) -> timedelta: - if isinstance(value, timedelta): - return value - try: - return timedelta(seconds=value) - except TypeError: - raise ValidationError("cannot encode {} into timedelta".format(value)) from None - - @property - def json_schema(self) -> JsonDict: - return {"type": "number"} - - -class PathEncoder(FieldEncoder): - def to_wire(self, value: Path) -> str: - return str(value) - - def to_python(self, value) -> Path: - if isinstance(value, Path): - return value - try: - return Path(value) - except TypeError: - raise ValidationError("cannot encode {} into timedelta".format(value)) from None - - @property - def json_schema(self) -> JsonDict: - return {"type": "string"} - - -class NVEnum(StrEnum): - novalue = "novalue" - - def __eq__(self, other): - return isinstance(other, NVEnum) - - -@dataclass -class NoValue(dbtClassMixin): - """Sometimes, you want a way to say none that isn't None""" - - novalue: NVEnum = field(default_factory=lambda: NVEnum.novalue) - - -@dataclass -class IncludeExclude(dbtClassMixin): - INCLUDE_ALL = ("all", "*") - - include: Union[str, List[str]] - exclude: List[str] = field(default_factory=list) - - def __post_init__(self): - if isinstance(self.include, str) and self.include not in self.INCLUDE_ALL: - raise ValidationError( - f"include must be one of {self.INCLUDE_ALL} or a list of strings" - ) - - if self.exclude and self.include not in self.INCLUDE_ALL: - raise ValidationError( - f"exclude can only be specified if include is one of {self.INCLUDE_ALL}" - ) - - if isinstance(self.include, 
list): - self._validate_items(self.include) - - if isinstance(self.exclude, list): - self._validate_items(self.exclude) - - def includes(self, item_name: str): - return ( - item_name in self.include or self.include in self.INCLUDE_ALL - ) and item_name not in self.exclude - - def _validate_items(self, items: List[str]): - pass - - -class WarnErrorOptions(IncludeExclude): - def _validate_items(self, items: List[str]): - valid_exception_names = set( - [name for name, cls in dbt_event_types.__dict__.items() if isinstance(cls, type)] - ) - for item in items: - if item not in valid_exception_names: - raise ValidationError(f"{item} is not a valid dbt error name.") - - -dbtClassMixin.register_field_encoders( - { - Port: PortEncoder(), - timedelta: TimeDeltaFieldEncoder(), - Path: PathEncoder(), - } -) - - -FQNPath = Tuple[str, ...] -PathSet = AbstractSet[FQNPath] - -T = TypeVar("T") - - -# A data type for representing lazily evaluated values. -# -# usage: -# x = Lazy.defer(lambda: expensive_fn()) -# y = x.force() -# -# inspired by the purescript data type -# https://pursuit.purescript.org/packages/purescript-lazy/5.0.0/docs/Data.Lazy -@dataclass -class Lazy(Generic[T]): - _f: Callable[[], T] - memo: Optional[T] = None - - # constructor for lazy values - @classmethod - def defer(cls, f: Callable[[], T]) -> Lazy[T]: - return Lazy(f) - - # workaround for open mypy issue: - # https://github.com/python/mypy/issues/6910 - def _typed_eval_f(self) -> T: - return cast(Callable[[], T], getattr(self, "_f"))() - - # evaluates the function if the value has not been memoized already - def force(self) -> T: - if self.memo is None: - self.memo = self._typed_eval_f() - return self.memo - - -# This class is used in to_target_dict, so that accesses to missing keys -# will return an empty string instead of Undefined -class DictDefaultEmptyStr(dict): - def __getitem__(self, key): - return dict.get(self, key, "") diff --git a/core/dbt/hooks.py b/core/dbt/hooks.py index 816d4b3e7de..131b28a2449 100644 --- a/core/dbt/hooks.py +++ b/core/dbt/hooks.py @@ -1,7 +1,7 @@ -from dbt.dataclass_schema import StrEnum import json +from typing import Any, Dict, Union -from typing import Union, Dict, Any +from dbt_common.dataclass_schema import StrEnum class ModelHookType(StrEnum): diff --git a/core/dbt/include/__init__.py b/core/dbt/include/__init__.py new file mode 100644 index 00000000000..b36383a6102 --- /dev/null +++ b/core/dbt/include/__init__.py @@ -0,0 +1,3 @@ +from pkgutil import extend_path + +__path__ = extend_path(__path__, __name__) diff --git a/core/dbt/include/global_project/__init__.py b/core/dbt/include/global_project/__init__.py deleted file mode 100644 index 1ef0113c7d8..00000000000 --- a/core/dbt/include/global_project/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -import os - -PACKAGE_PATH = os.path.dirname(__file__) -PROJECT_NAME = "dbt" - -DOCS_INDEX_FILE_PATH = os.path.normpath(os.path.join(PACKAGE_PATH, "..", "index.html")) diff --git a/core/dbt/include/global_project/dbt_project.yml b/core/dbt/include/global_project/dbt_project.yml deleted file mode 100644 index fe15d183c58..00000000000 --- a/core/dbt/include/global_project/dbt_project.yml +++ /dev/null @@ -1,7 +0,0 @@ -config-version: 2 -name: dbt -version: 1.0 - -docs-paths: ["docs"] -macro-paths: ["macros"] -test-paths: ["tests"] diff --git a/core/dbt/include/global_project/docs/overview.md b/core/dbt/include/global_project/docs/overview.md deleted file mode 100644 index 36a4c0aa199..00000000000 --- a/core/dbt/include/global_project/docs/overview.md +++ 
/dev/null @@ -1,43 +0,0 @@ - -{% docs __overview__ %} - -### Welcome! - -Welcome to the auto-generated documentation for your dbt project! - -### Navigation - -You can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models -in your project. - -#### Project Tab -The `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the -models defined in your dbt project, as well as models imported from dbt packages. - -#### Database Tab -The `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view -shows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown -in this interface, as they do not exist in the database. - -### Graph Exploration -You can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models. - -On model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand` -button at the top-right of this lineage pane, you'll be able to see all of the models that are used to build, -or are built from, the model you're exploring. - -Once expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the -models in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax). - -Note that you can also right-click on models to interactively filter and explore the graph. - ---- - -### More information - -- [What is dbt](https://docs.getdbt.com/docs/introduction)? -- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint) -- [Installation](https://docs.getdbt.com/docs/installation) -- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion - -{% enddocs %} diff --git a/core/dbt/include/global_project/macros/adapters/apply_grants.sql b/core/dbt/include/global_project/macros/adapters/apply_grants.sql deleted file mode 100644 index 10906e7ffa7..00000000000 --- a/core/dbt/include/global_project/macros/adapters/apply_grants.sql +++ /dev/null @@ -1,167 +0,0 @@ -{# ------- BOOLEAN MACROS --------- #} - -{# - -- COPY GRANTS - -- When a relational object (view or table) is replaced in this database, - -- do previous grants carry over to the new object? This may depend on: - -- whether we use alter-rename-swap versus CREATE OR REPLACE - -- user-supplied configuration (e.g. copy_grants on Snowflake) - -- By default, play it safe, assume TRUE: that grants ARE copied over. - -- This means dbt will first "show" current grants and then calculate diffs. - -- It may require an additional query than is strictly necessary, - -- but better safe than sorry. -#} - -{% macro copy_grants() %} - {{ return(adapter.dispatch('copy_grants', 'dbt')()) }} -{% endmacro %} - -{% macro default__copy_grants() %} - {{ return(True) }} -{% endmacro %} - - -{# - -- SUPPORT MULTIPLE GRANTEES PER DCL STATEMENT - -- Does this database support 'grant {privilege} to {grantee_1}, {grantee_2}, ...' - -- Or must these be separate statements: - -- `grant {privilege} to {grantee_1}`; - -- `grant {privilege} to {grantee_2}`; - -- By default, pick the former, because it's what we prefer when available. 
-#} - -{% macro support_multiple_grantees_per_dcl_statement() %} - {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }} -{% endmacro %} - -{%- macro default__support_multiple_grantees_per_dcl_statement() -%} - {{ return(True) }} -{%- endmacro -%} - - -{% macro should_revoke(existing_relation, full_refresh_mode=True) %} - - {% if not existing_relation %} - {#-- The table doesn't already exist, so no grants to copy over --#} - {{ return(False) }} - {% elif full_refresh_mode %} - {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#} - {{ return(copy_grants()) }} - {% else %} - {#-- The table is being merged/upserted/inserted -- grants will be carried over --#} - {{ return(True) }} - {% endif %} - -{% endmacro %} - -{# ------- DCL STATEMENT TEMPLATES --------- #} - -{% macro get_show_grant_sql(relation) %} - {{ return(adapter.dispatch("get_show_grant_sql", "dbt")(relation)) }} -{% endmacro %} - -{% macro default__get_show_grant_sql(relation) %} - show grants on {{ relation }} -{% endmacro %} - - -{% macro get_grant_sql(relation, privilege, grantees) %} - {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }} -{% endmacro %} - -{%- macro default__get_grant_sql(relation, privilege, grantees) -%} - grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }} -{%- endmacro -%} - - -{% macro get_revoke_sql(relation, privilege, grantees) %} - {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }} -{% endmacro %} - -{%- macro default__get_revoke_sql(relation, privilege, grantees) -%} - revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }} -{%- endmacro -%} - - -{# ------- RUNTIME APPLICATION --------- #} - -{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %} - {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }} -{% endmacro %} - -{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%} - {# - -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked. - -- Depending on whether this database supports multiple grantees per statement, pass in the list of - -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair. - -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql` - #} - {%- set dcl_statements = [] -%} - {%- for privilege, grantees in grant_config.items() %} - {%- if support_multiple_grantees_per_dcl_statement() and grantees -%} - {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%} - {%- do dcl_statements.append(dcl) -%} - {%- else -%} - {%- for grantee in grantees -%} - {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %} - {%- do dcl_statements.append(dcl) -%} - {% endfor -%} - {%- endif -%} - {%- endfor -%} - {{ return(dcl_statements) }} -{%- endmacro %} - - -{% macro call_dcl_statements(dcl_statement_list) %} - {{ return(adapter.dispatch("call_dcl_statements", "dbt")(dcl_statement_list)) }} -{% endmacro %} - -{% macro default__call_dcl_statements(dcl_statement_list) %} - {# - -- By default, supply all grant + revoke statements in a single semicolon-separated block, - -- so that they're all processed together. - - -- Some databases do not support this. Those adapters will need to override this macro - -- to run each statement individually. 
- #} - {% call statement('grants') %} - {% for dcl_statement in dcl_statement_list %} - {{ dcl_statement }}; - {% endfor %} - {% endcall %} -{% endmacro %} - - -{% macro apply_grants(relation, grant_config, should_revoke) %} - {{ return(adapter.dispatch("apply_grants", "dbt")(relation, grant_config, should_revoke)) }} -{% endmacro %} - -{% macro default__apply_grants(relation, grant_config, should_revoke=True) %} - {#-- If grant_config is {} or None, this is a no-op --#} - {% if grant_config %} - {% if should_revoke %} - {#-- We think previous grants may have carried over --#} - {#-- Show current grants and calculate diffs --#} - {% set current_grants_table = run_query(get_show_grant_sql(relation)) %} - {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %} - {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %} - {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %} - {% if not (needs_granting or needs_revoking) %} - {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}} - {% endif %} - {% else %} - {#-- We don't think there's any chance of previous grants having carried over. --#} - {#-- Jump straight to granting what the user has configured. --#} - {% set needs_revoking = {} %} - {% set needs_granting = grant_config %} - {% endif %} - {% if needs_granting or needs_revoking %} - {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %} - {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %} - {% set dcl_statement_list = revoke_statement_list + grant_statement_list %} - {% if dcl_statement_list %} - {{ call_dcl_statements(dcl_statement_list) }} - {% endif %} - {% endif %} - {% endif %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/columns.sql b/core/dbt/include/global_project/macros/adapters/columns.sql deleted file mode 100644 index b5a03ec53b5..00000000000 --- a/core/dbt/include/global_project/macros/adapters/columns.sql +++ /dev/null @@ -1,132 +0,0 @@ -{% macro get_columns_in_relation(relation) -%} - {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }} -{% endmacro %} - -{% macro default__get_columns_in_relation(relation) -%} - {{ exceptions.raise_not_implemented( - 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }} -{% endmacro %} - -{# helper for adapter-specific implementations of get_columns_in_relation #} -{% macro sql_convert_columns_in_relation(table) -%} - {% set columns = [] %} - {% for row in table %} - {% do columns.append(api.Column(*row)) %} - {% endfor %} - {{ return(columns) }} -{% endmacro %} - - -{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%} - {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }} -{% endmacro %} - -{# - Builds a query that results in the same schema as the given select_sql statement, without necessitating a data scan. - Useful for running a query in a 'pre-flight' context, such as model contract enforcement (assert_columns_equivalent macro). 
-#} -{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %} - {%- if select_sql_header is not none -%} - {{ select_sql_header }} - {%- endif -%} - select * from ( - {{ select_sql }} - ) as __dbt_sbq - where false - limit 0 -{% endmacro %} - - -{% macro get_empty_schema_sql(columns) -%} - {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }} -{% endmacro %} - -{% macro default__get_empty_schema_sql(columns) %} - {%- set col_err = [] -%} - select - {% for i in columns %} - {%- set col = columns[i] -%} - {%- if col['data_type'] is not defined -%} - {{ col_err.append(col['name']) }} - {%- endif -%} - {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %} - cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ ", " if not loop.last }} - {%- endfor -%} - {%- if (col_err | length) > 0 -%} - {{ exceptions.column_type_missing(column_names=col_err) }} - {%- endif -%} -{% endmacro %} - -{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%} - {% set columns = [] %} - {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#} - {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %} - {% set column_schema = adapter.get_column_schema_from_query(sql) %} - {{ return(column_schema) }} -{% endmacro %} - --- here for back compat -{% macro get_columns_in_query(select_sql) -%} - {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }} -{% endmacro %} - -{% macro default__get_columns_in_query(select_sql) %} - {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%} - {{ get_empty_subquery_sql(select_sql) }} - {% endcall %} - {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }} -{% endmacro %} - -{% macro alter_column_type(relation, column_name, new_column_type) -%} - {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }} -{% endmacro %} - -{% macro default__alter_column_type(relation, column_name, new_column_type) -%} - {# - 1. Create a new column (w/ temp name and correct type) - 2. Copy data over to it - 3. Drop the existing column (cascade!) - 4. 
Rename the new column to existing column - #} - {%- set tmp_column = column_name + "__dbt_alter" -%} - - {% call statement('alter_column_type') %} - alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }}; - update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }}; - alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade; - alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }} - {% endcall %} - -{% endmacro %} - - -{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%} - {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }} -{% endmacro %} - -{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %} - - {% if add_columns is none %} - {% set add_columns = [] %} - {% endif %} - {% if remove_columns is none %} - {% set remove_columns = [] %} - {% endif %} - - {% set sql -%} - - alter {{ relation.type }} {{ relation }} - - {% for column in add_columns %} - add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }} - {% endfor %}{{ ',' if add_columns and remove_columns }} - - {% for column in remove_columns %} - drop column {{ column.name }}{{ ',' if not loop.last }} - {% endfor %} - - {%- endset -%} - - {% do run_query(sql) %} - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/drop_relation.sql b/core/dbt/include/global_project/macros/adapters/drop_relation.sql deleted file mode 100644 index bd254c78d51..00000000000 --- a/core/dbt/include/global_project/macros/adapters/drop_relation.sql +++ /dev/null @@ -1,44 +0,0 @@ -{% macro drop_relation(relation) -%} - {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }} -{% endmacro %} - -{% macro default__drop_relation(relation) -%} - {% call statement('drop_relation', auto_begin=False) -%} - {%- if relation.is_table -%} - {{- drop_table(relation) -}} - {%- elif relation.is_view -%} - {{- drop_view(relation) -}} - {%- elif relation.is_materialized_view -%} - {{- drop_materialized_view(relation) -}} - {%- else -%} - drop {{ relation.type }} if exists {{ relation }} cascade - {%- endif -%} - {%- endcall %} -{% endmacro %} - - -{% macro drop_table(relation) -%} - {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }} -{%- endmacro %} - -{% macro default__drop_table(relation) -%} - drop table if exists {{ relation }} cascade -{%- endmacro %} - - -{% macro drop_view(relation) -%} - {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }} -{%- endmacro %} - -{% macro default__drop_view(relation) -%} - drop view if exists {{ relation }} cascade -{%- endmacro %} - - -{% macro drop_materialized_view(relation) -%} - {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }} -{%- endmacro %} - -{% macro default__drop_materialized_view(relation) -%} - drop materialized view if exists {{ relation }} cascade -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/freshness.sql b/core/dbt/include/global_project/macros/adapters/freshness.sql deleted file mode 100644 index f18499a2391..00000000000 --- a/core/dbt/include/global_project/macros/adapters/freshness.sql +++ /dev/null @@ -1,16 +0,0 @@ -{% macro collect_freshness(source, loaded_at_field, filter) %} - {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}} -{% endmacro %} - -{% macro 
default__collect_freshness(source, loaded_at_field, filter) %} - {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%} - select - max({{ loaded_at_field }}) as max_loaded_at, - {{ current_timestamp() }} as snapshotted_at - from {{ source }} - {% if filter %} - where {{ filter }} - {% endif %} - {% endcall %} - {{ return(load_result('collect_freshness')) }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/indexes.sql b/core/dbt/include/global_project/macros/adapters/indexes.sql deleted file mode 100644 index b8663a7f971..00000000000 --- a/core/dbt/include/global_project/macros/adapters/indexes.sql +++ /dev/null @@ -1,41 +0,0 @@ -{% macro get_create_index_sql(relation, index_dict) -%} - {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }} -{% endmacro %} - -{% macro default__get_create_index_sql(relation, index_dict) -%} - {% do return(None) %} -{% endmacro %} - - -{% macro create_indexes(relation) -%} - {{ adapter.dispatch('create_indexes', 'dbt')(relation) }} -{%- endmacro %} - -{% macro default__create_indexes(relation) -%} - {%- set _indexes = config.get('indexes', default=[]) -%} - - {% for _index_dict in _indexes %} - {% set create_index_sql = get_create_index_sql(relation, _index_dict) %} - {% if create_index_sql %} - {% do run_query(create_index_sql) %} - {% endif %} - {% endfor %} -{% endmacro %} - - -{% macro get_drop_index_sql(relation, index_name) -%} - {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }} -{%- endmacro %} - -{% macro default__get_drop_index_sql(relation, index_name) -%} - {{ exceptions.raise_compiler_error("`get_drop_index_sql has not been implemented for this adapter.") }} -{%- endmacro %} - - -{% macro get_show_indexes_sql(relation) -%} - {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }} -{%- endmacro %} - -{% macro default__get_show_indexes_sql(relation) -%} - {{ exceptions.raise_compiler_error("`get_show_indexes_sql has not been implemented for this adapter.") }} -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/metadata.sql b/core/dbt/include/global_project/macros/adapters/metadata.sql deleted file mode 100644 index 9e45c500a3f..00000000000 --- a/core/dbt/include/global_project/macros/adapters/metadata.sql +++ /dev/null @@ -1,74 +0,0 @@ -{% macro get_catalog(information_schema, schemas) -%} - {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }} -{%- endmacro %} - -{% macro default__get_catalog(information_schema, schemas) -%} - - {% set typename = adapter.type() %} - {% set msg -%} - get_catalog not implemented for {{ typename }} - {%- endset %} - - {{ exceptions.raise_compiler_error(msg) }} -{% endmacro %} - - -{% macro information_schema_name(database) %} - {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }} -{% endmacro %} - -{% macro default__information_schema_name(database) -%} - {%- if database -%} - {{ database }}.INFORMATION_SCHEMA - {%- else -%} - INFORMATION_SCHEMA - {%- endif -%} -{%- endmacro %} - - -{% macro list_schemas(database) -%} - {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }} -{% endmacro %} - -{% macro default__list_schemas(database) -%} - {% set sql %} - select distinct schema_name - from {{ information_schema_name(database) }}.SCHEMATA - where catalog_name ilike '{{ database }}' - {% endset %} - {{ return(run_query(sql)) }} -{% endmacro %} - - -{% macro check_schema_exists(information_schema, schema) -%} - {{ 
return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }} -{% endmacro %} - -{% macro default__check_schema_exists(information_schema, schema) -%} - {% set sql -%} - select count(*) - from {{ information_schema.replace(information_schema_view='SCHEMATA') }} - where catalog_name='{{ information_schema.database }}' - and schema_name='{{ schema }}' - {%- endset %} - {{ return(run_query(sql)) }} -{% endmacro %} - - -{% macro list_relations_without_caching(schema_relation) %} - {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }} -{% endmacro %} - -{% macro default__list_relations_without_caching(schema_relation) %} - {{ exceptions.raise_not_implemented( - 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }} -{% endmacro %} - -{% macro get_relations() %} - {{ return(adapter.dispatch('get_relations', 'dbt')()) }} -{% endmacro %} - -{% macro default__get_relations() %} - {{ exceptions.raise_not_implemented( - 'get_relations macro not implemented for adapter '+adapter.type()) }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/persist_docs.sql b/core/dbt/include/global_project/macros/adapters/persist_docs.sql deleted file mode 100644 index 8749e59f606..00000000000 --- a/core/dbt/include/global_project/macros/adapters/persist_docs.sql +++ /dev/null @@ -1,33 +0,0 @@ -{% macro alter_column_comment(relation, column_dict) -%} - {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }} -{% endmacro %} - -{% macro default__alter_column_comment(relation, column_dict) -%} - {{ exceptions.raise_not_implemented( - 'alter_column_comment macro not implemented for adapter '+adapter.type()) }} -{% endmacro %} - - -{% macro alter_relation_comment(relation, relation_comment) -%} - {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }} -{% endmacro %} - -{% macro default__alter_relation_comment(relation, relation_comment) -%} - {{ exceptions.raise_not_implemented( - 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }} -{% endmacro %} - - -{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%} - {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }} -{% endmacro %} - -{% macro default__persist_docs(relation, model, for_relation, for_columns) -%} - {% if for_relation and config.persist_relation_docs() and model.description %} - {% do run_query(alter_relation_comment(relation, model.description)) %} - {% endif %} - - {% if for_columns and config.persist_column_docs() and model.columns %} - {% do run_query(alter_column_comment(relation, model.columns)) %} - {% endif %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/relation.sql b/core/dbt/include/global_project/macros/adapters/relation.sql deleted file mode 100644 index f0dde7f20f0..00000000000 --- a/core/dbt/include/global_project/macros/adapters/relation.sql +++ /dev/null @@ -1,98 +0,0 @@ -{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %} - {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }} -{% endmacro %} - -{% macro default__make_intermediate_relation(base_relation, suffix) %} - {{ return(default__make_temp_relation(base_relation, suffix)) }} -{% endmacro %} - -{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %} - {{ return(adapter.dispatch('make_temp_relation', 
'dbt')(base_relation, suffix)) }} -{% endmacro %} - -{% macro default__make_temp_relation(base_relation, suffix) %} - {%- set temp_identifier = base_relation.identifier ~ suffix -%} - {%- set temp_relation = base_relation.incorporate( - path={"identifier": temp_identifier}) -%} - - {{ return(temp_relation) }} -{% endmacro %} - -{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %} - {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }} -{% endmacro %} - -{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %} - {%- set backup_identifier = base_relation.identifier ~ suffix -%} - {%- set backup_relation = base_relation.incorporate( - path={"identifier": backup_identifier}, - type=backup_relation_type - ) -%} - {{ return(backup_relation) }} -{% endmacro %} - - -{% macro truncate_relation(relation) -%} - {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }} -{% endmacro %} - -{% macro default__truncate_relation(relation) -%} - {% call statement('truncate_relation') -%} - truncate table {{ relation }} - {%- endcall %} -{% endmacro %} - - -{% macro rename_relation(from_relation, to_relation) -%} - {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }} -{% endmacro %} - -{% macro default__rename_relation(from_relation, to_relation) -%} - {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %} - {% call statement('rename_relation') -%} - alter table {{ from_relation }} rename to {{ target_name }} - {%- endcall %} -{% endmacro %} - - -{% macro get_or_create_relation(database, schema, identifier, type) -%} - {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }} -{% endmacro %} - -{% macro default__get_or_create_relation(database, schema, identifier, type) %} - {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %} - - {% if target_relation %} - {% do return([true, target_relation]) %} - {% endif %} - - {%- set new_relation = api.Relation.create( - database=database, - schema=schema, - identifier=identifier, - type=type - ) -%} - {% do return([false, new_relation]) %} -{% endmacro %} - - --- a user-friendly interface into adapter.get_relation -{% macro load_cached_relation(relation) %} - {% do return(adapter.get_relation( - database=relation.database, - schema=relation.schema, - identifier=relation.identifier - )) -%} -{% endmacro %} - --- old name for backwards compatibility -{% macro load_relation(relation) %} - {{ return(load_cached_relation(relation)) }} -{% endmacro %} - - -{% macro drop_relation_if_exists(relation) %} - {% if relation is not none %} - {{ adapter.drop_relation(relation) }} - {% endif %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/schema.sql b/core/dbt/include/global_project/macros/adapters/schema.sql deleted file mode 100644 index 9e0c7559286..00000000000 --- a/core/dbt/include/global_project/macros/adapters/schema.sql +++ /dev/null @@ -1,20 +0,0 @@ -{% macro create_schema(relation) -%} - {{ adapter.dispatch('create_schema', 'dbt')(relation) }} -{% endmacro %} - -{% macro default__create_schema(relation) -%} - {%- call statement('create_schema') -%} - create schema if not exists {{ relation.without_identifier() }} - {% endcall %} -{% endmacro %} - - -{% macro drop_schema(relation) -%} - {{ adapter.dispatch('drop_schema', 'dbt')(relation) }} -{% 
endmacro %} - -{% macro default__drop_schema(relation) -%} - {%- call statement('drop_schema') -%} - drop schema if exists {{ relation.without_identifier() }} cascade - {% endcall %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/timestamps.sql b/core/dbt/include/global_project/macros/adapters/timestamps.sql deleted file mode 100644 index 64b5fd3ddda..00000000000 --- a/core/dbt/include/global_project/macros/adapters/timestamps.sql +++ /dev/null @@ -1,44 +0,0 @@ -{%- macro current_timestamp() -%} - {{ adapter.dispatch('current_timestamp', 'dbt')() }} -{%- endmacro -%} - -{% macro default__current_timestamp() -%} - {{ exceptions.raise_not_implemented( - 'current_timestamp macro not implemented for adapter ' + adapter.type()) }} -{%- endmacro %} - -{%- macro snapshot_get_time() -%} - {{ adapter.dispatch('snapshot_get_time', 'dbt')() }} -{%- endmacro -%} - -{% macro default__snapshot_get_time() %} - {{ current_timestamp() }} -{% endmacro %} - ---------------------------------------------- - -/* {# - DEPRECATED: DO NOT USE IN NEW PROJECTS - - This is ONLY to handle the fact that Snowflake + Postgres had functionally - different implementations of {{ dbt.current_timestamp }} + {{ dbt_utils.current_timestamp }} - - If you had a project or package that called {{ dbt_utils.current_timestamp() }}, you should - continue to use this macro to guarantee identical behavior on those two databases. -#} */ - -{% macro current_timestamp_backcompat() %} - {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }} -{% endmacro %} - -{% macro default__current_timestamp_backcompat() %} - current_timestamp::timestamp -{% endmacro %} - -{% macro current_timestamp_in_utc_backcompat() %} - {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }} -{% endmacro %} - -{% macro default__current_timestamp_in_utc_backcompat() %} - {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/adapters/validate_sql.sql b/core/dbt/include/global_project/macros/adapters/validate_sql.sql deleted file mode 100644 index ba01117ecae..00000000000 --- a/core/dbt/include/global_project/macros/adapters/validate_sql.sql +++ /dev/null @@ -1,10 +0,0 @@ -{% macro validate_sql(sql) -%} - {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }} -{% endmacro %} - -{% macro default__validate_sql(sql) -%} - {% call statement('validate_sql') -%} - explain {{ sql }} - {% endcall %} - {{ return(load_result('validate_sql')) }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/etc/datetime.sql b/core/dbt/include/global_project/macros/etc/datetime.sql deleted file mode 100644 index 33c55549794..00000000000 --- a/core/dbt/include/global_project/macros/etc/datetime.sql +++ /dev/null @@ -1,62 +0,0 @@ -{% macro convert_datetime(date_str, date_fmt) %} - - {% set error_msg -%} - The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}' - {%- endset %} - - {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %} - {{ return(res) }} - -{% endmacro %} - - -{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt="%Y%m%d", out_fmt="%Y%m%d") %} - {% set end_date_str = start_date_str if end_date_str is none else end_date_str %} - - {% set start_date = convert_datetime(start_date_str, in_fmt) %} - {% set end_date = convert_datetime(end_date_str, in_fmt) %} - - {% set day_count = (end_date - 
start_date).days %} - {% if day_count < 0 %} - {% set msg -%} - Partition start date is after the end date ({{ start_date }}, {{ end_date }}) - {%- endset %} - - {{ exceptions.raise_compiler_error(msg, model) }} - {% endif %} - - {% set date_list = [] %} - {% for i in range(0, day_count + 1) %} - {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %} - {% if not out_fmt %} - {% set _ = date_list.append(the_date) %} - {% else %} - {% set _ = date_list.append(the_date.strftime(out_fmt)) %} - {% endif %} - {% endfor %} - - {{ return(date_list) }} -{% endmacro %} - - -{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %} - {% set partition_range = (raw_partition_date | string).split(",") %} - - {% if (partition_range | length) == 1 %} - {% set start_date = partition_range[0] %} - {% set end_date = none %} - {% elif (partition_range | length) == 2 %} - {% set start_date = partition_range[0] %} - {% set end_date = partition_range[1] %} - {% else %} - {{ exceptions.raise_compiler_error("Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: " ~ raw_partition_date) }} - {% endif %} - - {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }} -{% endmacro %} - - -{% macro py_current_timestring() %} - {% set dt = modules.datetime.datetime.now() %} - {% do return(dt.strftime("%Y%m%d%H%M%S%f")) %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/etc/statement.sql b/core/dbt/include/global_project/macros/etc/statement.sql deleted file mode 100644 index 8fb98f8c811..00000000000 --- a/core/dbt/include/global_project/macros/etc/statement.sql +++ /dev/null @@ -1,52 +0,0 @@ -{#-- -The macro override naming method (spark__statement) only works for macros which are called with adapter.dispatch. For macros called directly, you can just redefine them. ---#} -{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%} - {%- if execute: -%} - {%- set compiled_code = caller() -%} - - {%- if name == 'main' -%} - {{ log('Writing runtime {} for node "{}"'.format(language, model['unique_id'])) }} - {{ write(compiled_code) }} - {%- endif -%} - {%- if language == 'sql'-%} - {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%} - {%- elif language == 'python' -%} - {%- set res = submit_python_job(model, compiled_code) -%} - {#-- TODO: What should table be for python models? 
--#} - {%- set table = None -%} - {%- else -%} - {% do exceptions.raise_compiler_error("statement macro didn't get supported language") %} - {%- endif -%} - - {%- if name is not none -%} - {{ store_result(name, response=res, agate_table=table) }} - {%- endif -%} - - {%- endif -%} -{%- endmacro %} - - -{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%} - {%- set sql = caller() -%} - - {%- if name == 'main' -%} - {{ log('Writing runtime SQL for node "{}"'.format(model['unique_id'])) }} - {{ write(sql) }} - {%- endif -%} - - {%- if name is not none -%} - {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }} - {%- endif -%} - -{%- endmacro %} - - -{# a user-friendly interface into statements #} -{% macro run_query(sql) %} - {% call statement("run_query_statement", fetch_result=true, auto_begin=false) %} - {{ sql }} - {% endcall %} - - {% do return(load_result("run_query_statement").table) %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/generic_test_sql/accepted_values.sql b/core/dbt/include/global_project/macros/generic_test_sql/accepted_values.sql deleted file mode 100644 index 9b5a0b0e4dd..00000000000 --- a/core/dbt/include/global_project/macros/generic_test_sql/accepted_values.sql +++ /dev/null @@ -1,27 +0,0 @@ -{% macro default__test_accepted_values(model, column_name, values, quote=True) %} - -with all_values as ( - - select - {{ column_name }} as value_field, - count(*) as n_records - - from {{ model }} - group by {{ column_name }} - -) - -select * -from all_values -where value_field not in ( - {% for value in values -%} - {% if quote -%} - '{{ value }}' - {%- else -%} - {{ value }} - {%- endif -%} - {%- if not loop.last -%},{%- endif %} - {%- endfor %} -) - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/generic_test_sql/not_null.sql b/core/dbt/include/global_project/macros/generic_test_sql/not_null.sql deleted file mode 100644 index 73e3401f930..00000000000 --- a/core/dbt/include/global_project/macros/generic_test_sql/not_null.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro default__test_not_null(model, column_name) %} - -{% set column_list = '*' if should_store_failures() else column_name %} - -select {{ column_list }} -from {{ model }} -where {{ column_name }} is null - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/generic_test_sql/relationships.sql b/core/dbt/include/global_project/macros/generic_test_sql/relationships.sql deleted file mode 100644 index db779a43e7d..00000000000 --- a/core/dbt/include/global_project/macros/generic_test_sql/relationships.sql +++ /dev/null @@ -1,23 +0,0 @@ -{% macro default__test_relationships(model, column_name, to, field) %} - -with child as ( - select {{ column_name }} as from_field - from {{ model }} - where {{ column_name }} is not null -), - -parent as ( - select {{ field }} as to_field - from {{ to }} -) - -select - from_field - -from child -left join parent - on child.from_field = parent.to_field - -where parent.to_field is null - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/generic_test_sql/unique.sql b/core/dbt/include/global_project/macros/generic_test_sql/unique.sql deleted file mode 100644 index ed18c5c93a3..00000000000 --- a/core/dbt/include/global_project/macros/generic_test_sql/unique.sql +++ /dev/null @@ -1,12 +0,0 @@ -{% macro default__test_unique(model, column_name) %} - -select - {{ column_name }} as unique_field, - count(*) as n_records - -from {{ model }} 
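{#
  A minimal usage sketch (illustrative only) of the run_query helper defined in statement.sql above.
  The macro name `count_rows` and the relation argument are assumptions for the example; run_query
  wraps the statement macro with fetch_result=true and returns the result as a table.
#}
{% macro count_rows(relation) %}
    {% if execute %}
        {% set result = run_query("select count(*) as n from " ~ relation) %}
        {% do return(result.columns[0].values()[0]) %}
    {% endif %}
{% endmacro %}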
-where {{ column_name }} is not null -group by {{ column_name }} -having count(*) > 1 - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/get_custom_name/get_custom_alias.sql b/core/dbt/include/global_project/macros/get_custom_name/get_custom_alias.sql deleted file mode 100644 index 187d3970d52..00000000000 --- a/core/dbt/include/global_project/macros/get_custom_name/get_custom_alias.sql +++ /dev/null @@ -1,36 +0,0 @@ - -{# - Renders a alias name given a custom alias name. If the custom - alias name is none, then the resulting alias is just the filename of the - model. If an alias override is specified, then that is used. - - This macro can be overriden in projects to define different semantics - for rendering a alias name. - - Arguments: - custom_alias_name: The custom alias name specified for a model, or none - node: The available node that an alias is being generated for, or none - -#} - -{% macro generate_alias_name(custom_alias_name=none, node=none) -%} - {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %} -{%- endmacro %} - -{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%} - - {%- if custom_alias_name -%} - - {{ custom_alias_name | trim }} - - {%- elif node.version -%} - - {{ return(node.name ~ "_v" ~ (node.version | replace(".", "_"))) }} - - {%- else -%} - - {{ node.name }} - - {%- endif -%} - -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/get_custom_name/get_custom_database.sql b/core/dbt/include/global_project/macros/get_custom_name/get_custom_database.sql deleted file mode 100644 index bb9b8c23807..00000000000 --- a/core/dbt/include/global_project/macros/get_custom_name/get_custom_database.sql +++ /dev/null @@ -1,32 +0,0 @@ -{# - Renders a database name given a custom database name. If the custom - database name is none, then the resulting database is just the "database" - value in the specified target. If a database override is specified, then - the resulting database is the default database concatenated with the - custom database. - - This macro can be overriden in projects to define different semantics - for rendering a database name. - - Arguments: - custom_database_name: The custom database name specified for a model, or none - node: The node the database is being generated for - -#} -{% macro generate_database_name(custom_database_name=none, node=none) -%} - {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %} -{%- endmacro %} - -{% macro default__generate_database_name(custom_database_name=none, node=none) -%} - {%- set default_database = target.database -%} - {%- if custom_database_name is none -%} - - {{ default_database }} - - {%- else -%} - - {{ custom_database_name }} - - {%- endif -%} - -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/get_custom_name/get_custom_schema.sql b/core/dbt/include/global_project/macros/get_custom_name/get_custom_schema.sql deleted file mode 100644 index 20348ec3e7b..00000000000 --- a/core/dbt/include/global_project/macros/get_custom_name/get_custom_schema.sql +++ /dev/null @@ -1,60 +0,0 @@ - -{# - Renders a schema name given a custom schema name. If the custom - schema name is none, then the resulting schema is just the "schema" - value in the specified target. If a schema override is specified, then - the resulting schema is the default schema concatenated with the - custom schema. 
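{#
  A sketch of a common project-level override of the generate_schema_name macro documented here,
  shown for illustration: it uses the custom schema name verbatim instead of concatenating it onto
  the target schema. Projects that want the default behavior do not need this.
#}
{% macro generate_schema_name(custom_schema_name, node) -%}
    {%- if custom_schema_name is none -%}
        {{ target.schema }}
    {%- else -%}
        {{ custom_schema_name | trim }}
    {%- endif -%}
{%- endmacro %}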
- - This macro can be overriden in projects to define different semantics - for rendering a schema name. - - Arguments: - custom_schema_name: The custom schema name specified for a model, or none - node: The node the schema is being generated for - -#} -{% macro generate_schema_name(custom_schema_name=none, node=none) -%} - {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }} -{% endmacro %} - -{% macro default__generate_schema_name(custom_schema_name, node) -%} - - {%- set default_schema = target.schema -%} - {%- if custom_schema_name is none -%} - - {{ default_schema }} - - {%- else -%} - - {{ default_schema }}_{{ custom_schema_name | trim }} - - {%- endif -%} - -{%- endmacro %} - - -{# - Renders a schema name given a custom schema name. In production, this macro - will render out the overriden schema name for a model. Otherwise, the default - schema specified in the active target is used. - - Arguments: - custom_schema_name: The custom schema name specified for a model, or none - node: The node the schema is being generated for - -#} -{% macro generate_schema_name_for_env(custom_schema_name, node) -%} - - {%- set default_schema = target.schema -%} - {%- if target.name == 'prod' and custom_schema_name is not none -%} - - {{ custom_schema_name | trim }} - - {%- else -%} - - {{ default_schema }} - - {%- endif -%} - -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/configs.sql b/core/dbt/include/global_project/macros/materializations/configs.sql deleted file mode 100644 index d15ccb8e603..00000000000 --- a/core/dbt/include/global_project/macros/materializations/configs.sql +++ /dev/null @@ -1,21 +0,0 @@ -{% macro set_sql_header(config) -%} - {{ config.set('sql_header', caller()) }} -{%- endmacro %} - - -{% macro should_full_refresh() %} - {% set config_full_refresh = config.get('full_refresh') %} - {% if config_full_refresh is none %} - {% set config_full_refresh = flags.FULL_REFRESH %} - {% endif %} - {% do return(config_full_refresh) %} -{% endmacro %} - - -{% macro should_store_failures() %} - {% set config_store_failures = config.get('store_failures') %} - {% if config_store_failures is none %} - {% set config_store_failures = flags.STORE_FAILURES %} - {% endif %} - {% do return(config_store_failures) %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/hooks.sql b/core/dbt/include/global_project/macros/materializations/hooks.sql deleted file mode 100644 index 2e198196c4f..00000000000 --- a/core/dbt/include/global_project/macros/materializations/hooks.sql +++ /dev/null @@ -1,35 +0,0 @@ -{% macro run_hooks(hooks, inside_transaction=True) %} - {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %} - {% if not inside_transaction and loop.first %} - {% call statement(auto_begin=inside_transaction) %} - commit; - {% endcall %} - {% endif %} - {% set rendered = render(hook.get('sql')) | trim %} - {% if (rendered | length) > 0 %} - {% call statement(auto_begin=inside_transaction) %} - {{ rendered }} - {% endcall %} - {% endif %} - {% endfor %} -{% endmacro %} - - -{% macro make_hook_config(sql, inside_transaction) %} - {{ tojson({"sql": sql, "transaction": inside_transaction}) }} -{% endmacro %} - - -{% macro before_begin(sql) %} - {{ make_hook_config(sql, inside_transaction=False) }} -{% endmacro %} - - -{% macro in_transaction(sql) %} - {{ make_hook_config(sql, inside_transaction=True) }} -{% endmacro %} - - -{% macro after_commit(sql) %} - {{ 
make_hook_config(sql, inside_transaction=False) }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/clone/can_clone_table.sql b/core/dbt/include/global_project/macros/materializations/models/clone/can_clone_table.sql deleted file mode 100644 index 89628bfab35..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/clone/can_clone_table.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro can_clone_table() %} - {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }} -{% endmacro %} - -{% macro default__can_clone_table() %} - {{ return(False) }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/clone/clone.sql b/core/dbt/include/global_project/macros/materializations/models/clone/clone.sql deleted file mode 100644 index b78ca9d01ab..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/clone/clone.sql +++ /dev/null @@ -1,62 +0,0 @@ -{%- materialization clone, default -%} - - {%- set relations = {'relations': []} -%} - - {%- if not defer_relation -%} - -- nothing to do - {{ log("No relation found in state manifest for " ~ model.unique_id, info=True) }} - {{ return(relations) }} - {%- endif -%} - - {%- set existing_relation = load_cached_relation(this) -%} - - {%- if existing_relation and not flags.FULL_REFRESH -%} - -- noop! - {{ log("Relation " ~ existing_relation ~ " already exists", info=True) }} - {{ return(relations) }} - {%- endif -%} - - {%- set other_existing_relation = load_cached_relation(defer_relation) -%} - - -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table - -- Otherwise, this will be a view - - {% set can_clone_table = can_clone_table() %} - - {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%} - - {%- set target_relation = this.incorporate(type='table') -%} - {% if existing_relation is not none and not existing_relation.is_table %} - {{ log("Dropping relation " ~ existing_relation ~ " because it is of type " ~ existing_relation.type) }} - {{ drop_relation_if_exists(existing_relation) }} - {% endif %} - - -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace' - {% call statement('main') %} - {{ create_or_replace_clone(target_relation, defer_relation) }} - {% endcall %} - - {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - {% do persist_docs(target_relation, model) %} - - {{ return({'relations': [target_relation]}) }} - - {%- else -%} - - {%- set target_relation = this.incorporate(type='view') -%} - - -- reuse the view materialization - -- TODO: support actual dispatch for materialization macros - -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799 - {% set search_name = "materialization_view_" ~ adapter.type() %} - {% if not search_name in context %} - {% set search_name = "materialization_view_default" %} - {% endif %} - {% set materialization_macro = context[search_name] %} - {% set relations = materialization_macro() %} - {{ return(relations) }} - - {%- endif -%} - -{%- endmaterialization -%} diff --git a/core/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql b/core/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql deleted file mode 100644 index 
204e9e874e4..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/clone/create_or_replace_clone.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro create_or_replace_clone(this_relation, defer_relation) %} - {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }} -{% endmacro %} - -{% macro default__create_or_replace_clone(this_relation, defer_relation) %} - create or replace table {{ this_relation }} clone {{ defer_relation }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/column_helpers.sql b/core/dbt/include/global_project/macros/materializations/models/incremental/column_helpers.sql deleted file mode 100644 index 03f9b406a3c..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/incremental/column_helpers.sql +++ /dev/null @@ -1,80 +0,0 @@ -/* {# - Helper macros for internal use with incremental materializations. - Use with care if calling elsewhere. -#} */ - - -{% macro get_quoted_csv(column_names) %} - - {% set quoted = [] %} - {% for col in column_names -%} - {%- do quoted.append(adapter.quote(col)) -%} - {%- endfor %} - - {%- set dest_cols_csv = quoted | join(', ') -%} - {{ return(dest_cols_csv) }} - -{% endmacro %} - - -{% macro diff_columns(source_columns, target_columns) %} - - {% set result = [] %} - {% set source_names = source_columns | map(attribute = 'column') | list %} - {% set target_names = target_columns | map(attribute = 'column') | list %} - - {# --check whether the name attribute exists in the target - this does not perform a data type check #} - {% for sc in source_columns %} - {% if sc.name not in target_names %} - {{ result.append(sc) }} - {% endif %} - {% endfor %} - - {{ return(result) }} - -{% endmacro %} - - -{% macro diff_column_data_types(source_columns, target_columns) %} - - {% set result = [] %} - {% for sc in source_columns %} - {% set tc = target_columns | selectattr("name", "equalto", sc.name) | list | first %} - {% if tc %} - {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %} - {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }} - {% endif %} - {% endif %} - {% endfor %} - - {{ return(result) }} - -{% endmacro %} - -{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %} - {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }} -{% endmacro %} - -{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %} - {%- set default_cols = dest_columns | map(attribute="quoted") | list -%} - - {%- if merge_update_columns and merge_exclude_columns -%} - {{ exceptions.raise_compiler_error( - 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config' - )}} - {%- elif merge_update_columns -%} - {%- set update_columns = merge_update_columns -%} - {%- elif merge_exclude_columns -%} - {%- set update_columns = [] -%} - {%- for column in dest_columns -%} - {% if column.column | lower not in merge_exclude_columns | map("lower") | list %} - {%- do update_columns.append(column.quoted) -%} - {% endif %} - {%- endfor -%} - {%- else -%} - {%- set update_columns = default_cols -%} - {%- endif -%} - - {{ return(update_columns) }} - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql b/core/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql deleted file mode 100644 index e8ff5c1ea4f..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/incremental/incremental.sql +++ /dev/null @@ -1,92 +0,0 @@ - -{% materialization incremental, default -%} - - -- relations - {%- set existing_relation = load_cached_relation(this) -%} - {%- set target_relation = this.incorporate(type='table') -%} - {%- set temp_relation = make_temp_relation(target_relation)-%} - {%- set intermediate_relation = make_intermediate_relation(target_relation)-%} - {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%} - {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%} - - -- configs - {%- set unique_key = config.get('unique_key') -%} - {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%} - {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%} - - -- the temp_ and backup_ relations should not already exist in the database; get_relation - -- will return None in that case. Otherwise, we get a relation that we can drop - -- later, before we try to use this name for the current operation. This has to happen before - -- BEGIN, in a separate transaction - {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%} - {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%} - -- grab current tables grants config for comparision later on - {% set grant_config = config.get('grants') %} - {{ drop_relation_if_exists(preexisting_intermediate_relation) }} - {{ drop_relation_if_exists(preexisting_backup_relation) }} - - {{ run_hooks(pre_hooks, inside_transaction=False) }} - - -- `BEGIN` happens here: - {{ run_hooks(pre_hooks, inside_transaction=True) }} - - {% set to_drop = [] %} - - {% if existing_relation is none %} - {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %} - {% elif full_refresh_mode %} - {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %} - {% set need_swap = true %} - {% else %} - {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %} - {% do adapter.expand_target_column_types( - from_relation=temp_relation, - to_relation=target_relation) %} - {#-- Process schema changes. Returns dict of changes if successful. 
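{#
  An illustrative model-level config that exercises the incremental materialization above,
  including the on_schema_change handling and the is_incremental() helper that also appears in
  this diff. The model name `stg_events` and the `id`/`updated_at` columns are assumptions.
#}
{{
    config(
        materialized='incremental',
        unique_key='id',
        on_schema_change='append_new_columns'
    )
}}

select * from {{ ref('stg_events') }}

{% if is_incremental() %}
    where updated_at > (select max(updated_at) from {{ this }})
{% endif %}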
Use source columns for upserting/merging --#} - {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %} - {% if not dest_columns %} - {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %} - {% endif %} - - {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#} - {% set incremental_strategy = config.get('incremental_strategy') or 'default' %} - {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %} - {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %} - {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %} - {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %} - - {% endif %} - - {% call statement("main") %} - {{ build_sql }} - {% endcall %} - - {% if need_swap %} - {% do adapter.rename_relation(target_relation, backup_relation) %} - {% do adapter.rename_relation(intermediate_relation, target_relation) %} - {% do to_drop.append(backup_relation) %} - {% endif %} - - {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {% do persist_docs(target_relation, model) %} - - {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %} - {% do create_indexes(target_relation) %} - {% endif %} - - {{ run_hooks(post_hooks, inside_transaction=True) }} - - -- `COMMIT` happens here - {% do adapter.commit() %} - - {% for rel in to_drop %} - {% do adapter.drop_relation(rel) %} - {% endfor %} - - {{ run_hooks(post_hooks, inside_transaction=False) }} - - {{ return({'relations': [target_relation]}) }} - -{%- endmaterialization %} diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/is_incremental.sql b/core/dbt/include/global_project/macros/materializations/models/incremental/is_incremental.sql deleted file mode 100644 index 10f45e0238a..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/incremental/is_incremental.sql +++ /dev/null @@ -1,13 +0,0 @@ - -{% macro is_incremental() %} - {#-- do not run introspective queries in parsing #} - {% if not execute %} - {{ return(False) }} - {% else %} - {% set relation = adapter.get_relation(this.database, this.schema, this.table) %} - {{ return(relation is not none - and relation.type == 'table' - and model.config.materialized == 'incremental' - and not should_full_refresh()) }} - {% endif %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/merge.sql b/core/dbt/include/global_project/macros/materializations/models/incremental/merge.sql deleted file mode 100644 index ca972c9f258..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/incremental/merge.sql +++ /dev/null @@ -1,131 +0,0 @@ -{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%} - -- back compat for old kwarg name - {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %} - {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }} -{%- endmacro %} - -{% macro default__get_merge_sql(target, source, unique_key, dest_columns, 
incremental_predicates=none) -%} - {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%} - {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} - {%- set merge_update_columns = config.get('merge_update_columns') -%} - {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%} - {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%} - {%- set sql_header = config.get('sql_header', none) -%} - - {% if unique_key %} - {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %} - {% for key in unique_key %} - {% set this_key_match %} - DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }} - {% endset %} - {% do predicates.append(this_key_match) %} - {% endfor %} - {% else %} - {% set unique_key_match %} - DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }} - {% endset %} - {% do predicates.append(unique_key_match) %} - {% endif %} - {% else %} - {% do predicates.append('FALSE') %} - {% endif %} - - {{ sql_header if sql_header is not none }} - - merge into {{ target }} as DBT_INTERNAL_DEST - using {{ source }} as DBT_INTERNAL_SOURCE - on {{"(" ~ predicates | join(") and (") ~ ")"}} - - {% if unique_key %} - when matched then update set - {% for column_name in update_columns -%} - {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }} - {%- if not loop.last %}, {%- endif %} - {%- endfor %} - {% endif %} - - when not matched then insert - ({{ dest_cols_csv }}) - values - ({{ dest_cols_csv }}) - -{% endmacro %} - - -{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%} - {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }} -{%- endmacro %} - -{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%} - - {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} - - {% if unique_key %} - {% if unique_key is sequence and unique_key is not string %} - delete from {{target }} - using {{ source }} - where ( - {% for key in unique_key %} - {{ source }}.{{ key }} = {{ target }}.{{ key }} - {{ "and " if not loop.last}} - {% endfor %} - {% if incremental_predicates %} - {% for predicate in incremental_predicates %} - and {{ predicate }} - {% endfor %} - {% endif %} - ); - {% else %} - delete from {{ target }} - where ( - {{ unique_key }}) in ( - select ({{ unique_key }}) - from {{ source }} - ) - {%- if incremental_predicates %} - {% for predicate in incremental_predicates %} - and {{ predicate }} - {% endfor %} - {%- endif -%}; - - {% endif %} - {% endif %} - - insert into {{ target }} ({{ dest_cols_csv }}) - ( - select {{ dest_cols_csv }} - from {{ source }} - ) - -{%- endmacro %} - - -{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%} - {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }} -{%- endmacro %} - -{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%} - {#-- The only time include_sql_header is True: --#} - {#-- BigQuery + insert_overwrite strategy + "static" partitions config --#} - {#-- We should consider including the sql header at the materialization level instead --#} - - {%- set predicates = [] if 
predicates is none else [] + predicates -%} - {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} - {%- set sql_header = config.get('sql_header', none) -%} - - {{ sql_header if sql_header is not none and include_sql_header }} - - merge into {{ target }} as DBT_INTERNAL_DEST - using {{ source }} as DBT_INTERNAL_SOURCE - on FALSE - - when not matched by source - {% if predicates %} and {{ predicates | join(' and ') }} {% endif %} - then delete - - when not matched then insert - ({{ dest_cols_csv }}) - values - ({{ dest_cols_csv }}) - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/on_schema_change.sql b/core/dbt/include/global_project/macros/materializations/models/incremental/on_schema_change.sql deleted file mode 100644 index 76fe372f41b..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/incremental/on_schema_change.sql +++ /dev/null @@ -1,144 +0,0 @@ -{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %} - - {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %} - - {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %} - {% do log(log_message) %} - - {{ return(default) }} - - {% else %} - - {{ return(on_schema_change) }} - - {% endif %} - -{% endmacro %} - - -{% macro check_for_schema_changes(source_relation, target_relation) %} - - {% set schema_changed = False %} - - {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%} - {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%} - {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%} - {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%} - - {% set new_target_types = diff_column_data_types(source_columns, target_columns) %} - - {% if source_not_in_target != [] %} - {% set schema_changed = True %} - {% elif target_not_in_source != [] or new_target_types != [] %} - {% set schema_changed = True %} - {% elif new_target_types != [] %} - {% set schema_changed = True %} - {% endif %} - - {% set changes_dict = { - 'schema_changed': schema_changed, - 'source_not_in_target': source_not_in_target, - 'target_not_in_source': target_not_in_source, - 'source_columns': source_columns, - 'target_columns': target_columns, - 'new_target_types': new_target_types - } %} - - {% set msg %} - In {{ target_relation }}: - Schema changed: {{ schema_changed }} - Source columns not in target: {{ source_not_in_target }} - Target columns not in source: {{ target_not_in_source }} - New column types: {{ new_target_types }} - {% endset %} - - {% do log(msg) %} - - {{ return(changes_dict) }} - -{% endmacro %} - - -{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %} - - {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%} - - {%- if on_schema_change == 'append_new_columns'-%} - {%- if add_to_target_arr | length > 0 -%} - {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%} - {%- endif -%} - - {% elif on_schema_change == 'sync_all_columns' %} - {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%} - {%- set new_target_types = schema_changes_dict['new_target_types'] -%} - - {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %} - {%- do 
alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%} - {% endif %} - - {% if new_target_types != [] %} - {% for ntt in new_target_types %} - {% set column_name = ntt['column_name'] %} - {% set new_type = ntt['new_type'] %} - {% do alter_column_type(target_relation, column_name, new_type) %} - {% endfor %} - {% endif %} - - {% endif %} - - {% set schema_change_message %} - In {{ target_relation }}: - Schema change approach: {{ on_schema_change }} - Columns added: {{ add_to_target_arr }} - Columns removed: {{ remove_from_target_arr }} - Data types changed: {{ new_target_types }} - {% endset %} - - {% do log(schema_change_message) %} - -{% endmacro %} - - -{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %} - - {% if on_schema_change == 'ignore' %} - - {{ return({}) }} - - {% else %} - - {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %} - - {% if schema_changes_dict['schema_changed'] %} - - {% if on_schema_change == 'fail' %} - - {% set fail_msg %} - The source and target schemas on this incremental model are out of sync! - They can be reconciled in several ways: - - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation. - - Re-run the incremental model with `full_refresh: True` to update the target schema. - - update the schema manually and re-run the process. - - Additional troubleshooting context: - Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }} - Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }} - New column types: {{ schema_changes_dict['new_target_types'] }} - {% endset %} - - {% do exceptions.raise_compiler_error(fail_msg) %} - - {# -- unless we ignore, run the sync operation per the config #} - {% else %} - - {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %} - - {% endif %} - - {% endif %} - - {{ return(schema_changes_dict['source_columns']) }} - - {% endif %} - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/incremental/strategies.sql b/core/dbt/include/global_project/macros/materializations/models/incremental/strategies.sql deleted file mode 100644 index 72082ccad32..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/incremental/strategies.sql +++ /dev/null @@ -1,79 +0,0 @@ -{% macro get_incremental_append_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - - -{% macro default__get_incremental_append_sql(arg_dict) %} - - {% do return(get_insert_into_sql(arg_dict["target_relation"], arg_dict["temp_relation"], arg_dict["dest_columns"])) %} - -{% endmacro %} - - -{# snowflake #} -{% macro get_incremental_delete_insert_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - -{% macro default__get_incremental_delete_insert_sql(arg_dict) %} - - {% do return(get_delete_insert_merge_sql(arg_dict["target_relation"], arg_dict["temp_relation"], arg_dict["unique_key"], arg_dict["dest_columns"], arg_dict["incremental_predicates"])) %} - -{% endmacro %} - - -{# snowflake, bigquery, spark #} -{% macro get_incremental_merge_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - -{% macro default__get_incremental_merge_sql(arg_dict) %} - - {% do 
return(get_merge_sql(arg_dict["target_relation"], arg_dict["temp_relation"], arg_dict["unique_key"], arg_dict["dest_columns"], arg_dict["incremental_predicates"])) %} - -{% endmacro %} - - -{% macro get_incremental_insert_overwrite_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - -{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %} - - {% do return(get_insert_overwrite_merge_sql(arg_dict["target_relation"], arg_dict["temp_relation"], arg_dict["dest_columns"], arg_dict["incremental_predicates"])) %} - -{% endmacro %} - - -{% macro get_incremental_default_sql(arg_dict) %} - - {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }} - -{% endmacro %} - -{% macro default__get_incremental_default_sql(arg_dict) %} - - {% do return(get_incremental_append_sql(arg_dict)) %} - -{% endmacro %} - - -{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %} - - {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%} - - insert into {{ target_relation }} ({{ dest_cols_csv }}) - ( - select {{ dest_cols_csv }} - from {{ temp_relation }} - ) - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/materialized_view/alter_materialized_view.sql b/core/dbt/include/global_project/macros/materializations/models/materialized_view/alter_materialized_view.sql deleted file mode 100644 index b9ccdc2f141..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/materialized_view/alter_materialized_view.sql +++ /dev/null @@ -1,30 +0,0 @@ -{% macro get_alter_materialized_view_as_sql( - relation, - configuration_changes, - sql, - existing_relation, - backup_relation, - intermediate_relation -) %} - {{- log('Applying ALTER to: ' ~ relation) -}} - {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')( - relation, - configuration_changes, - sql, - existing_relation, - backup_relation, - intermediate_relation - ) -}} -{% endmacro %} - - -{% macro default__get_alter_materialized_view_as_sql( - relation, - configuration_changes, - sql, - existing_relation, - backup_relation, - intermediate_relation -) %} - {{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/materialized_view/create_materialized_view.sql b/core/dbt/include/global_project/macros/materializations/models/materialized_view/create_materialized_view.sql deleted file mode 100644 index 4b2ebeb3aa1..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/materialized_view/create_materialized_view.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro get_create_materialized_view_as_sql(relation, sql) -%} - {{- log('Applying CREATE to: ' ~ relation) -}} - {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}} -{%- endmacro %} - - -{% macro default__get_create_materialized_view_as_sql(relation, sql) -%} - {{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql b/core/dbt/include/global_project/macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql deleted file mode 100644 index b1639b1631e..00000000000 --- 
a/core/dbt/include/global_project/macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql +++ /dev/null @@ -1,23 +0,0 @@ -{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %} - /* {# - It's recommended that configuration changes be formatted as follows: - {"<change_category>": [{"action": "<name>", "context": ...}]} - - For example: - { - "indexes": [ - {"action": "drop", "context": "index_abc"}, - {"action": "create", "context": {"columns": ["column_1", "column_2"], "type": "hash", "unique": True}}, - ], - } - - Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`. - #} */ - {{- log('Determining configuration changes on: ' ~ existing_relation) -}} - {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%} -{% endmacro %} - - -{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %} - {{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/materialized_view/materialized_view.sql b/core/dbt/include/global_project/macros/materializations/models/materialized_view/materialized_view.sql deleted file mode 100644 index 015f6cb8585..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/materialized_view/materialized_view.sql +++ /dev/null @@ -1,121 +0,0 @@ -{% materialization materialized_view, default %} - {% set existing_relation = load_cached_relation(this) %} - {% set target_relation = this.incorporate(type=this.MaterializedView) %} - {% set intermediate_relation = make_intermediate_relation(target_relation) %} - {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %} - {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %} - - {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }} - - {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %} - - {% if build_sql == '' %} - {{ materialized_view_execute_no_op(target_relation) }} - {% else %} - {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }} - {% endif %} - - {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }} - - {{ return({'relations': [target_relation]}) }} - -{% endmaterialization %} - - -{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %} - - -- backup_relation and intermediate_relation should not already exist in the database - -- it's possible these exist because of a previous run that exited unexpectedly - {% set preexisting_backup_relation = load_cached_relation(backup_relation) %} - {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %} - - -- drop the temp relations if they exist already in the database - {{ drop_relation_if_exists(preexisting_backup_relation) }} - {{ drop_relation_if_exists(preexisting_intermediate_relation) }} - - {{ run_hooks(pre_hooks, inside_transaction=False) }} - -{% endmacro %} - - -{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %} - - -- drop the temp relations if they exist to leave the database clean for the next run - {{ 
drop_relation_if_exists(backup_relation) }} - {{ drop_relation_if_exists(intermediate_relation) }} - - {{ run_hooks(post_hooks, inside_transaction=False) }} - -{% endmacro %} - - -{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %} - - {% set full_refresh_mode = should_full_refresh() %} - - -- determine the scenario we're in: create, full_refresh, alter, refresh data - {% if existing_relation is none %} - {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %} - {% elif full_refresh_mode or not existing_relation.is_materialized_view %} - {% set build_sql = get_replace_materialized_view_as_sql(target_relation, sql, existing_relation, backup_relation, intermediate_relation) %} - {% else %} - - -- get config options - {% set on_configuration_change = config.get('on_configuration_change') %} - {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %} - - {% if configuration_changes is none %} - {% set build_sql = refresh_materialized_view(target_relation) %} - - {% elif on_configuration_change == 'apply' %} - {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %} - {% elif on_configuration_change == 'continue' %} - {% set build_sql = '' %} - {{ exceptions.warn("Configuration changes were identified and `on_configuration_change` was set to `continue` for `" ~ target_relation ~ "`") }} - {% elif on_configuration_change == 'fail' %} - {{ exceptions.raise_fail_fast_error("Configuration changes were identified and `on_configuration_change` was set to `fail` for `" ~ target_relation ~ "`") }} - - {% else %} - -- this only happens if the user provides a value other than `apply`, 'skip', 'fail' - {{ exceptions.raise_compiler_error("Unexpected configuration scenario") }} - - {% endif %} - - {% endif %} - - {% do return(build_sql) %} - -{% endmacro %} - - -{% macro materialized_view_execute_no_op(target_relation) %} - {% do store_raw_result( - name="main", - message="skip " ~ target_relation, - code="skip", - rows_affected="-1" - ) %} -{% endmacro %} - - -{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %} - - -- `BEGIN` happens here: - {{ run_hooks(pre_hooks, inside_transaction=True) }} - - {% set grant_config = config.get('grants') %} - - {% call statement(name="main") %} - {{ build_sql }} - {% endcall %} - - {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {% do persist_docs(target_relation, model) %} - - {{ run_hooks(post_hooks, inside_transaction=True) }} - - {{ adapter.commit() }} - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/materialized_view/refresh_materialized_view.sql b/core/dbt/include/global_project/macros/materializations/models/materialized_view/refresh_materialized_view.sql deleted file mode 100644 index 16345138593..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/materialized_view/refresh_materialized_view.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro refresh_materialized_view(relation) %} - {{- log('Applying REFRESH to: ' ~ relation) -}} - {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}} -{% endmacro %} - - -{% macro default__refresh_materialized_view(relation) %} - {{ 
exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/materialized_view/replace_materialized_view.sql b/core/dbt/include/global_project/macros/materializations/models/materialized_view/replace_materialized_view.sql deleted file mode 100644 index 43319c5cc1b..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/materialized_view/replace_materialized_view.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %} - {{- log('Applying REPLACE to: ' ~ relation) -}} - {{- adapter.dispatch('get_replace_materialized_view_as_sql', 'dbt')(relation, sql, existing_relation, backup_relation, intermediate_relation) -}} -{% endmacro %} - - -{% macro default__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %} - {{ exceptions.raise_compiler_error("Materialized views have not been implemented for this adapter.") }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/table/columns_spec_ddl.sql b/core/dbt/include/global_project/macros/materializations/models/table/columns_spec_ddl.sql deleted file mode 100644 index 7a56d09e189..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/table/columns_spec_ddl.sql +++ /dev/null @@ -1,89 +0,0 @@ -{%- macro get_table_columns_and_constraints() -%} - {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }} -{%- endmacro -%} - -{% macro default__get_table_columns_and_constraints() -%} - {{ return(table_columns_and_constraints()) }} -{%- endmacro %} - -{% macro table_columns_and_constraints() %} - {# loop through user_provided_columns to create DDL with data types and constraints #} - {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%} - {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%} - ( - {% for c in raw_column_constraints -%} - {{ c }}{{ "," if not loop.last or raw_model_constraints }} - {% endfor %} - {% for c in raw_model_constraints -%} - {{ c }}{{ "," if not loop.last }} - {% endfor -%} - ) -{% endmacro %} - -{%- macro get_assert_columns_equivalent(sql) -%} - {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }} -{%- endmacro -%} - -{% macro default__get_assert_columns_equivalent(sql) -%} - {{ return(assert_columns_equivalent(sql)) }} -{%- endmacro %} - -{# - Compares the column schema provided by a model's sql file to the column schema provided by a model's schema file. - If any differences in name, data_type or number of columns exist between the two schemas, raises a compiler error -#} -{% macro assert_columns_equivalent(sql) %} - - {#-- First ensure the user has defined 'columns' in yaml specification --#} - {%- set user_defined_columns = model['columns'] -%} - {%- if not user_defined_columns -%} - {{ exceptions.raise_contract_error([], []) }} - {%- endif -%} - - {#-- Obtain the column schema provided by sql file. #} - {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%} - {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. 
#} - {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%} - - {#-- create dictionaries with name and formatted data type and strings for exception #} - {%- set sql_columns = format_columns(sql_file_provided_columns) -%} - {%- set yaml_columns = format_columns(schema_file_provided_columns) -%} - - {%- if sql_columns|length != yaml_columns|length -%} - {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%} - {%- endif -%} - - {%- for sql_col in sql_columns -%} - {%- set yaml_col = [] -%} - {%- for this_col in yaml_columns -%} - {%- if this_col['name'] == sql_col['name'] -%} - {%- do yaml_col.append(this_col) -%} - {%- break -%} - {%- endif -%} - {%- endfor -%} - {%- if not yaml_col -%} - {#-- Column with name not found in yaml #} - {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%} - {%- endif -%} - {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%} - {#-- Column data types don't match #} - {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%} - {%- endif -%} - {%- endfor -%} - -{% endmacro %} - -{% macro format_columns(columns) %} - {% set formatted_columns = [] %} - {% for column in columns %} - {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%} - {%- do formatted_columns.append(formatted_column) -%} - {% endfor %} - {{ return(formatted_columns) }} -{% endmacro %} - -{% macro default__format_column(column) -%} - {% set data_type = column.dtype %} - {% set formatted = column.column.lower() ~ " " ~ data_type %} - {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }} -{%- endmacro -%} diff --git a/core/dbt/include/global_project/macros/materializations/models/table/create_table_as.sql b/core/dbt/include/global_project/macros/materializations/models/table/create_table_as.sql deleted file mode 100644 index 8e15d85d9cd..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/table/create_table_as.sql +++ /dev/null @@ -1,60 +0,0 @@ -{% macro get_create_table_as_sql(temporary, relation, sql) -%} - {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }} -{%- endmacro %} - -{% macro default__get_create_table_as_sql(temporary, relation, sql) -%} - {{ return(create_table_as(temporary, relation, sql)) }} -{% endmacro %} - - -/* {# keep logic under old macro name for backwards compatibility #} */ -{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%} - {# backward compatibility for create_table_as that does not support language #} - {% if language == "sql" %} - {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}} - {% else %} - {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }} - {% endif %} - -{%- endmacro %} - -{% macro default__create_table_as(temporary, relation, sql) -%} - {%- set sql_header = config.get('sql_header', none) -%} - - {{ sql_header if sql_header is not none }} - - create {% if temporary: -%}temporary{%- endif %} table - {{ relation.include(database=(not temporary), schema=(not temporary)) }} - {% set contract_config = config.get('contract') %} - {% if contract_config.enforced %} - {{ get_assert_columns_equivalent(sql) }} - {{ get_table_columns_and_constraints() }} - {%- set sql = get_select_subquery(sql) %} - {% endif %} - as ( - {{ sql }} - ); -{%- endmacro %} - - -{% macro default__get_column_names() %} - {#- loop through user_provided_columns to get 
column names -#} - {%- set user_provided_columns = model['columns'] -%} - {%- for i in user_provided_columns %} - {%- set col = user_provided_columns[i] -%} - {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%} - {{ col_name }}{{ ", " if not loop.last }} - {%- endfor -%} -{% endmacro %} - - -{% macro get_select_subquery(sql) %} - {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }} -{% endmacro %} - -{% macro default__get_select_subquery(sql) %} - select {{ adapter.dispatch('get_column_names', 'dbt')() }} - from ( - {{ sql }} - ) as model_subq -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/table/table.sql b/core/dbt/include/global_project/macros/materializations/models/table/table.sql deleted file mode 100644 index 3d1122efab8..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/table/table.sql +++ /dev/null @@ -1,64 +0,0 @@ -{% materialization table, default %} - - {%- set existing_relation = load_cached_relation(this) -%} - {%- set target_relation = this.incorporate(type='table') %} - {%- set intermediate_relation = make_intermediate_relation(target_relation) -%} - -- the intermediate_relation should not already exist in the database; get_relation - -- will return None in that case. Otherwise, we get a relation that we can drop - -- later, before we try to use this name for the current operation - {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%} - /* - See ../view/view.sql for more information about this relation. - */ - {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%} - {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%} - -- as above, the backup_relation should not already exist - {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%} - -- grab current tables grants config for comparision later on - {% set grant_config = config.get('grants') %} - - -- drop the temp relations if they exist already in the database - {{ drop_relation_if_exists(preexisting_intermediate_relation) }} - {{ drop_relation_if_exists(preexisting_backup_relation) }} - - {{ run_hooks(pre_hooks, inside_transaction=False) }} - - -- `BEGIN` happens here: - {{ run_hooks(pre_hooks, inside_transaction=True) }} - - -- build model - {% call statement('main') -%} - {{ get_create_table_as_sql(False, intermediate_relation, sql) }} - {%- endcall %} - - -- cleanup - {% if existing_relation is not none %} - /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped - since the variable was first set. 
*/ - {% set existing_relation = load_cached_relation(existing_relation) %} - {% if existing_relation is not none %} - {{ adapter.rename_relation(existing_relation, backup_relation) }} - {% endif %} - {% endif %} - - {{ adapter.rename_relation(intermediate_relation, target_relation) }} - - {% do create_indexes(target_relation) %} - - {{ run_hooks(post_hooks, inside_transaction=True) }} - - {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {% do persist_docs(target_relation, model) %} - - -- `COMMIT` happens here - {{ adapter.commit() }} - - -- finally, drop the existing/backup relation after the commit - {{ drop_relation_if_exists(backup_relation) }} - - {{ run_hooks(post_hooks, inside_transaction=False) }} - - {{ return({'relations': [target_relation]}) }} -{% endmaterialization %} diff --git a/core/dbt/include/global_project/macros/materializations/models/view/create_or_replace_view.sql b/core/dbt/include/global_project/macros/materializations/models/view/create_or_replace_view.sql deleted file mode 100644 index 2846a322e82..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/view/create_or_replace_view.sql +++ /dev/null @@ -1,44 +0,0 @@ -/* {# - Core materialization implementation. BigQuery and Snowflake are similar - because both can use `create or replace view` where the resulting view schema - is not necessarily the same as the existing view. On Redshift, this would - result in: ERROR: cannot change number of columns in view - - This implementation is superior to the create_temp, swap_with_existing, drop_old - paradigm because transactions don't run DDL queries atomically on Snowflake. By using - `create or replace view`, the materialization becomes atomic in nature. -#} */ - -{% macro create_or_replace_view() %} - {%- set identifier = model['alias'] -%} - - {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} - {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} - - {%- set target_relation = api.Relation.create( - identifier=identifier, schema=schema, database=database, - type='view') -%} - {% set grant_config = config.get('grants') %} - - {{ run_hooks(pre_hooks) }} - - -- If there's a table with the same name and we weren't told to full refresh, - -- that's an error. If we were told to full refresh, drop it. This behavior differs - -- for Snowflake and BigQuery, so multiple dispatch is used. 
- {%- if old_relation is not none and old_relation.is_table -%} - {{ handle_existing_table(should_full_refresh(), old_relation) }} - {%- endif -%} - - -- build model - {% call statement('main') -%} - {{ get_create_view_as_sql(target_relation, sql) }} - {%- endcall %} - - {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {{ run_hooks(post_hooks) }} - - {{ return({'relations': [target_relation]}) }} - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/view/create_view_as.sql b/core/dbt/include/global_project/macros/materializations/models/view/create_view_as.sql deleted file mode 100644 index 41cd196c310..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/view/create_view_as.sql +++ /dev/null @@ -1,27 +0,0 @@ -{% macro get_create_view_as_sql(relation, sql) -%} - {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }} -{%- endmacro %} - -{% macro default__get_create_view_as_sql(relation, sql) -%} - {{ return(create_view_as(relation, sql)) }} -{% endmacro %} - - -/* {# keep logic under old name for backwards compatibility #} */ -{% macro create_view_as(relation, sql) -%} - {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }} -{%- endmacro %} - -{% macro default__create_view_as(relation, sql) -%} - {%- set sql_header = config.get('sql_header', none) -%} - - {{ sql_header if sql_header is not none }} - create view {{ relation }} - {% set contract_config = config.get('contract') %} - {% if contract_config.enforced %} - {{ get_assert_columns_equivalent(sql) }} - {%- endif %} - as ( - {{ sql }} - ); -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/view/helpers.sql b/core/dbt/include/global_project/macros/materializations/models/view/helpers.sql deleted file mode 100644 index 98f57018730..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/view/helpers.sql +++ /dev/null @@ -1,8 +0,0 @@ -{% macro handle_existing_table(full_refresh, old_relation) %} - {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }} -{% endmacro %} - -{% macro default__handle_existing_table(full_refresh, old_relation) %} - {{ log("Dropping relation " ~ old_relation ~ " because it is of type " ~ old_relation.type) }} - {{ adapter.drop_relation(old_relation) }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/models/view/view.sql b/core/dbt/include/global_project/macros/materializations/models/view/view.sql deleted file mode 100644 index 59ac6c4b976..00000000000 --- a/core/dbt/include/global_project/macros/materializations/models/view/view.sql +++ /dev/null @@ -1,72 +0,0 @@ -{%- materialization view, default -%} - - {%- set existing_relation = load_cached_relation(this) -%} - {%- set target_relation = this.incorporate(type='view') -%} - {%- set intermediate_relation = make_intermediate_relation(target_relation) -%} - - -- the intermediate_relation should not already exist in the database; get_relation - -- will return None in that case. Otherwise, we get a relation that we can drop - -- later, before we try to use this name for the current operation - {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%} - /* - This relation (probably) doesn't exist yet. 
If it does exist, it's a leftover from - a previous run, and we're going to try to drop it immediately. At the end of this - materialization, we're going to rename the "existing_relation" to this identifier, - and then we're going to drop it. In order to make sure we run the correct one of: - - drop view ... - - drop table ... - - We need to set the type of this relation to be the type of the existing_relation, if it exists, - or else "view" as a sane default if it does not. Note that if the existing_relation does not - exist, then there is nothing to move out of the way and subsequentally drop. In that case, - this relation will be effectively unused. - */ - {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%} - {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%} - -- as above, the backup_relation should not already exist - {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%} - -- grab current tables grants config for comparision later on - {% set grant_config = config.get('grants') %} - - {{ run_hooks(pre_hooks, inside_transaction=False) }} - - -- drop the temp relations if they exist already in the database - {{ drop_relation_if_exists(preexisting_intermediate_relation) }} - {{ drop_relation_if_exists(preexisting_backup_relation) }} - - -- `BEGIN` happens here: - {{ run_hooks(pre_hooks, inside_transaction=True) }} - - -- build model - {% call statement('main') -%} - {{ get_create_view_as_sql(intermediate_relation, sql) }} - {%- endcall %} - - -- cleanup - -- move the existing view out of the way - {% if existing_relation is not none %} - /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped - since the variable was first set. 
*/ - {% set existing_relation = load_cached_relation(existing_relation) %} - {% if existing_relation is not none %} - {{ adapter.rename_relation(existing_relation, backup_relation) }} - {% endif %} - {% endif %} - {{ adapter.rename_relation(intermediate_relation, target_relation) }} - - {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {% do persist_docs(target_relation, model) %} - - {{ run_hooks(post_hooks, inside_transaction=True) }} - - {{ adapter.commit() }} - - {{ drop_relation_if_exists(backup_relation) }} - - {{ run_hooks(post_hooks, inside_transaction=False) }} - - {{ return({'relations': [target_relation]}) }} - -{%- endmaterialization -%} diff --git a/core/dbt/include/global_project/macros/materializations/seeds/helpers.sql b/core/dbt/include/global_project/macros/materializations/seeds/helpers.sql deleted file mode 100644 index 44dbf370dcb..00000000000 --- a/core/dbt/include/global_project/macros/materializations/seeds/helpers.sql +++ /dev/null @@ -1,128 +0,0 @@ - -{% macro create_csv_table(model, agate_table) -%} - {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }} -{%- endmacro %} - -{% macro default__create_csv_table(model, agate_table) %} - {%- set column_override = model['config'].get('column_types', {}) -%} - {%- set quote_seed_column = model['config'].get('quote_columns', None) -%} - - {% set sql %} - create table {{ this.render() }} ( - {%- for col_name in agate_table.column_names -%} - {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%} - {%- set type = column_override.get(col_name, inferred_type) -%} - {%- set column_name = (col_name | string) -%} - {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%} - {%- endfor -%} - ) - {% endset %} - - {% call statement('_') -%} - {{ sql }} - {%- endcall %} - - {{ return(sql) }} -{% endmacro %} - - -{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%} - {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }} -{%- endmacro %} - -{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %} - {% set sql = "" %} - {% if full_refresh %} - {{ adapter.drop_relation(old_relation) }} - {% set sql = create_csv_table(model, agate_table) %} - {% else %} - {{ adapter.truncate_relation(old_relation) }} - {% set sql = "truncate table " ~ old_relation %} - {% endif %} - - {{ return(sql) }} -{% endmacro %} - - -{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %} - {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }} -{% endmacro %} - -{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %} - {{ create_or_truncate_sql }}; - -- dbt seed -- - {{ insert_sql }} -{% endmacro %} - - -{% macro get_binding_char() -%} - {{ adapter.dispatch('get_binding_char', 'dbt')() }} -{%- endmacro %} - -{% macro default__get_binding_char() %} - {{ return('%s') }} -{% endmacro %} - - -{% macro get_batch_size() -%} - {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }} -{%- endmacro %} - -{% macro default__get_batch_size() %} - {{ return(10000) }} -{% endmacro %} - - -{% macro get_seed_column_quoted_csv(model, column_names) %} - {%- set quote_seed_column = model['config'].get('quote_columns', None) -%} - {% set quoted = [] %} - {% for col in column_names -%} - {%- do 
quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%} - {%- endfor %} - - {%- set dest_cols_csv = quoted | join(', ') -%} - {{ return(dest_cols_csv) }} -{% endmacro %} - - -{% macro load_csv_rows(model, agate_table) -%} - {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }} -{%- endmacro %} - -{% macro default__load_csv_rows(model, agate_table) %} - - {% set batch_size = get_batch_size() %} - - {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %} - {% set bindings = [] %} - - {% set statements = [] %} - - {% for chunk in agate_table.rows | batch(batch_size) %} - {% set bindings = [] %} - - {% for row in chunk %} - {% do bindings.extend(row) %} - {% endfor %} - - {% set sql %} - insert into {{ this.render() }} ({{ cols_sql }}) values - {% for row in chunk -%} - ({%- for column in agate_table.column_names -%} - {{ get_binding_char() }} - {%- if not loop.last%},{%- endif %} - {%- endfor -%}) - {%- if not loop.last%},{%- endif %} - {%- endfor %} - {% endset %} - - {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %} - - {% if loop.index0 == 0 %} - {% do statements.append(sql) %} - {% endif %} - {% endfor %} - - {# Return SQL so we can render it out into the compiled files #} - {{ return(statements[0]) }} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/seeds/seed.sql b/core/dbt/include/global_project/macros/materializations/seeds/seed.sql deleted file mode 100644 index 3b66252da96..00000000000 --- a/core/dbt/include/global_project/macros/materializations/seeds/seed.sql +++ /dev/null @@ -1,60 +0,0 @@ -{% materialization seed, default %} - - {%- set identifier = model['alias'] -%} - {%- set full_refresh_mode = (should_full_refresh()) -%} - - {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%} - - {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%} - {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%} - - {%- set grant_config = config.get('grants') -%} - {%- set agate_table = load_agate_table() -%} - -- grab current tables grants config for comparison later on - - {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%} - - {{ run_hooks(pre_hooks, inside_transaction=False) }} - - -- `BEGIN` happens here: - {{ run_hooks(pre_hooks, inside_transaction=True) }} - - -- build model - {% set create_table_sql = "" %} - {% if exists_as_view %} - {{ exceptions.raise_compiler_error("Cannot seed to '{}', it is a view".format(old_relation)) }} - {% elif exists_as_table %} - {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %} - {% else %} - {% set create_table_sql = create_csv_table(model, agate_table) %} - {% endif %} - - {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %} - {% set rows_affected = (agate_table.rows | length) %} - {% set sql = load_csv_rows(model, agate_table) %} - - {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %} - {{ get_csv_sql(create_table_sql, sql) }}; - {% endcall %} - - {% set target_relation = this.incorporate(type='table') %} - - {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {% do persist_docs(target_relation, model) %} - - {% if full_refresh_mode or not exists_as_table %} - {% do create_indexes(target_relation) %} - {% endif %} - - {{ 
run_hooks(post_hooks, inside_transaction=True) }} - - -- `COMMIT` happens here - {{ adapter.commit() }} - - {{ run_hooks(post_hooks, inside_transaction=False) }} - - {{ return({'relations': [target_relation]}) }} - -{% endmaterialization %} diff --git a/core/dbt/include/global_project/macros/materializations/snapshots/helpers.sql b/core/dbt/include/global_project/macros/materializations/snapshots/helpers.sql deleted file mode 100644 index 7fd4bfd5186..00000000000 --- a/core/dbt/include/global_project/macros/materializations/snapshots/helpers.sql +++ /dev/null @@ -1,181 +0,0 @@ -{# - Add new columns to the table if applicable -#} -{% macro create_columns(relation, columns) %} - {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }} -{% endmacro %} - -{% macro default__create_columns(relation, columns) %} - {% for column in columns %} - {% call statement() %} - alter table {{ relation }} add column "{{ column.name }}" {{ column.data_type }}; - {% endcall %} - {% endfor %} -{% endmacro %} - - -{% macro post_snapshot(staging_relation) %} - {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }} -{% endmacro %} - -{% macro default__post_snapshot(staging_relation) %} - {# no-op #} -{% endmacro %} - -{% macro get_true_sql() %} - {{ adapter.dispatch('get_true_sql', 'dbt')() }} -{% endmacro %} - -{% macro default__get_true_sql() %} - {{ return('TRUE') }} -{% endmacro %} - -{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%} - {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }} -{% endmacro %} - -{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%} - - with snapshot_query as ( - - {{ source_sql }} - - ), - - snapshotted_data as ( - - select *, - {{ strategy.unique_key }} as dbt_unique_key - - from {{ target_relation }} - where dbt_valid_to is null - - ), - - insertions_source_data as ( - - select - *, - {{ strategy.unique_key }} as dbt_unique_key, - {{ strategy.updated_at }} as dbt_updated_at, - {{ strategy.updated_at }} as dbt_valid_from, - nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to, - {{ strategy.scd_id }} as dbt_scd_id - - from snapshot_query - ), - - updates_source_data as ( - - select - *, - {{ strategy.unique_key }} as dbt_unique_key, - {{ strategy.updated_at }} as dbt_updated_at, - {{ strategy.updated_at }} as dbt_valid_from, - {{ strategy.updated_at }} as dbt_valid_to - - from snapshot_query - ), - - {%- if strategy.invalidate_hard_deletes %} - - deletes_source_data as ( - - select - *, - {{ strategy.unique_key }} as dbt_unique_key - from snapshot_query - ), - {% endif %} - - insertions as ( - - select - 'insert' as dbt_change_type, - source_data.* - - from insertions_source_data as source_data - left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where snapshotted_data.dbt_unique_key is null - or ( - snapshotted_data.dbt_unique_key is not null - and ( - {{ strategy.row_changed }} - ) - ) - - ), - - updates as ( - - select - 'update' as dbt_change_type, - source_data.*, - snapshotted_data.dbt_scd_id - - from updates_source_data as source_data - join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where ( - {{ strategy.row_changed }} - ) - ) - - {%- if strategy.invalidate_hard_deletes -%} - , - - deletes as ( - - select - 'delete' as dbt_change_type, - source_data.*, - {{ snapshot_get_time() }} as dbt_valid_from, - {{ snapshot_get_time() }} as dbt_updated_at, - 
{{ snapshot_get_time() }} as dbt_valid_to, - snapshotted_data.dbt_scd_id - - from snapshotted_data - left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key - where source_data.dbt_unique_key is null - ) - {%- endif %} - - select * from insertions - union all - select * from updates - {%- if strategy.invalidate_hard_deletes %} - union all - select * from deletes - {%- endif %} - -{%- endmacro %} - - -{% macro build_snapshot_table(strategy, sql) -%} - {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }} -{% endmacro %} - -{% macro default__build_snapshot_table(strategy, sql) %} - - select *, - {{ strategy.scd_id }} as dbt_scd_id, - {{ strategy.updated_at }} as dbt_updated_at, - {{ strategy.updated_at }} as dbt_valid_from, - nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to - from ( - {{ sql }} - ) sbq - -{% endmacro %} - - -{% macro build_snapshot_staging_table(strategy, sql, target_relation) %} - {% set temp_relation = make_temp_relation(target_relation) %} - - {% set select = snapshot_staging_table(strategy, sql, target_relation) %} - - {% call statement('build_snapshot_staging_relation') %} - {{ create_table_as(True, temp_relation, select) }} - {% endcall %} - - {% do return(temp_relation) %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql b/core/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql deleted file mode 100644 index b0fe9222ab6..00000000000 --- a/core/dbt/include/global_project/macros/materializations/snapshots/snapshot.sql +++ /dev/null @@ -1,99 +0,0 @@ -{% materialization snapshot, default %} - {%- set config = model['config'] -%} - - {%- set target_table = model.get('alias', model.get('name')) -%} - - {%- set strategy_name = config.get('strategy') -%} - {%- set unique_key = config.get('unique_key') %} - -- grab current tables grants config for comparision later on - {%- set grant_config = config.get('grants') -%} - - {% set target_relation_exists, target_relation = get_or_create_relation( - database=model.database, - schema=model.schema, - identifier=target_table, - type='table') -%} - - {%- if not target_relation.is_table -%} - {% do exceptions.relation_wrong_type(target_relation, 'table') %} - {%- endif -%} - - - {{ run_hooks(pre_hooks, inside_transaction=False) }} - - {{ run_hooks(pre_hooks, inside_transaction=True) }} - - {% set strategy_macro = strategy_dispatch(strategy_name) %} - {% set strategy = strategy_macro(model, "snapshotted_data", "source_data", config, target_relation_exists) %} - - {% if not target_relation_exists %} - - {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %} - {% set final_sql = create_table_as(False, target_relation, build_sql) %} - - {% else %} - - {{ adapter.valid_snapshot_target(target_relation) }} - - {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %} - - -- this may no-op if the database does not require column expansion - {% do adapter.expand_target_column_types(from_relation=staging_table, - to_relation=target_relation) %} - - {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation) - | rejectattr('name', 'equalto', 'dbt_change_type') - | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') - | rejectattr('name', 'equalto', 'dbt_unique_key') - | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') - | list %} - - {% do create_columns(target_relation, missing_columns) %} - - {% set 
source_columns = adapter.get_columns_in_relation(staging_table) - | rejectattr('name', 'equalto', 'dbt_change_type') - | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE') - | rejectattr('name', 'equalto', 'dbt_unique_key') - | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY') - | list %} - - {% set quoted_source_columns = [] %} - {% for column in source_columns %} - {% do quoted_source_columns.append(adapter.quote(column.name)) %} - {% endfor %} - - {% set final_sql = snapshot_merge_sql( - target = target_relation, - source = staging_table, - insert_cols = quoted_source_columns - ) - %} - - {% endif %} - - {% call statement('main') %} - {{ final_sql }} - {% endcall %} - - {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %} - {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %} - - {% do persist_docs(target_relation, model) %} - - {% if not target_relation_exists %} - {% do create_indexes(target_relation) %} - {% endif %} - - {{ run_hooks(post_hooks, inside_transaction=True) }} - - {{ adapter.commit() }} - - {% if staging_table is defined %} - {% do post_snapshot(staging_table) %} - {% endif %} - - {{ run_hooks(post_hooks, inside_transaction=False) }} - - {{ return({'relations': [target_relation]}) }} - -{% endmaterialization %} diff --git a/core/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql b/core/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql deleted file mode 100644 index 6bc50fd3bf4..00000000000 --- a/core/dbt/include/global_project/macros/materializations/snapshots/snapshot_merge.sql +++ /dev/null @@ -1,25 +0,0 @@ - -{% macro snapshot_merge_sql(target, source, insert_cols) -%} - {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }} -{%- endmacro %} - - -{% macro default__snapshot_merge_sql(target, source, insert_cols) -%} - {%- set insert_cols_csv = insert_cols | join(', ') -%} - - merge into {{ target }} as DBT_INTERNAL_DEST - using {{ source }} as DBT_INTERNAL_SOURCE - on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id - - when matched - and DBT_INTERNAL_DEST.dbt_valid_to is null - and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete') - then update - set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to - - when not matched - and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert' - then insert ({{ insert_cols_csv }}) - values ({{ insert_cols_csv }}) - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/snapshots/strategies.sql b/core/dbt/include/global_project/macros/materializations/snapshots/strategies.sql deleted file mode 100644 index d22cc33636f..00000000000 --- a/core/dbt/include/global_project/macros/materializations/snapshots/strategies.sql +++ /dev/null @@ -1,174 +0,0 @@ -{# - Dispatch strategies by name, optionally qualified to a package -#} -{% macro strategy_dispatch(name) -%} -{% set original_name = name %} - {% if '.' 
in name %} - {% set package_name, name = name.split(".", 1) %} - {% else %} - {% set package_name = none %} - {% endif %} - - {% if package_name is none %} - {% set package_context = context %} - {% elif package_name in context %} - {% set package_context = context[package_name] %} - {% else %} - {% set error_msg %} - Could not find package '{{package_name}}', called with '{{original_name}}' - {% endset %} - {{ exceptions.raise_compiler_error(error_msg | trim) }} - {% endif %} - - {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%} - - {% if search_name not in package_context %} - {% set error_msg %} - The specified strategy macro '{{name}}' was not found in package '{{ package_name }}' - {% endset %} - {{ exceptions.raise_compiler_error(error_msg | trim) }} - {% endif %} - {{ return(package_context[search_name]) }} -{%- endmacro %} - - -{# - Create SCD Hash SQL fields cross-db -#} -{% macro snapshot_hash_arguments(args) -%} - {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }} -{%- endmacro %} - -{% macro default__snapshot_hash_arguments(args) -%} - md5({%- for arg in args -%} - coalesce(cast({{ arg }} as varchar ), '') - {% if not loop.last %} || '|' || {% endif %} - {%- endfor -%}) -{%- endmacro %} - -{# - Core strategy definitions -#} -{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} - {% set primary_key = config['unique_key'] %} - {% set updated_at = config['updated_at'] %} - {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %} - - {#/* - The snapshot relation might not have an {{ updated_at }} value if the - snapshot strategy is changed from `check` to `timestamp`. We - should use a dbt-created column for the comparison in the snapshot - table instead of assuming that the user-supplied {{ updated_at }} - will be present in the historical data. - - See https://github.com/dbt-labs/dbt-core/issues/2350 - */ #} - {% set row_changed_expr -%} - ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }}) - {%- endset %} - - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} - - {% do return({ - "unique_key": primary_key, - "updated_at": updated_at, - "row_changed": row_changed_expr, - "scd_id": scd_id_expr, - "invalidate_hard_deletes": invalidate_hard_deletes - }) %} -{% endmacro %} - - -{% macro snapshot_string_as_time(timestamp) -%} - {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }} -{%- endmacro %} - -{% macro default__snapshot_string_as_time(timestamp) %} - {% do exceptions.raise_not_implemented( - 'snapshot_string_as_time macro not implemented for adapter '+adapter.type() - ) %} -{% endmacro %} - - -{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%} - {%- if not target_exists -%} - {#-- no table yet -> return whatever the query does --#} - {{ return((false, query_columns)) }} - {%- endif -%} - - {#-- handle any schema changes --#} - {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%} - - {% if check_cols_config == 'all' %} - {%- set query_columns = get_columns_in_query(node['compiled_code']) -%} - - {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %} - {#-- query for proper casing/quoting, to support comparison below --#} - {%- set select_check_cols_from_target -%} - {#-- N.B. 
The whitespace below is necessary to avoid edge case issue with comments --#} - {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#} - select {{ check_cols_config | join(', ') }} from ( - {{ node['compiled_code'] }} - ) subq - {%- endset -%} - {% set query_columns = get_columns_in_query(select_check_cols_from_target) %} - - {% else %} - {% do exceptions.raise_compiler_error("Invalid value for 'check_cols': " ~ check_cols_config) %} - {% endif %} - - {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%} - {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#} - {%- set ns.column_added = false -%} - - {%- set intersection = [] -%} - {%- for col in query_columns -%} - {%- if col in existing_cols -%} - {%- do intersection.append(adapter.quote(col)) -%} - {%- else -%} - {% set ns.column_added = true %} - {%- endif -%} - {%- endfor -%} - {{ return((ns.column_added, intersection)) }} -{%- endmacro %} - - -{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %} - {% set check_cols_config = config['check_cols'] %} - {% set primary_key = config['unique_key'] %} - {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %} - {% set updated_at = config.get('updated_at', snapshot_get_time()) %} - - {% set column_added = false %} - - {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %} - - {%- set row_changed_expr -%} - ( - {%- if column_added -%} - {{ get_true_sql() }} - {%- else -%} - {%- for col in check_cols -%} - {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }} - or - ( - (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null)) - or - ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null)) - ) - {%- if not loop.last %} or {% endif -%} - {%- endfor -%} - {%- endif -%} - ) - {%- endset %} - - {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %} - - {% do return({ - "unique_key": primary_key, - "updated_at": updated_at, - "row_changed": row_changed_expr, - "scd_id": scd_id_expr, - "invalidate_hard_deletes": invalidate_hard_deletes - }) %} -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/tests/helpers.sql b/core/dbt/include/global_project/macros/materializations/tests/helpers.sql deleted file mode 100644 index efc55288076..00000000000 --- a/core/dbt/include/global_project/macros/materializations/tests/helpers.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} - {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }} -{%- endmacro %} - -{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%} - select - {{ fail_calc }} as failures, - {{ fail_calc }} {{ warn_if }} as should_warn, - {{ fail_calc }} {{ error_if }} as should_error - from ( - {{ main_sql }} - {{ "limit " ~ limit if limit != none }} - ) dbt_internal_test -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/materializations/tests/test.sql b/core/dbt/include/global_project/macros/materializations/tests/test.sql deleted file mode 100644 index fb6755058fd..00000000000 --- a/core/dbt/include/global_project/macros/materializations/tests/test.sql +++ /dev/null @@ -1,48 +0,0 @@ -{%- materialization test, default -%} - - {% set relations = [] %} - - {% if 
should_store_failures() %} - - {% set identifier = model['alias'] %} - {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %} - {% set target_relation = api.Relation.create( - identifier=identifier, schema=schema, database=database, type='table') -%} %} - - {% if old_relation %} - {% do adapter.drop_relation(old_relation) %} - {% endif %} - - {% call statement(auto_begin=True) %} - {{ create_table_as(False, target_relation, sql) }} - {% endcall %} - - {% do relations.append(target_relation) %} - - {% set main_sql %} - select * - from {{ target_relation }} - {% endset %} - - {{ adapter.commit() }} - - {% else %} - - {% set main_sql = sql %} - - {% endif %} - - {% set limit = config.get('limit') %} - {% set fail_calc = config.get('fail_calc') %} - {% set warn_if = config.get('warn_if') %} - {% set error_if = config.get('error_if') %} - - {% call statement('main', fetch_result=True) -%} - - {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}} - - {%- endcall %} - - {{ return({'relations': relations}) }} - -{%- endmaterialization -%} diff --git a/core/dbt/include/global_project/macros/materializations/tests/where_subquery.sql b/core/dbt/include/global_project/macros/materializations/tests/where_subquery.sql deleted file mode 100644 index 332c537a3c9..00000000000 --- a/core/dbt/include/global_project/macros/materializations/tests/where_subquery.sql +++ /dev/null @@ -1,15 +0,0 @@ -{% macro get_where_subquery(relation) -%} - {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %} -{%- endmacro %} - -{% macro default__get_where_subquery(relation) -%} - {% set where = config.get('where', '') %} - {% if where %} - {%- set filtered -%} - (select * from {{ relation }} where {{ where }}) dbt_subquery - {%- endset -%} - {% do return(filtered) %} - {%- else -%} - {% do return(relation) %} - {%- endif -%} -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/python_model/python.sql b/core/dbt/include/global_project/macros/python_model/python.sql deleted file mode 100644 index d658ff185b2..00000000000 --- a/core/dbt/include/global_project/macros/python_model/python.sql +++ /dev/null @@ -1,103 +0,0 @@ -{% macro resolve_model_name(input_model_name) %} - {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }} -{% endmacro %} - -{%- macro default__resolve_model_name(input_model_name) -%} - {{ input_model_name | string | replace('"', '\"') }} -{%- endmacro -%} - -{% macro build_ref_function(model) %} - - {%- set ref_dict = {} -%} - {%- for _ref in model.refs -%} - {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %} - {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%} - {%- if _ref.get('version') -%} - {% do _ref_args.extend(["v" ~ _ref['version']]) %} - {%- endif -%} - {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%} - {%- endfor -%} - -def ref(*args, **kwargs): - refs = {{ ref_dict | tojson }} - key = '.'.join(args) - version = kwargs.get("v") or kwargs.get("version") - if version: - key += f".v{version}" - dbt_load_df_function = kwargs.get("dbt_load_df_function") - return dbt_load_df_function(refs[key]) - -{% endmacro %} - -{% macro build_source_function(model) %} - - {%- set source_dict = {} -%} - {%- for _source in model.sources -%} - {%- set resolved = source(*_source) -%} - {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%} - {%- endfor -%} - -def source(*args, 
dbt_load_df_function): - sources = {{ source_dict | tojson }} - key = '.'.join(args) - return dbt_load_df_function(sources[key]) - -{% endmacro %} - -{% macro build_config_dict(model) %} - {%- set config_dict = {} -%} - {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %} - {%- for key, default in config_dbt_used -%} - {# weird type testing with enum, would be much easier to write this logic in Python! #} - {%- if key == "language" -%} - {%- set value = "python" -%} - {%- endif -%} - {%- set value = model.config.get(key, default) -%} - {%- do config_dict.update({key: value}) -%} - {%- endfor -%} -config_dict = {{ config_dict }} -{% endmacro %} - -{% macro py_script_postfix(model) %} -# This part is user provided model code -# you will need to copy the next section to run the code -# COMMAND ---------- -# this part is dbt logic for get ref work, do not modify - -{{ build_ref_function(model ) }} -{{ build_source_function(model ) }} -{{ build_config_dict(model) }} - -class config: - def __init__(self, *args, **kwargs): - pass - - @staticmethod - def get(key, default=None): - return config_dict.get(key, default) - -class this: - """dbt.this() or dbt.this.identifier""" - database = "{{ this.database }}" - schema = "{{ this.schema }}" - identifier = "{{ this.identifier }}" - {% set this_relation_name = resolve_model_name(this) %} - def __repr__(self): - return '{{ this_relation_name }}' - - -class dbtObj: - def __init__(self, load_df_function) -> None: - self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function) - self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function) - self.config = config - self.this = this() - self.is_incremental = {{ is_incremental() }} - -# COMMAND ---------- -{{py_script_comment()}} -{% endmacro %} - -{#-- entry point for add instuctions for running compiled_code --#} -{%macro py_script_comment()%} -{%endmacro%} diff --git a/core/dbt/include/global_project/macros/utils/any_value.sql b/core/dbt/include/global_project/macros/utils/any_value.sql deleted file mode 100644 index a47292524ae..00000000000 --- a/core/dbt/include/global_project/macros/utils/any_value.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro any_value(expression) -%} - {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }} -{% endmacro %} - -{% macro default__any_value(expression) -%} - - any_value({{ expression }}) - -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/array_append.sql b/core/dbt/include/global_project/macros/utils/array_append.sql deleted file mode 100644 index a7d3959b84f..00000000000 --- a/core/dbt/include/global_project/macros/utils/array_append.sql +++ /dev/null @@ -1,8 +0,0 @@ -{% macro array_append(array, new_element) -%} - {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }} -{%- endmacro %} - -{# new_element must be the same data type as elements in array to match postgres functionality #} -{% macro default__array_append(array, new_element) -%} - array_append({{ array }}, {{ new_element }}) -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/array_concat.sql b/core/dbt/include/global_project/macros/utils/array_concat.sql deleted file mode 100644 index b27ba8d7f3a..00000000000 --- a/core/dbt/include/global_project/macros/utils/array_concat.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro array_concat(array_1, array_2) -%} - {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }} -{%- endmacro 
%} - -{% macro default__array_concat(array_1, array_2) -%} - array_cat({{ array_1 }}, {{ array_2 }}) -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/array_construct.sql b/core/dbt/include/global_project/macros/utils/array_construct.sql deleted file mode 100644 index 2e503a37365..00000000000 --- a/core/dbt/include/global_project/macros/utils/array_construct.sql +++ /dev/null @@ -1,12 +0,0 @@ -{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%} - {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }} -{%- endmacro %} - -{# all inputs must be the same data type to match postgres functionality #} -{% macro default__array_construct(inputs, data_type) -%} - {% if inputs|length > 0 %} - array[ {{ inputs|join(' , ') }} ] - {% else %} - array[]::{{data_type}}[] - {% endif %} -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/bool_or.sql b/core/dbt/include/global_project/macros/utils/bool_or.sql deleted file mode 100644 index 34e59d999ed..00000000000 --- a/core/dbt/include/global_project/macros/utils/bool_or.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro bool_or(expression) -%} - {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }} -{% endmacro %} - -{% macro default__bool_or(expression) -%} - - bool_or({{ expression }}) - -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/cast_bool_to_text.sql b/core/dbt/include/global_project/macros/utils/cast_bool_to_text.sql deleted file mode 100644 index 5f5c0331623..00000000000 --- a/core/dbt/include/global_project/macros/utils/cast_bool_to_text.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro cast_bool_to_text(field) %} - {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }} -{% endmacro %} - -{% macro default__cast_bool_to_text(field) %} - cast({{ field }} as {{ api.Column.translate_type('string') }}) -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/concat.sql b/core/dbt/include/global_project/macros/utils/concat.sql deleted file mode 100644 index 27bf3c9b4e9..00000000000 --- a/core/dbt/include/global_project/macros/utils/concat.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro concat(fields) -%} - {{ return(adapter.dispatch('concat', 'dbt')(fields)) }} -{%- endmacro %} - -{% macro default__concat(fields) -%} - {{ fields|join(' || ') }} -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/data_types.sql b/core/dbt/include/global_project/macros/utils/data_types.sql deleted file mode 100644 index c4508ff3066..00000000000 --- a/core/dbt/include/global_project/macros/utils/data_types.sql +++ /dev/null @@ -1,129 +0,0 @@ -{# string ------------------------------------------------- #} - -{%- macro type_string() -%} - {{ return(adapter.dispatch('type_string', 'dbt')()) }} -{%- endmacro -%} - -{% macro default__type_string() %} - {{ return(api.Column.translate_type("string")) }} -{% endmacro %} - --- This will return 'text' by default --- On Postgres + Snowflake, that's equivalent to varchar (no size) --- Redshift will treat that as varchar(256) - - -{# timestamp ------------------------------------------------- #} - -{%- macro type_timestamp() -%} - {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }} -{%- endmacro -%} - -{% macro default__type_timestamp() %} - {{ return(api.Column.translate_type("timestamp")) }} -{% endmacro %} - -/* -POSTGRES -https://www.postgresql.org/docs/current/datatype-datetime.html: -The SQL standard requires that writing just `timestamp` -be equivalent to 
`timestamp without time zone`, and -PostgreSQL honors that behavior. -`timestamptz` is accepted as an abbreviation for `timestamp with time zone`; -this is a PostgreSQL extension. - -SNOWFLAKE -https://docs.snowflake.com/en/sql-reference/data-types-datetime.html#timestamp -The TIMESTAMP_* variation associated with TIMESTAMP is specified by the -TIMESTAMP_TYPE_MAPPING session parameter. The default is TIMESTAMP_NTZ. - -BIGQUERY -TIMESTAMP means 'timestamp with time zone' -DATETIME means 'timestamp without time zone' -TODO: shouldn't this return DATETIME instead of TIMESTAMP, for consistency with other databases? -e.g. dateadd returns a DATETIME - -/* Snowflake: -https://docs.snowflake.com/en/sql-reference/data-types-datetime.html#timestamp -The TIMESTAMP_* variation associated with TIMESTAMP is specified by the TIMESTAMP_TYPE_MAPPING session parameter. The default is TIMESTAMP_NTZ. -*/ - - -{# float ------------------------------------------------- #} - -{%- macro type_float() -%} - {{ return(adapter.dispatch('type_float', 'dbt')()) }} -{%- endmacro -%} - -{% macro default__type_float() %} - {{ return(api.Column.translate_type("float")) }} -{% endmacro %} - -{# numeric ------------------------------------------------- #} - -{%- macro type_numeric() -%} - {{ return(adapter.dispatch('type_numeric', 'dbt')()) }} -{%- endmacro -%} - -/* -This one can't be just translate_type, since precision/scale make it a bit more complicated. - -On most databases, the default (precision, scale) is something like: - Redshift: (18, 0) - Snowflake: (38, 0) - Postgres: (<=131072, 0) - -https://www.postgresql.org/docs/current/datatype-numeric.html: -Specifying NUMERIC without any precision or scale creates an “unconstrained numeric” -column in which numeric values of any length can be stored, up to the implementation limits. -A column of this kind will not coerce input values to any particular scale, -whereas numeric columns with a declared scale will coerce input values to that scale. -(The SQL standard requires a default scale of 0, i.e., coercion to integer precision. -We find this a bit useless. If you're concerned about portability, always specify -the precision and scale explicitly.) -*/ - -{% macro default__type_numeric() %} - {{ return(api.Column.numeric_type("numeric", 28, 6)) }} -{% endmacro %} - - -{# bigint ------------------------------------------------- #} - -{%- macro type_bigint() -%} - {{ return(adapter.dispatch('type_bigint', 'dbt')()) }} -{%- endmacro -%} - --- We don't have a conversion type for 'bigint' in TYPE_LABELS, --- so this actually just returns the string 'bigint' - -{% macro default__type_bigint() %} - {{ return(api.Column.translate_type("bigint")) }} -{% endmacro %} - --- Good news: BigQuery now supports 'bigint' (and 'int') as an alias for 'int64' - -{# int ------------------------------------------------- #} - -{%- macro type_int() -%} - {{ return(adapter.dispatch('type_int', 'dbt')()) }} -{%- endmacro -%} - -{%- macro default__type_int() -%} - {{ return(api.Column.translate_type("integer")) }} -{%- endmacro -%} - --- returns 'int' everywhere, except BigQuery, where it returns 'int64' --- (but BigQuery also now accepts 'int' as a valid alias for 'int64') - -{# bool ------------------------------------------------- #} - -{%- macro type_boolean() -%} - {{ return(adapter.dispatch('type_boolean', 'dbt')()) }} -{%- endmacro -%} - -{%- macro default__type_boolean() -%} - {{ return(api.Column.translate_type("boolean")) }} -{%- endmacro -%} - --- returns 'boolean' everywhere. 
BigQuery accepts 'boolean' as a valid alias for 'bool' diff --git a/core/dbt/include/global_project/macros/utils/date_trunc.sql b/core/dbt/include/global_project/macros/utils/date_trunc.sql deleted file mode 100644 index deadc40864a..00000000000 --- a/core/dbt/include/global_project/macros/utils/date_trunc.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro date_trunc(datepart, date) -%} - {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }} -{%- endmacro %} - -{% macro default__date_trunc(datepart, date) -%} - date_trunc('{{datepart}}', {{date}}) -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/dateadd.sql b/core/dbt/include/global_project/macros/utils/dateadd.sql deleted file mode 100644 index 2e246098fe5..00000000000 --- a/core/dbt/include/global_project/macros/utils/dateadd.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro dateadd(datepart, interval, from_date_or_timestamp) %} - {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }} -{% endmacro %} - - -{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %} - - dateadd( - {{ datepart }}, - {{ interval }}, - {{ from_date_or_timestamp }} - ) - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/datediff.sql b/core/dbt/include/global_project/macros/utils/datediff.sql deleted file mode 100644 index 7d70d331d86..00000000000 --- a/core/dbt/include/global_project/macros/utils/datediff.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro datediff(first_date, second_date, datepart) %} - {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }} -{% endmacro %} - - -{% macro default__datediff(first_date, second_date, datepart) -%} - - datediff( - {{ datepart }}, - {{ first_date }}, - {{ second_date }} - ) - -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/escape_single_quotes.sql b/core/dbt/include/global_project/macros/utils/escape_single_quotes.sql deleted file mode 100644 index d86b6131c29..00000000000 --- a/core/dbt/include/global_project/macros/utils/escape_single_quotes.sql +++ /dev/null @@ -1,8 +0,0 @@ -{% macro escape_single_quotes(expression) %} - {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }} -{% endmacro %} - -{# /*Default to replacing a single apostrophe with two apostrophes: they're -> they''re*/ #} -{% macro default__escape_single_quotes(expression) -%} -{{ expression | replace("'","''") }} -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/except.sql b/core/dbt/include/global_project/macros/utils/except.sql deleted file mode 100644 index 91d54013d80..00000000000 --- a/core/dbt/include/global_project/macros/utils/except.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro except() %} - {{ return(adapter.dispatch('except', 'dbt')()) }} -{% endmacro %} - -{% macro default__except() %} - - except - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/hash.sql b/core/dbt/include/global_project/macros/utils/hash.sql deleted file mode 100644 index efa12db7ce6..00000000000 --- a/core/dbt/include/global_project/macros/utils/hash.sql +++ /dev/null @@ -1,7 +0,0 @@ -{% macro hash(field) -%} - {{ return(adapter.dispatch('hash', 'dbt') (field)) }} -{%- endmacro %} - -{% macro default__hash(field) -%} - md5(cast({{ field }} as {{ api.Column.translate_type('string') }})) -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/intersect.sql b/core/dbt/include/global_project/macros/utils/intersect.sql deleted file mode 
100644 index 6e8ede00bc2..00000000000 --- a/core/dbt/include/global_project/macros/utils/intersect.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro intersect() %} - {{ return(adapter.dispatch('intersect', 'dbt')()) }} -{% endmacro %} - -{% macro default__intersect() %} - - intersect - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/last_day.sql b/core/dbt/include/global_project/macros/utils/last_day.sql deleted file mode 100644 index 6a1aa99c045..00000000000 --- a/core/dbt/include/global_project/macros/utils/last_day.sql +++ /dev/null @@ -1,15 +0,0 @@ -{% macro last_day(date, datepart) %} - {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }} -{% endmacro %} - -{%- macro default_last_day(date, datepart) -%} - cast( - {{dbt.dateadd('day', '-1', - dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date)) - )}} - as date) -{%- endmacro -%} - -{% macro default__last_day(date, datepart) -%} - {{dbt.default_last_day(date, datepart)}} -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/length.sql b/core/dbt/include/global_project/macros/utils/length.sql deleted file mode 100644 index 1b2fd55e725..00000000000 --- a/core/dbt/include/global_project/macros/utils/length.sql +++ /dev/null @@ -1,11 +0,0 @@ -{% macro length(expression) -%} - {{ return(adapter.dispatch('length', 'dbt') (expression)) }} -{% endmacro %} - -{% macro default__length(expression) %} - - length( - {{ expression }} - ) - -{%- endmacro -%} diff --git a/core/dbt/include/global_project/macros/utils/listagg.sql b/core/dbt/include/global_project/macros/utils/listagg.sql deleted file mode 100644 index f785ca1e682..00000000000 --- a/core/dbt/include/global_project/macros/utils/listagg.sql +++ /dev/null @@ -1,30 +0,0 @@ -{% macro listagg(measure, delimiter_text="','", order_by_clause=none, limit_num=none) -%} - {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }} -{%- endmacro %} - -{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%} - - {% if limit_num -%} - array_to_string( - array_slice( - array_agg( - {{ measure }} - ){% if order_by_clause -%} - within group ({{ order_by_clause }}) - {%- endif %} - ,0 - ,{{ limit_num }} - ), - {{ delimiter_text }} - ) - {%- else %} - listagg( - {{ measure }}, - {{ delimiter_text }} - ) - {% if order_by_clause -%} - within group ({{ order_by_clause }}) - {%- endif %} - {%- endif %} - -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/literal.sql b/core/dbt/include/global_project/macros/utils/literal.sql deleted file mode 100644 index ccb0b823513..00000000000 --- a/core/dbt/include/global_project/macros/utils/literal.sql +++ /dev/null @@ -1,7 +0,0 @@ -{%- macro string_literal(value) -%} - {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }} -{%- endmacro -%} - -{% macro default__string_literal(value) -%} - '{{ value }}' -{%- endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/position.sql b/core/dbt/include/global_project/macros/utils/position.sql deleted file mode 100644 index dde3ee2bee0..00000000000 --- a/core/dbt/include/global_project/macros/utils/position.sql +++ /dev/null @@ -1,11 +0,0 @@ -{% macro position(substring_text, string_text) -%} - {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }} -{% endmacro %} - -{% macro default__position(substring_text, string_text) %} - - position( - {{ substring_text }} in {{ string_text }} - ) - -{%- endmacro -%} diff --git 
a/core/dbt/include/global_project/macros/utils/replace.sql b/core/dbt/include/global_project/macros/utils/replace.sql deleted file mode 100644 index 478809f2c54..00000000000 --- a/core/dbt/include/global_project/macros/utils/replace.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro replace(field, old_chars, new_chars) -%} - {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }} -{% endmacro %} - -{% macro default__replace(field, old_chars, new_chars) %} - - replace( - {{ field }}, - {{ old_chars }}, - {{ new_chars }} - ) - - -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/right.sql b/core/dbt/include/global_project/macros/utils/right.sql deleted file mode 100644 index 5782a2539df..00000000000 --- a/core/dbt/include/global_project/macros/utils/right.sql +++ /dev/null @@ -1,12 +0,0 @@ -{% macro right(string_text, length_expression) -%} - {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }} -{% endmacro %} - -{% macro default__right(string_text, length_expression) %} - - right( - {{ string_text }}, - {{ length_expression }} - ) - -{%- endmacro -%} diff --git a/core/dbt/include/global_project/macros/utils/safe_cast.sql b/core/dbt/include/global_project/macros/utils/safe_cast.sql deleted file mode 100644 index 53ee7fd2e97..00000000000 --- a/core/dbt/include/global_project/macros/utils/safe_cast.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro safe_cast(field, type) %} - {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }} -{% endmacro %} - -{% macro default__safe_cast(field, type) %} - {# most databases don't support this function yet - so we just need to use cast #} - cast({{field}} as {{type}}) -{% endmacro %} diff --git a/core/dbt/include/global_project/macros/utils/split_part.sql b/core/dbt/include/global_project/macros/utils/split_part.sql deleted file mode 100644 index 766539ac0c1..00000000000 --- a/core/dbt/include/global_project/macros/utils/split_part.sql +++ /dev/null @@ -1,26 +0,0 @@ -{% macro split_part(string_text, delimiter_text, part_number) %} - {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }} -{% endmacro %} - -{% macro default__split_part(string_text, delimiter_text, part_number) %} - - split_part( - {{ string_text }}, - {{ delimiter_text }}, - {{ part_number }} - ) - -{% endmacro %} - -{% macro _split_part_negative(string_text, delimiter_text, part_number) %} - - split_part( - {{ string_text }}, - {{ delimiter_text }}, - length({{ string_text }}) - - length( - replace({{ string_text }}, {{ delimiter_text }}, '') - ) + 2 + {{ part_number }} - ) - -{% endmacro %} diff --git a/core/dbt/include/global_project/tests/generic/builtin.sql b/core/dbt/include/global_project/tests/generic/builtin.sql deleted file mode 100644 index 23a7507fa0a..00000000000 --- a/core/dbt/include/global_project/tests/generic/builtin.sql +++ /dev/null @@ -1,30 +0,0 @@ -/* {# - Generic tests can be defined in `macros/` or in `tests/generic`. - These four tests are built into the dbt-core global project. - To support extensibility to other adapters and SQL dialects, - they call 'dispatched' macros. 
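The `_split_part_negative` macro removed just above converts a negative part number into the equivalent 1-based index counted from the left: the number of parts is the number of delimiters plus one, so the index works out to length(s) - length(replace(s, d, '')) + 2 + part_number. A minimal Python sketch of that arithmetic, assuming a single-character delimiter (the helper name is invented for illustration and is not part of this diff):

    def negative_part_index(string_text: str, delimiter_text: str, part_number: int) -> int:
        # parts = delimiters + 1; for a negative part_number the equivalent
        # positive index is parts + 1 + part_number, which is exactly
        # len(s) - len(s.replace(d, "")) + 2 + part_number
        return len(string_text) - len(string_text.replace(delimiter_text, "")) + 2 + part_number

    assert negative_part_index("a,b,c", ",", -1) == 3  # 'c', the last part
    assert negative_part_index("a,b,c", ",", -2) == 2  # 'b', the second-to-last part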
By default, they will use - the SQL defined in `global_project/macros/generic_test_sql` -#} */ - -{% test unique(model, column_name) %} - {% set macro = adapter.dispatch('test_unique', 'dbt') %} - {{ macro(model, column_name) }} -{% endtest %} - - -{% test not_null(model, column_name) %} - {% set macro = adapter.dispatch('test_not_null', 'dbt') %} - {{ macro(model, column_name) }} -{% endtest %} - - -{% test accepted_values(model, column_name, values, quote=True) %} - {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %} - {{ macro(model, column_name, values, quote) }} -{% endtest %} - - -{% test relationships(model, column_name, to, field) %} - {% set macro = adapter.dispatch('test_relationships', 'dbt') %} - {{ macro(model, column_name, to, field) }} -{% endtest %} diff --git a/core/dbt/include/index.html b/core/dbt/include/index.html deleted file mode 100644 index ac81278c348..00000000000 --- a/core/dbt/include/index.html +++ /dev/null @@ -1,102 +0,0 @@ -<!DOCTYPE html> -<html dir="ltr" lang="en-US" ng-app='dbt' class='no-flash video supports no-touchevents formvalidation webgl no-cssgridlegacy cssgrid cssfilters objectfit object-fit click landscape videoautoplay loaded'> - <head> - <meta charset="utf-8"> - <meta name="viewport" content="width=device-width, minimum-scale=1, maximum-scale=1, initial-scale=1" /> - - <title>dbt Docs [remainder of the deleted 102-line minified index.html (the bundled dbt Docs page) is not reproduced here]
- - diff --git a/core/dbt/include/starter_project/dbt_project.yml b/core/dbt/include/starter_project/dbt_project.yml index 630001eed2f..c7e1fcdb0ef 100644 --- a/core/dbt/include/starter_project/dbt_project.yml +++ b/core/dbt/include/starter_project/dbt_project.yml @@ -4,7 +4,6 @@ # name or the intended use of these models name: '{project_name}' version: '1.0.0' -config-version: 2 # This setting configures which "profile" dbt uses for this project. profile: '{profile_name}' diff --git a/core/dbt/include/starter_project/models/example/schema.yml b/core/dbt/include/starter_project/models/example/schema.yml index 2a53081715a..9730b7071b5 100644 --- a/core/dbt/include/starter_project/models/example/schema.yml +++ b/core/dbt/include/starter_project/models/example/schema.yml @@ -7,7 +7,7 @@ models: columns: - name: id description: "The primary key for this table" - tests: + data_tests: - unique - not_null @@ -16,6 +16,6 @@ models: columns: - name: id description: "The primary key for this table" - tests: + data_tests: - unique - not_null diff --git a/core/dbt/internal_deprecations.py b/core/dbt/internal_deprecations.py index fbc435026b6..22d389586f2 100644 --- a/core/dbt/internal_deprecations.py +++ b/core/dbt/internal_deprecations.py @@ -1,8 +1,8 @@ import functools from typing import Optional -from dbt.events.functions import warn_or_error from dbt.events.types import InternalDeprecation +from dbt_common.events.functions import warn_or_error def deprecated(suggested_action: str, version: str, reason: Optional[str]): diff --git a/core/dbt/logger.py b/core/dbt/logger.py deleted file mode 100644 index d4095fb73bf..00000000000 --- a/core/dbt/logger.py +++ /dev/null @@ -1,536 +0,0 @@ -import dbt.flags -import dbt.ui - -import json -import logging -import os -import sys -import time -import warnings -from dataclasses import dataclass -from datetime import datetime -from typing import Optional, List, ContextManager, Callable, Dict, Any, Set - -import colorama -import logbook -from dbt.constants import SECRET_ENV_PREFIX -from dbt.dataclass_schema import dbtClassMixin - -# Colorama is needed for colored logs on Windows because we're using logger.info -# intead of print(). If the Windows env doesn't have a TERM var set or it is set to None -# (i.e. in the case of Git Bash on Windows- this emulates Unix), then it's safe to initialize -# Colorama with wrapping turned on which allows us to strip ANSI sequences from stdout. -# You can safely initialize Colorama for any OS and the coloring stays the same except -# when piped to anoter process for Linux and MacOS, then it loses the coloring. To combat -# that, we will just initialize Colorama when needed on Windows using a non-Unix terminal. 
- -if sys.platform == "win32" and (not os.getenv("TERM") or os.getenv("TERM") == "None"): - colorama.init(wrap=True) - -STDOUT_LOG_FORMAT = "{record.message}" -DEBUG_LOG_FORMAT = "{record.time:%Y-%m-%d %H:%M:%S.%f%z} ({record.thread_name}): {record.message}" - - -def get_secret_env() -> List[str]: - return [v for k, v in os.environ.items() if k.startswith(SECRET_ENV_PREFIX)] - - -ExceptionInformation = str - - -@dataclass -class LogMessage(dbtClassMixin): - timestamp: datetime - message: str - channel: str - level: int - levelname: str - thread_name: str - process: int - extra: Optional[Dict[str, Any]] = None - exc_info: Optional[ExceptionInformation] = None - - @classmethod - def from_record_formatted(cls, record: logbook.LogRecord, message: str): - extra = dict(record.extra) - log_message = LogMessage( - timestamp=record.time, - message=message, - channel=record.channel, - level=record.level, - levelname=logbook.get_level_name(record.level), - extra=extra, - thread_name=record.thread_name, - process=record.process, - exc_info=record.formatted_exception, - ) - return log_message - - -class LogMessageFormatter(logbook.StringFormatter): - def __call__(self, record, handler): - data = self.format_record(record, handler) - exc = self.format_exception(record) - if exc: - data.exc_info = exc - return data - - def format_record(self, record, handler): - message = super().format_record(record, handler) - return LogMessage.from_record_formatted(record, message) - - -class JsonFormatter(LogMessageFormatter): - def __call__(self, record, handler): - """Return a the record converted to LogMessage's JSON form""" - # utils imports exceptions which imports logger... - import dbt.utils - - log_message = super().__call__(record, handler) - dct = log_message.to_dict(omit_none=True) - return json.dumps(dct, cls=dbt.utils.JSONEncoder) - - -class FormatterMixin: - def __init__(self, format_string): - self._text_format_string = format_string - self.formatter_class = logbook.StringFormatter - # triggers a formatter update via logbook.StreamHandler - self.format_string = self._text_format_string - - def format_json(self): - # set our formatter to the json formatter - self.formatter_class = JsonFormatter - self.format_string = STDOUT_LOG_FORMAT - - def format_text(self): - # set our formatter to the regular stdout/stderr handler - self.formatter_class = logbook.StringFormatter - self.format_string = self._text_format_string - - def reset(self): - raise NotImplementedError("reset() not implemented in FormatterMixin subclass") - - -class OutputHandler(logbook.StreamHandler, FormatterMixin): - """Output handler. - - The `format_string` parameter only changes the default text output, not - debug mode or json. - """ - - def __init__( - self, - stream, - level=logbook.INFO, - format_string=STDOUT_LOG_FORMAT, - bubble=True, - ) -> None: - self._default_format = format_string - logbook.StreamHandler.__init__( - self, - stream=stream, - level=level, - format_string=format_string, - bubble=bubble, - ) - FormatterMixin.__init__(self, format_string) - - def set_text_format(self, format_string: str): - """Set the text format to format_string. In JSON output mode, this is - a noop. 
- """ - if self.formatter_class is logbook.StringFormatter: - # reset text format - self._text_format_string = format_string - self.format_text() - - def reset(self): - self.level = logbook.INFO - self._text_format_string = self._default_format - self.format_text() - - def should_handle(self, record): - if record.level < self.level: - return False - text_mode = self.formatter_class is logbook.StringFormatter - if text_mode and record.extra.get("json_only", False): - return False - elif not text_mode and record.extra.get("text_only", False): - return False - else: - return True - - -def _root_channel(record: logbook.LogRecord) -> str: - return record.channel.split(".")[0] - - -class Relevel(logbook.Processor): - def __init__( - self, - allowed: List[str], - min_level=logbook.WARNING, - target_level=logbook.DEBUG, - ) -> None: - self.allowed: Set[str] = set(allowed) - self.min_level = min_level - self.target_level = target_level - super().__init__() - - def process(self, record): - if _root_channel(record) in self.allowed: - return - record.extra["old_level"] = record.level - # suppress logs at/below our min level by lowering them to NOTSET - if record.level < self.min_level: - record.level = logbook.NOTSET - # if we didn't mess with it, then lower all logs above our level to - # our target level. - else: - record.level = self.target_level - - -class TextOnly(logbook.Processor): - def process(self, record): - record.extra["text_only"] = True - - -class TimingProcessor(logbook.Processor): - def __init__(self, timing_info: Optional[dbtClassMixin] = None): - self.timing_info = timing_info - super().__init__() - - def process(self, record): - if self.timing_info is not None: - record.extra["timing_info"] = self.timing_info.to_dict(omit_none=True) - - -class DbtProcessState(logbook.Processor): - def __init__(self, value: str): - self.value = value - super().__init__() - - def process(self, record): - overwrite = "run_state" not in record.extra or record.extra["run_state"] == "internal" - if overwrite: - record.extra["run_state"] = self.value - - -class DbtModelState(logbook.Processor): - def __init__(self, state: Dict[str, str]): - self.state = state - super().__init__() - - def process(self, record): - record.extra.update(self.state) - - -class DbtStatusMessage(logbook.Processor): - def process(self, record): - record.extra["is_status_message"] = True - - -class UniqueID(logbook.Processor): - def __init__(self, unique_id: str): - self.unique_id = unique_id - super().__init__() - - def process(self, record): - record.extra["unique_id"] = self.unique_id - - -class NodeCount(logbook.Processor): - def __init__(self, node_count: int): - self.node_count = node_count - super().__init__() - - def process(self, record): - record.extra["node_count"] = self.node_count - - -class NodeMetadata(logbook.Processor): - def __init__(self, node, index): - self.node = node - self.index = index - super().__init__() - - def mapping_keys(self): - return [] - - def process_keys(self, record): - for attr, key in self.mapping_keys(): - value = getattr(self.node, attr, None) - if value is not None: - record.extra[key] = value - - def process(self, record): - self.process_keys(record) - record.extra["node_index"] = self.index - - -class ModelMetadata(NodeMetadata): - def mapping_keys(self): - return [ - ("alias", "node_alias"), - ("schema", "node_schema"), - ("database", "node_database"), - ("original_file_path", "node_path"), - ("name", "node_name"), - ("resource_type", "resource_type"), - ("depends_on_nodes", 
"depends_on"), - ] - - def process_config(self, record): - if hasattr(self.node, "config"): - materialized = getattr(self.node.config, "materialized", None) - if materialized is not None: - record.extra["node_materialized"] = materialized - - def process(self, record): - super().process(record) - self.process_config(record) - - -class HookMetadata(NodeMetadata): - def mapping_keys(self): - return [ - ("name", "node_name"), - ("resource_type", "resource_type"), - ] - - -class TimestampNamed(logbook.Processor): - def __init__(self, name: str): - self.name = name - super().__init__() - - def process(self, record): - super().process(record) - record.extra[self.name] = datetime.utcnow().isoformat() - - -class ScrubSecrets(logbook.Processor): - def process(self, record): - for secret in get_secret_env(): - record.message = str(record.message).replace(secret, "*****") - - -logger = logbook.Logger("dbt") -# provide this for the cache, disabled by default -CACHE_LOGGER = logbook.Logger("dbt.cache") -CACHE_LOGGER.disable() - -warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*") - -initialized = False - - -def make_log_dir_if_missing(log_dir): - import dbt.clients.system - - dbt.clients.system.make_directory(log_dir) - - -class DebugWarnings(logbook.compat.redirected_warnings): - """Log warnings, except send them to 'debug' instead of 'warning' level.""" - - def make_record(self, message, exception, filename, lineno): - rv = super().make_record(message, exception, filename, lineno) - rv.level = logbook.DEBUG - rv.extra["from_warnings"] = True - return rv - - -# push Python warnings to debug level logs. This will suppress all import-time -# warnings. -DebugWarnings().__enter__() - - -class LogManager(logbook.NestedSetup): - def __init__(self, stdout=sys.stdout, stderr=sys.stderr): - self.stdout = stdout - self.stderr = stderr - self._null_handler = logbook.NullHandler() - self._output_handler = OutputHandler(self.stdout) - self._relevel_processor = Relevel(allowed=["dbt", "werkzeug"]) - self._state_processor = DbtProcessState("internal") - self._scrub_processor = ScrubSecrets() - # keep track of whether we've already entered to decide if we should - # be actually pushing. This allows us to log in main() and also - # support entering dbt execution via handle_and_check. - self._stack_depth = 0 - super().__init__( - [ - self._null_handler, - self._output_handler, - self._relevel_processor, - self._state_processor, - self._scrub_processor, - ] - ) - - def push_application(self): - self._stack_depth += 1 - if self._stack_depth == 1: - super().push_application() - - def pop_application(self): - self._stack_depth -= 1 - if self._stack_depth == 0: - super().pop_application() - - def disable(self): - self.add_handler(logbook.NullHandler()) - - def add_handler(self, handler): - """add an handler to the log manager that runs before the file handler.""" - self.objects.append(handler) - - def set_path(self, _): - """No-op that allows dbt-rpc to not break. See GH #7661""" - pass - - @property - def initialized(self): - """Dummy return value for dbt-rpc. 
See GH#7661""" - return True - - # this is used by `dbt ls` to allow piping stdout to jq, etc - def stderr_console(self): - """Output to stderr at WARNING level instead of stdout""" - self._output_handler.stream = self.stderr - self._output_handler.level = logbook.WARNING - - def stdout_console(self): - """enable stdout and disable stderr""" - self._output_handler.stream = self.stdout - self._output_handler.level = logbook.INFO - - def set_debug(self): - self._output_handler.set_text_format(DEBUG_LOG_FORMAT) - self._output_handler.level = logbook.DEBUG - - def format_json(self): - for handler in self.objects: - if isinstance(handler, FormatterMixin): - handler.format_json() - - def format_text(self): - for handler in self.objects: - if isinstance(handler, FormatterMixin): - handler.format_text() - - def reset_handlers(self): - """Reset the handlers to their defaults. This is nice in testing!""" - self.stdout_console() - for handler in self.objects: - if isinstance(handler, FormatterMixin): - handler.reset() - - def set_output_stream(self, stream, error=None): - if error is None: - error = stream - - if self._output_handler.stream is self.stdout: - self._output_handler.stream = stream - elif self._output_handler.stream is self.stderr: - self._output_handler.stream = error - - self.stdout = stream - self.stderr = error - - -log_manager = LogManager() - - -def log_cache_events(flag): - """Set the cache logger to propagate its messages based on the given flag.""" - # the flag is True if we should log, and False if we shouldn't, so disabled - # is the inverse. - CACHE_LOGGER.disabled = not flag - - -if not dbt.flags.ENABLE_LEGACY_LOGGER: - logger.disable() -GLOBAL_LOGGER = logger - - -class LogMessageHandler(logbook.Handler): - formatter_class = LogMessageFormatter - - def format_logmessage(self, record): - """Format a LogRecord into a LogMessage""" - message = self.format(record) - return LogMessage.from_record_formatted(record, message) - - -class ListLogHandler(LogMessageHandler): - def __init__( - self, - level: int = logbook.NOTSET, - filter: Optional[Callable] = None, - bubble: bool = False, - lst: Optional[List[LogMessage]] = None, - ) -> None: - super().__init__(level, filter, bubble) - if lst is None: - lst = [] - self.records: List[LogMessage] = lst - - def should_handle(self, record): - """Only ever emit dbt-sourced log messages to the ListHandler.""" - if _root_channel(record) != "dbt": - return False - return super().should_handle(record) - - def emit(self, record: logbook.LogRecord): - as_dict = self.format_logmessage(record) - self.records.append(as_dict) - - -def _env_log_level(var_name: str) -> int: - # convert debugging environment variable name to a log level - if dbt.flags.env_set_truthy(var_name): - return logging.DEBUG - else: - return logging.ERROR - - -LOG_LEVEL_GOOGLE = _env_log_level("DBT_GOOGLE_DEBUG_LOGGING") -LOG_LEVEL_SNOWFLAKE = _env_log_level("DBT_SNOWFLAKE_CONNECTOR_DEBUG_LOGGING") -LOG_LEVEL_BOTOCORE = _env_log_level("DBT_BOTOCORE_DEBUG_LOGGING") -LOG_LEVEL_HTTP = _env_log_level("DBT_HTTP_DEBUG_LOGGING") -LOG_LEVEL_WERKZEUG = _env_log_level("DBT_WERKZEUG_DEBUG_LOGGING") - -logging.getLogger("botocore").setLevel(LOG_LEVEL_BOTOCORE) -logging.getLogger("requests").setLevel(LOG_LEVEL_HTTP) -logging.getLogger("urllib3").setLevel(LOG_LEVEL_HTTP) -logging.getLogger("google").setLevel(LOG_LEVEL_GOOGLE) -logging.getLogger("snowflake.connector").setLevel(LOG_LEVEL_SNOWFLAKE) - -logging.getLogger("parsedatetime").setLevel(logging.ERROR) 
-logging.getLogger("werkzeug").setLevel(LOG_LEVEL_WERKZEUG) - - -def list_handler( - lst: Optional[List[LogMessage]], - level=logbook.NOTSET, -) -> ContextManager: - """Return a context manager that temporarily attaches a list to the logger.""" - return ListLogHandler(lst=lst, level=level, bubble=True) - - -def get_timestamp(): - return time.strftime("%H:%M:%S") - - -def timestamped_line(msg: str) -> str: - return "{} | {}".format(get_timestamp(), msg) - - -def print_timestamped_line(msg: str, use_color: Optional[str] = None): - if use_color is not None: - msg = dbt.ui.color(msg, use_color) - - GLOBAL_LOGGER.info(timestamped_line(msg)) diff --git a/core/dbt/materializations/__init__.py b/core/dbt/materializations/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/core/dbt/materializations/incremental/__init__.py b/core/dbt/materializations/incremental/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/core/dbt/materializations/incremental/microbatch.py b/core/dbt/materializations/incremental/microbatch.py new file mode 100644 index 00000000000..5bd46eae5e9 --- /dev/null +++ b/core/dbt/materializations/incremental/microbatch.py @@ -0,0 +1,164 @@ +from datetime import datetime, timedelta +from typing import List, Optional, Tuple + +import pytz + +from dbt.artifacts.resources.types import BatchSize +from dbt.contracts.graph.nodes import ModelNode, NodeConfig +from dbt.exceptions import DbtInternalError, DbtRuntimeError + + +class MicrobatchBuilder: + """A utility class for building microbatch definitions associated with a specific model""" + + def __init__( + self, + model: ModelNode, + is_incremental: bool, + event_time_start: Optional[datetime], + event_time_end: Optional[datetime], + ): + if model.config.incremental_strategy != "microbatch": + raise DbtInternalError( + f"Model '{model.name}' does not use 'microbatch' incremental_strategy." + ) + self.model = model + + if self.model.config.batch_size is None: + raise DbtRuntimeError( + f"Microbatch model '{self.model.name}' does not have a 'batch_size' config (one of {[batch_size.value for batch_size in BatchSize]}) specificed." + ) + + self.is_incremental = is_incremental + self.event_time_start = ( + event_time_start.replace(tzinfo=pytz.UTC) if event_time_start else None + ) + self.event_time_end = event_time_end.replace(tzinfo=pytz.UTC) if event_time_end else None + + def build_end_time(self): + """Defaults the end_time to the current time in UTC unless a non `None` event_time_end was provided""" + return self.event_time_end or datetime.now(tz=pytz.utc) + + def build_start_time(self, checkpoint: Optional[datetime]): + """Create a start time based off the passed in checkpoint. + + If the checkpoint is `None`, then `None` will be returned as a checkpoint is necessary + to build a start time. This is because we build the start time relative to the checkpoint + via the batchsize and offset, and we cannot offset a checkpoint if there is no checkpoint. 
+ """ + + if self.event_time_start: + return MicrobatchBuilder.truncate_timestamp( + self.event_time_start, self.model.config.batch_size + ) + + if not self.is_incremental or checkpoint is None: + # TODO: return new model-level configuration or raise error + return None + + assert isinstance(self.model.config, NodeConfig) + batch_size = self.model.config.batch_size + + lookback = self.model.config.lookback + start = MicrobatchBuilder.offset_timestamp(checkpoint, batch_size, -1 * lookback) + + return start + + def build_batches( + self, start: Optional[datetime], end: datetime + ) -> List[Tuple[Optional[datetime], datetime]]: + """ + Given a start and end datetime, builds a list of batches where each batch is + the size of the model's batch_size. + """ + if start is None: + return [(start, end)] + + batch_size = self.model.config.batch_size + curr_batch_start: datetime = start + curr_batch_end: datetime = MicrobatchBuilder.offset_timestamp( + curr_batch_start, batch_size, 1 + ) + + batches: List[Tuple[Optional[datetime], datetime]] = [(curr_batch_start, curr_batch_end)] + while curr_batch_end <= end: + curr_batch_start = curr_batch_end + curr_batch_end = MicrobatchBuilder.offset_timestamp(curr_batch_start, batch_size, 1) + batches.append((curr_batch_start, curr_batch_end)) + + # use exact end value as stop + batches[-1] = (batches[-1][0], end) + + return batches + + @staticmethod + def offset_timestamp(timestamp: datetime, batch_size: BatchSize, offset: int) -> datetime: + """Truncates the passed in timestamp based on the batch_size and then applies the offset by the batch_size. + + Note: It's important to understand that the offset applies to the truncated timestamp, not + the origin timestamp. Thus being offset by a day isn't relative to the any given hour that day, + but relative to the start of the day. So if the timestamp is the very end of a day, 2024-09-17 23:59:59, + you have a batch size of a day, and an offset of +1, then the returned value ends up being only one + second later, 2024-09-18 00:00:00. + + 2024-09-17 16:06:00 + Batchsize.hour -1 -> 2024-09-17 15:00:00 + 2024-09-17 16:06:00 + Batchsize.hour +1 -> 2024-09-17 17:00:00 + 2024-09-17 16:06:00 + Batchsize.day -1 -> 2024-09-16 00:00:00 + 2024-09-17 16:06:00 + Batchsize.day +1 -> 2024-09-18 00:00:00 + 2024-09-17 16:06:00 + Batchsize.month -1 -> 2024-08-01 00:00:00 + 2024-09-17 16:06:00 + Batchsize.month +1 -> 2024-10-01 00:00:00 + 2024-09-17 16:06:00 + Batchsize.year -1 -> 2023-01-01 00:00:00 + 2024-09-17 16:06:00 + Batchsize.year +1 -> 2025-01-01 00:00:00 + """ + truncated = MicrobatchBuilder.truncate_timestamp(timestamp, batch_size) + + offset_timestamp: datetime + if batch_size == BatchSize.hour: + offset_timestamp = truncated + timedelta(hours=offset) + elif batch_size == BatchSize.day: + offset_timestamp = truncated + timedelta(days=offset) + elif batch_size == BatchSize.month: + offset_timestamp = truncated + for _ in range(abs(offset)): + if offset < 0: + offset_timestamp = offset_timestamp - timedelta(days=1) + else: + offset_timestamp = offset_timestamp + timedelta(days=31) + offset_timestamp = MicrobatchBuilder.truncate_timestamp( + offset_timestamp, batch_size + ) + elif batch_size == BatchSize.year: + offset_timestamp = truncated.replace(year=truncated.year + offset) + + return offset_timestamp + + @staticmethod + def truncate_timestamp(timestamp: datetime, batch_size: BatchSize): + """Truncates the passed in timestamp based on the batch_size. 
+ + 2024-09-17 16:06:00 + Batchsize.hour -> 2024-09-17 16:00:00 + 2024-09-17 16:06:00 + Batchsize.day -> 2024-09-17 00:00:00 + 2024-09-17 16:06:00 + Batchsize.month -> 2024-09-01 00:00:00 + 2024-09-17 16:06:00 + Batchsize.year -> 2024-01-01 00:00:00 + """ + if batch_size == BatchSize.hour: + truncated = datetime( + timestamp.year, + timestamp.month, + timestamp.day, + timestamp.hour, + 0, + 0, + 0, + pytz.utc, + ) + elif batch_size == BatchSize.day: + truncated = datetime( + timestamp.year, timestamp.month, timestamp.day, 0, 0, 0, 0, pytz.utc + ) + elif batch_size == BatchSize.month: + truncated = datetime(timestamp.year, timestamp.month, 1, 0, 0, 0, 0, pytz.utc) + elif batch_size == BatchSize.year: + truncated = datetime(timestamp.year, 1, 1, 0, 0, 0, 0, pytz.utc) + + return truncated diff --git a/core/dbt/mp_context.py b/core/dbt/mp_context.py new file mode 100644 index 00000000000..0a39fd4da96 --- /dev/null +++ b/core/dbt/mp_context.py @@ -0,0 +1,8 @@ +from multiprocessing import get_context +from multiprocessing.context import SpawnContext + +_MP_CONTEXT = get_context("spawn") + + +def get_mp_context() -> SpawnContext: + return _MP_CONTEXT diff --git a/core/dbt/node_types.py b/core/dbt/node_types.py index 33d8fc4b0f4..71ef90594a2 100644 --- a/core/dbt/node_types.py +++ b/core/dbt/node_types.py @@ -1,92 +1,36 @@ from typing import List -from dbt.dataclass_schema import StrEnum - - -class AccessType(StrEnum): - Protected = "protected" - Private = "private" - Public = "public" - - @classmethod - def is_valid(cls, item): - try: - cls(item) - except ValueError: - return False - return True - - -class NodeType(StrEnum): - Model = "model" - Analysis = "analysis" - Test = "test" - Snapshot = "snapshot" - Operation = "operation" - Seed = "seed" - # TODO: rm? 
- RPCCall = "rpc" - SqlOperation = "sql_operation" - Documentation = "doc" - Source = "source" - Macro = "macro" - Exposure = "exposure" - Metric = "metric" - Group = "group" - SemanticModel = "semantic_model" - - @classmethod - def executable(cls) -> List["NodeType"]: - return [ - cls.Model, - cls.Test, - cls.Snapshot, - cls.Analysis, - cls.Operation, - cls.Seed, - cls.Documentation, - cls.RPCCall, - cls.SqlOperation, - ] - - @classmethod - def refable(cls) -> List["NodeType"]: - return [ - cls.Model, - cls.Seed, - cls.Snapshot, - ] - - @classmethod - def versioned(cls) -> List["NodeType"]: - return [ - cls.Model, - ] - - @classmethod - def documentable(cls) -> List["NodeType"]: - return [ - cls.Model, - cls.Seed, - cls.Snapshot, - cls.Source, - cls.Macro, - cls.Analysis, - cls.Exposure, - cls.Metric, - ] - - def pluralize(self) -> str: - if self is self.Analysis: - return "analyses" - return f"{self}s" - - -class RunHookType(StrEnum): - Start = "on-run-start" - End = "on-run-end" - - -class ModelLanguage(StrEnum): - python = "python" - sql = "sql" +# preserving import path during dbt/artifacts refactor +from dbt.artifacts.resources.types import ( # noqa + AccessType, + ModelLanguage, + NodeType, + RunHookType, +) + +EXECUTABLE_NODE_TYPES: List["NodeType"] = [ + NodeType.Model, + NodeType.Test, + NodeType.Snapshot, + NodeType.Analysis, + NodeType.Operation, + NodeType.Seed, + NodeType.Documentation, + NodeType.RPCCall, + NodeType.SqlOperation, +] + +REFABLE_NODE_TYPES: List["NodeType"] = [ + NodeType.Model, + NodeType.Seed, + NodeType.Snapshot, +] + +TEST_NODE_TYPES: List["NodeType"] = [ + NodeType.Test, + NodeType.Unit, +] + +VERSIONED_NODE_TYPES: List["NodeType"] = [ + NodeType.Model, +] diff --git a/core/dbt/parser/__init__.py b/core/dbt/parser/__init__.py index ee0490ecb61..04f345fe107 100644 --- a/core/dbt/parser/__init__.py +++ b/core/dbt/parser/__init__.py @@ -1,24 +1,23 @@ -from .analysis import AnalysisParser # noqa -from .base import Parser, ConfiguredParser # noqa -from .singular_test import SingularTestParser # noqa -from .generic_test import GenericTestParser # noqa -from .docs import DocumentationParser # noqa -from .hooks import HookParser # noqa -from .macros import MacroParser # noqa -from .models import ModelParser # noqa -from .schemas import SchemaParser # noqa -from .seeds import SeedParser # noqa -from .snapshots import SnapshotParser # noqa - from . 
import ( # noqa analysis, base, - generic_test, - singular_test, docs, + generic_test, hooks, macros, models, schemas, + singular_test, snapshots, ) +from .analysis import AnalysisParser # noqa +from .base import ConfiguredParser, Parser # noqa +from .docs import DocumentationParser # noqa +from .generic_test import GenericTestParser # noqa +from .hooks import HookParser # noqa +from .macros import MacroParser # noqa +from .models import ModelParser # noqa +from .schemas import SchemaParser # noqa +from .seeds import SeedParser # noqa +from .singular_test import SingularTestParser # noqa +from .snapshots import SnapshotParser # noqa diff --git a/core/dbt/parser/base.py b/core/dbt/parser/base.py index b24cd4712d4..a68f7384c0b 100644 --- a/core/dbt/parser/base.py +++ b/core/dbt/parser/base.py @@ -1,27 +1,30 @@ import abc import itertools import os -from typing import List, Dict, Any, Generic, Optional, TypeVar +from typing import Any, Dict, Generic, List, Optional, TypeVar -from dbt.dataclass_schema import ValidationError - -from dbt import utils -from dbt.clients.jinja import MacroGenerator +from dbt import hooks, utils +from dbt.adapters.factory import get_adapter # noqa: F401 +from dbt.artifacts.resources import Contract +from dbt.clients.jinja import MacroGenerator, get_rendered +from dbt.config import RuntimeConfig +from dbt.context.context_config import ContextConfig from dbt.context.providers import ( - generate_parser_model_context, generate_generate_name_macro_context, + generate_parser_model_context, ) -from dbt.adapters.factory import get_adapter # noqa: F401 -from dbt.clients.jinja import get_rendered -from dbt.config import Project, RuntimeConfig -from dbt.context.context_config import ContextConfig from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import Contract, BaseNode, ManifestNode +from dbt.contracts.graph.nodes import BaseNode, ManifestNode from dbt.contracts.graph.unparsed import Docs, UnparsedNode -from dbt.exceptions import DbtInternalError, ConfigUpdateError, DictParseError -from dbt import hooks -from dbt.node_types import NodeType, ModelLanguage +from dbt.exceptions import ( + ConfigUpdateError, + DbtInternalError, + DictParseError, + InvalidAccessTypeError, +) +from dbt.node_types import AccessType, ModelLanguage, NodeType from dbt.parser.search import FileBlock +from dbt_common.dataclass_schema import ValidationError # internally, the parser may store a less-restrictive type that will be # transformed into the final type. 
But it will have to be derived from @@ -29,7 +32,6 @@ FinalValue = TypeVar("FinalValue", bound=BaseNode) IntermediateValue = TypeVar("IntermediateValue", bound=BaseNode) -IntermediateNode = TypeVar("IntermediateNode", bound=Any) FinalNode = TypeVar("FinalNode", bound=ManifestNode) @@ -37,9 +39,9 @@ class BaseParser(Generic[FinalValue]): - def __init__(self, project: Project, manifest: Manifest) -> None: - self.project = project - self.manifest = manifest + def __init__(self, project: RuntimeConfig, manifest: Manifest) -> None: + self.project: RuntimeConfig = project + self.manifest: Manifest = manifest @abc.abstractmethod def parse_file(self, block: FileBlock) -> None: @@ -61,7 +63,7 @@ def generate_unique_id(self, resource_name: str, hash: Optional[str] = None) -> class Parser(BaseParser[FinalValue], Generic[FinalValue]): def __init__( self, - project: Project, + project: RuntimeConfig, manifest: Manifest, root_project: RuntimeConfig, ) -> None: @@ -70,6 +72,7 @@ def __init__( class RelationUpdate: + # "component" is database, schema or alias def __init__(self, config: RuntimeConfig, manifest: Manifest, component: str) -> None: default_macro = manifest.find_generate_macro_by_name( component=component, @@ -103,7 +106,7 @@ def __init__(self, config: RuntimeConfig, manifest: Manifest, component: str) -> self.component = component def __call__(self, parsed_node: Any, override: Optional[str]) -> None: - if parsed_node.package_name in self.package_updaters: + if getattr(parsed_node, "package_name", None) in self.package_updaters: new_value = self.package_updaters[parsed_node.package_name](override, parsed_node) else: new_value = self.default_updater(override, parsed_node) @@ -115,16 +118,17 @@ def __call__(self, parsed_node: Any, override: Optional[str]) -> None: class ConfiguredParser( Parser[FinalNode], - Generic[ConfiguredBlockType, IntermediateNode, FinalNode], + Generic[ConfiguredBlockType, FinalNode], ): def __init__( self, - project: Project, + project: RuntimeConfig, manifest: Manifest, root_project: RuntimeConfig, ) -> None: super().__init__(project, manifest, root_project) + # this sets callables from RelationUpdate self._update_node_database = RelationUpdate( manifest=manifest, config=root_project, component="database" ) @@ -141,7 +145,7 @@ def get_compiled_path(cls, block: ConfiguredBlockType) -> str: pass @abc.abstractmethod - def parse_from_dict(self, dict, validate=True) -> IntermediateNode: + def parse_from_dict(self, dict, validate=True) -> FinalNode: pass @abc.abstractproperty @@ -205,7 +209,7 @@ def _create_parsetime_node( fqn: List[str], name=None, **kwargs, - ) -> IntermediateNode: + ) -> FinalNode: """Create the node that will be passed in to the parser context for "rendering". Some information may be partial, as it'll be updated by config() and any ref()/source() calls discovered during rendering. 
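The docstring above describes the parse-time flow: build a partial node, render its Jinja so that config() and ref()/source() calls are captured, then fold what was captured back into the node. A much-simplified sketch of that capture idea, assuming only the `jinja2` package (dbt's real parser context and macro-capture machinery are considerably more involved):

    import jinja2

    captured = {"config": {}, "refs": []}

    def config(**kwargs):
        # record parse-time config() calls; what config() renders to doesn't matter here
        captured["config"].update(kwargs)
        return ""

    def ref(name):
        # record dependencies discovered while rendering
        captured["refs"].append(name)
        return name

    raw_code = "{{ config(materialized='table') }} select * from {{ ref('orders') }}"
    jinja2.Template(raw_code).render(config=config, ref=ref)

    print(captured)  # {'config': {'materialized': 'table'}, 'refs': ['orders']}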
@@ -236,6 +240,7 @@ def _create_parsetime_node( "checksum": block.file.checksum.to_dict(omit_none=True), } dct.update(kwargs) + try: return self.parse_from_dict(dct, validate=True) except ValidationError as exc: @@ -249,10 +254,10 @@ def _create_parsetime_node( ) raise DictParseError(exc, node=node) - def _context_for(self, parsed_node: IntermediateNode, config: ContextConfig) -> Dict[str, Any]: + def _context_for(self, parsed_node: FinalNode, config: ContextConfig) -> Dict[str, Any]: return generate_parser_model_context(parsed_node, self.root_project, self.manifest, config) - def render_with_context(self, parsed_node: IntermediateNode, config: ContextConfig): + def render_with_context(self, parsed_node: FinalNode, config: ContextConfig): # Given the parsed node and a ContextConfig to use during parsing, # render the node's sql with macro capture enabled. # Note: this mutates the config object when config calls are rendered. @@ -267,7 +272,7 @@ def render_with_context(self, parsed_node: IntermediateNode, config: ContextConf # updating the config with new config passed in, then re-creating the # config from the dict in the node. def update_parsed_node_config_dict( - self, parsed_node: IntermediateNode, config_dict: Dict[str, Any] + self, parsed_node: FinalNode, config_dict: Dict[str, Any] ) -> None: # Overwrite node config final_config_dict = parsed_node.config.to_dict(omit_none=True) @@ -277,7 +282,7 @@ def update_parsed_node_config_dict( parsed_node.config = parsed_node.config.from_dict(final_config_dict) def update_parsed_node_relation_names( - self, parsed_node: IntermediateNode, config_dict: Dict[str, Any] + self, parsed_node: FinalNode, config_dict: Dict[str, Any] ) -> None: # These call the RelationUpdate callable to go through generate_name macros @@ -285,8 +290,11 @@ def update_parsed_node_relation_names( self._update_node_schema(parsed_node, config_dict.get("schema")) self._update_node_alias(parsed_node, config_dict.get("alias")) - # Snapshot nodes use special "target_database" and "target_schema" fields for some reason - if parsed_node.resource_type == NodeType.Snapshot: + # Snapshot nodes use special "target_database" and "target_schema" fields + # for backward compatibility + # We have to do getattr here because saved_query parser calls this method with + # Export object instead of a node. + if getattr(parsed_node, "resource_type", None) == NodeType.Snapshot: if "target_database" in config_dict and config_dict["target_database"]: parsed_node.database = config_dict["target_database"] if "target_schema" in config_dict and config_dict["target_schema"]: @@ -296,7 +304,7 @@ def update_parsed_node_relation_names( def update_parsed_node_config( self, - parsed_node: IntermediateNode, + parsed_node: FinalNode, config: ContextConfig, context=None, patch_config_dict=None, @@ -327,6 +335,16 @@ def update_parsed_node_config( if "group" in config_dict and config_dict["group"]: parsed_node.group = config_dict["group"] + # If we have access in the config, copy to node level + if parsed_node.resource_type == NodeType.Model and config_dict.get("access", None): + if AccessType.is_valid(config_dict["access"]): + assert hasattr(parsed_node, "access") + parsed_node.access = AccessType(config_dict["access"]) + else: + raise InvalidAccessTypeError( + unique_id=parsed_node.unique_id, field_value=config_dict["access"] + ) + # If we have docs in the config, merge with the node level, for backwards # compatibility with earlier node-only config. 
if "docs" in config_dict and config_dict["docs"]: @@ -345,7 +363,11 @@ def update_parsed_node_config( # If we have contract in the config, copy to node level if "contract" in config_dict and config_dict["contract"]: - parsed_node.contract = Contract(enforced=config_dict["contract"]["enforced"]) + contract_dct = config_dict["contract"] + Contract.validate(contract_dct) + # Seed node has contract config (from NodeConfig) but no contract in SeedNode + if hasattr(parsed_node, "contract"): + parsed_node.contract = Contract.from_dict(contract_dct) # unrendered_config is used to compare the original database/schema/alias # values and to handle 'same_config' and 'same_contents' calls @@ -367,6 +389,7 @@ def update_parsed_node_config( # at this point, we've collected our hooks. Use the node context to # render each hook and collect refs/sources + assert hasattr(parsed_node.config, "pre_hook") and hasattr(parsed_node.config, "post_hook") hooks = list(itertools.chain(parsed_node.config.pre_hook, parsed_node.config.post_hook)) # skip context rebuilding if there aren't any hooks if not hooks: @@ -398,7 +421,7 @@ def config_dict( self._mangle_hooks(config_dict) return config_dict - def render_update(self, node: IntermediateNode, config: ContextConfig) -> None: + def render_update(self, node: FinalNode, config: ContextConfig) -> None: try: context = self.render_with_context(node, config) self.update_parsed_node_config(node, config, context=context) @@ -425,16 +448,15 @@ def parse_node(self, block: ConfiguredBlockType) -> FinalNode: fqn=fqn, ) self.render_update(node, config) - result = self.transform(node) - self.add_result_node(block, result) - return result + self.add_result_node(block, node) + return node def _update_node_relation_name(self, node: ManifestNode): # Seed and Snapshot nodes and Models that are not ephemeral, # and TestNodes that store_failures. # TestNodes do not get a relation_name without store failures # because no schema is created. 
- if node.is_relational and not node.is_ephemeral_model: + if getattr(node, "is_relational", None) and not getattr(node, "is_ephemeral_model", None): adapter = get_adapter(self.root_project) relation_cls = adapter.Relation node.relation_name = str(relation_cls.create_from(self.root_project, node)) @@ -446,26 +468,18 @@ def _update_node_relation_name(self, node: ManifestNode): def parse_file(self, file_block: FileBlock) -> None: pass - @abc.abstractmethod - def transform(self, node: IntermediateNode) -> FinalNode: - pass - class SimpleParser( - ConfiguredParser[ConfiguredBlockType, FinalNode, FinalNode], + ConfiguredParser[ConfiguredBlockType, FinalNode], Generic[ConfiguredBlockType, FinalNode], ): - def transform(self, node): - return node + pass -class SQLParser( - ConfiguredParser[FileBlock, IntermediateNode, FinalNode], Generic[IntermediateNode, FinalNode] -): +class SQLParser(ConfiguredParser[FileBlock, FinalNode], Generic[FinalNode]): def parse_file(self, file_block: FileBlock) -> None: self.parse_node(file_block) -class SimpleSQLParser(SQLParser[FinalNode, FinalNode]): - def transform(self, node): - return node +class SimpleSQLParser(SQLParser[FinalNode]): + pass diff --git a/core/dbt/parser/common.py b/core/dbt/parser/common.py index 24a0810943b..3bafbb9550f 100644 --- a/core/dbt/parser/common.py +++ b/core/dbt/parser/common.py @@ -1,23 +1,24 @@ +from dataclasses import dataclass +from typing import Any, Dict, Generic, List, Optional, TypeVar, Union + +from dbt.artifacts.resources import ColumnInfo, NodeVersion +from dbt.contracts.graph.nodes import UnpatchedSourceDefinition from dbt.contracts.graph.unparsed import ( + HasColumnDocs, HasColumnProps, - UnparsedColumn, - UnparsedNodeUpdate, - UnparsedMacroUpdate, + HasColumnTests, UnparsedAnalysisUpdate, + UnparsedColumn, UnparsedExposure, + UnparsedMacroUpdate, UnparsedModelUpdate, + UnparsedNodeUpdate, ) -from dbt.contracts.graph.unparsed import NodeVersion, HasColumnTests, HasColumnDocs -from dbt.contracts.graph.nodes import ( - UnpatchedSourceDefinition, - ColumnInfo, - ColumnLevelConstraint, - ConstraintType, -) +from dbt.exceptions import ParsingError from dbt.parser.search import FileBlock -from typing import List, Dict, Any, TypeVar, Generic, Union, Optional -from dataclasses import dataclass -from dbt.exceptions import DbtInternalError, ParsingError +from dbt_common.contracts.constraints import ColumnLevelConstraint, ConstraintType +from dbt_common.exceptions import DbtInternalError +from dbt_semantic_interfaces.type_enums import TimeGranularity def trimmed(inp: str) -> str: @@ -77,6 +78,10 @@ def name(self): def columns(self): return [] + @property + def data_tests(self) -> List[TestDef]: + return [] + @property def tests(self) -> List[TestDef]: return [] @@ -103,11 +108,11 @@ def columns(self): @dataclass class TestBlock(TargetColumnsBlock[Testable], Generic[Testable]): @property - def tests(self) -> List[TestDef]: - if self.target.tests is None: + def data_tests(self) -> List[TestDef]: + if self.target.data_tests is None: return [] else: - return self.target.tests + return self.target.data_tests @property def quote_columns(self) -> Optional[bool]: @@ -132,11 +137,11 @@ def columns(self): raise DbtInternalError(".columns for VersionedTestBlock with versions") @property - def tests(self) -> List[TestDef]: + def data_tests(self) -> List[TestDef]: if not self.target.versions: - return super().tests + return super().data_tests else: - raise DbtInternalError(".tests for VersionedTestBlock with versions") + raise 
DbtInternalError(".data_tests for VersionedTestBlock with versions") @classmethod def from_yaml_block(cls, src: YamlBlock, target: Versioned) -> "VersionedTestBlock[Versioned]": @@ -149,7 +154,7 @@ def from_yaml_block(cls, src: YamlBlock, target: Versioned) -> "VersionedTestBlo @dataclass class GenericTestBlock(TestBlock[Testable], Generic[Testable]): - test: Dict[str, Any] + data_test: Dict[str, Any] column_name: Optional[str] tags: List[str] version: Optional[NodeVersion] @@ -158,7 +163,7 @@ class GenericTestBlock(TestBlock[Testable], Generic[Testable]): def from_test_block( cls, src: TestBlock, - test: Dict[str, Any], + data_test: Dict[str, Any], column_name: Optional[str], tags: List[str], version: Optional[NodeVersion], @@ -167,7 +172,7 @@ def from_test_block( file=src.file, data=src.data, target=src.target, - test=test, + data_test=data_test, column_name=column_name, tags=tags, version=version, @@ -177,17 +182,16 @@ def from_test_block( class ParserRef: """A helper object to hold parse-time references.""" - def __init__(self): + def __init__(self) -> None: self.column_info: Dict[str, ColumnInfo] = {} - def _add(self, column: HasColumnProps): - tags: List[str] = [] - tags.extend(getattr(column, "tags", ())) - quote: Optional[bool] + def _add(self, column: HasColumnProps) -> None: + tags: List[str] = getattr(column, "tags", []) + quote: Optional[bool] = None + granularity: Optional[TimeGranularity] = None if isinstance(column, UnparsedColumn): quote = column.quote - else: - quote = None + granularity = TimeGranularity(column.granularity) if column.granularity else None if any( c @@ -205,6 +209,7 @@ def _add(self, column: HasColumnProps): tags=tags, quote=quote, _extra=column.extra, + granularity=granularity, ) @classmethod diff --git a/core/dbt/parser/docs.py b/core/dbt/parser/docs.py index edc7f83acfc..1bd7af5b55c 100644 --- a/core/dbt/parser/docs.py +++ b/core/dbt/parser/docs.py @@ -1,14 +1,12 @@ -from typing import Iterable, Optional - import re +from typing import Iterable, Optional from dbt.clients.jinja import get_rendered from dbt.contracts.files import SourceFile from dbt.contracts.graph.nodes import Documentation from dbt.node_types import NodeType from dbt.parser.base import Parser -from dbt.parser.search import BlockContents, FileBlock, BlockSearcher - +from dbt.parser.search import BlockContents, BlockSearcher, FileBlock SHOULD_PARSE_RE = re.compile(r"{[{%]") diff --git a/core/dbt/parser/fixtures.py b/core/dbt/parser/fixtures.py new file mode 100644 index 00000000000..b1469d0477d --- /dev/null +++ b/core/dbt/parser/fixtures.py @@ -0,0 +1,51 @@ +import csv +from io import StringIO +from typing import Any, Dict, List, Optional + +from dbt.contracts.files import FixtureSourceFile +from dbt.contracts.graph.nodes import UnitTestFileFixture +from dbt.node_types import NodeType +from dbt.parser.base import Parser +from dbt.parser.search import FileBlock + + +class FixtureParser(Parser[UnitTestFileFixture]): + @property + def resource_type(self) -> NodeType: + return NodeType.Fixture + + @classmethod + def get_compiled_path(cls, block: FileBlock): + # Is this necessary? 
+ return block.path.relative_path + + def generate_unique_id(self, resource_name: str, _: Optional[str] = None) -> str: + return f"fixture.{self.project.project_name}.{resource_name}" + + def parse_file(self, file_block: FileBlock): + assert isinstance(file_block.file, FixtureSourceFile) + unique_id = self.generate_unique_id(file_block.name) + + if file_block.file.path.relative_path.endswith(".sql"): + rows = file_block.file.contents # type: ignore + else: # endswith('.csv') + rows = self.get_rows(file_block.file.contents) # type: ignore + + fixture = UnitTestFileFixture( + name=file_block.name, + path=file_block.file.path.relative_path, + original_file_path=file_block.path.original_file_path, + package_name=self.project.project_name, + unique_id=unique_id, + resource_type=NodeType.Fixture, + rows=rows, + ) + self.manifest.add_fixture(file_block.file, fixture) + + def get_rows(self, contents) -> List[Dict[str, Any]]: + rows = [] + dummy_file = StringIO(contents) + reader = csv.DictReader(dummy_file) + for row in reader: + rows.append(row) + return rows diff --git a/core/dbt/parser/generic_test.py b/core/dbt/parser/generic_test.py index 88efc3c7dce..3c10976fe30 100644 --- a/core/dbt/parser/generic_test.py +++ b/core/dbt/parser/generic_test.py @@ -2,15 +2,15 @@ import jinja2 -from dbt.exceptions import ParsingError -from dbt.clients import jinja +from dbt.contracts.files import SourceFile from dbt.contracts.graph.nodes import GenericTestNode, Macro from dbt.contracts.graph.unparsed import UnparsedMacro -from dbt.contracts.files import SourceFile +from dbt.exceptions import ParsingError from dbt.node_types import NodeType from dbt.parser.base import BaseParser from dbt.parser.search import FileBlock -from dbt.utils import MACRO_PREFIX +from dbt_common.clients import jinja +from dbt_common.utils import MACRO_PREFIX class GenericTestParser(BaseParser[GenericTestNode]): @@ -26,10 +26,11 @@ def create_generic_test_macro( self, block: jinja.BlockTag, base_node: UnparsedMacro, name: str ) -> Macro: unique_id = self.generate_unique_id(name) + macro_sql = block.full_block or "" return Macro( path=base_node.path, - macro_sql=block.full_block, + macro_sql=macro_sql, original_file_path=base_node.original_file_path, package_name=base_node.package_name, resource_type=base_node.resource_type, @@ -43,7 +44,7 @@ def parse_unparsed_generic_test(self, base_node: UnparsedMacro) -> Iterable[Macr t for t in jinja.extract_toplevel_blocks( base_node.raw_code, - allowed_blocks={"test"}, + allowed_blocks={"test", "data_test"}, collect_raw_data=False, ) if isinstance(t, jinja.BlockTag) diff --git a/core/dbt/parser/generic_test_builders.py b/core/dbt/parser/generic_test_builders.py index 69c86853162..6bca8300dae 100644 --- a/core/dbt/parser/generic_test_builders.py +++ b/core/dbt/parser/generic_test_builders.py @@ -1,21 +1,11 @@ import re from copy import deepcopy -from typing import ( - Generic, - Dict, - Any, - Tuple, - Optional, - List, -) +from typing import Any, Dict, Generic, List, Optional, Tuple -from dbt.clients.jinja import get_rendered, GENERIC_TEST_KWARGS_NAME +from dbt.artifacts.resources import NodeVersion +from dbt.clients.jinja import GENERIC_TEST_KWARGS_NAME, get_rendered from dbt.contracts.graph.nodes import UnpatchedSourceDefinition -from dbt.contracts.graph.unparsed import ( - NodeVersion, - UnparsedNodeUpdate, - UnparsedModelUpdate, -) +from dbt.contracts.graph.unparsed import UnparsedModelUpdate, UnparsedNodeUpdate from dbt.exceptions import ( CustomMacroPopulatingConfigValueError, 
SameKeyNestedError, @@ -24,13 +14,13 @@ TestArgIncludesModelError, TestArgsNotDictError, TestDefinitionDictLengthError, - TestTypeError, TestNameNotStringError, + TestTypeError, UnexpectedTestNamePatternError, - UndefinedMacroError, ) from dbt.parser.common import Testable from dbt.utils import md5 +from dbt_common.exceptions.macros import UndefinedMacroError def synthesize_generic_test_names( @@ -101,6 +91,7 @@ class TestBuilder(Generic[Testable]): "error_if", "fail_calc", "store_failures", + "store_failures_as", "meta", "database", "schema", @@ -109,21 +100,22 @@ class TestBuilder(Generic[Testable]): def __init__( self, - test: Dict[str, Any], + data_test: Dict[str, Any], target: Testable, package_name: str, render_ctx: Dict[str, Any], column_name: Optional[str] = None, version: Optional[NodeVersion] = None, ) -> None: - test_name, test_args = self.extract_test_args(test, column_name) + test_name, test_args = self.extract_test_args(data_test, column_name) self.args: Dict[str, Any] = test_args if "model" in self.args: raise TestArgIncludesModelError() self.package_name: str = package_name self.target: Testable = target self.version: Optional[NodeVersion] = version - + self.render_ctx: Dict[str, Any] = render_ctx + self.column_name: Optional[str] = column_name self.args["model"] = self.build_model_str() match = self.TEST_NAME_PATTERN.match(test_name) @@ -134,38 +126,12 @@ def __init__( self.name: str = groups["test_name"] self.namespace: str = groups["test_namespace"] self.config: Dict[str, Any] = {} + # Process legacy args + self.config.update(self._process_legacy_args()) - # This code removes keys identified as config args from the test entry - # dictionary. The keys remaining in the 'args' dictionary will be - # "kwargs", or keyword args that are passed to the test macro. - # The "kwargs" are not rendered into strings until compilation time. - # The "configs" are rendered here (since they were not rendered back - # in the 'get_key_dicts' methods in the schema parsers). 
- for key in self.CONFIG_ARGS: - value = self.args.pop(key, None) - # 'modifier' config could be either top level arg or in config - if value and "config" in self.args and key in self.args["config"]: - raise SameKeyNestedError() - if not value and "config" in self.args: - value = self.args["config"].pop(key, None) - if isinstance(value, str): - - try: - value = get_rendered(value, render_ctx, native=True) - except UndefinedMacroError as e: - raise CustomMacroPopulatingConfigValueError( - target_name=self.target.name, - column_name=column_name, - name=self.name, - key=key, - err_msg=e.msg, - ) - - if value is not None: - self.config[key] = value - + # Process config args if present if "config" in self.args: - del self.args["config"] + self.config.update(self._render_values(self.args.pop("config", {}))) if self.namespace is not None: self.package_name = self.namespace @@ -190,28 +156,58 @@ def __init__( if short_name != full_name and "alias" not in self.config: self.config["alias"] = short_name + def _process_legacy_args(self): + config = {} + for key in self.CONFIG_ARGS: + value = self.args.pop(key, None) + if value and "config" in self.args and key in self.args["config"]: + raise SameKeyNestedError() + if not value and "config" in self.args: + value = self.args["config"].pop(key, None) + config[key] = value + + return self._render_values(config) + + def _render_values(self, config: Dict[str, Any]) -> Dict[str, Any]: + rendered_config = {} + for key, value in config.items(): + if isinstance(value, str): + try: + value = get_rendered(value, self.render_ctx, native=True) + except UndefinedMacroError as e: + raise CustomMacroPopulatingConfigValueError( + target_name=self.target.name, + column_name=self.column_name, + name=self.name, + key=key, + err_msg=e.msg, + ) + if value is not None: + rendered_config[key] = value + return rendered_config + def _bad_type(self) -> TypeError: return TypeError('invalid target type "{}"'.format(type(self.target))) @staticmethod - def extract_test_args(test, name=None) -> Tuple[str, Dict[str, Any]]: - if not isinstance(test, dict): - raise TestTypeError(test) + def extract_test_args(data_test, name=None) -> Tuple[str, Dict[str, Any]]: + if not isinstance(data_test, dict): + raise TestTypeError(data_test) # If the test is a dictionary with top-level keys, the test name is "test_name" # and the rest are arguments # {'name': 'my_favorite_test', 'test_name': 'unique', 'config': {'where': '1=1'}} - if "test_name" in test.keys(): - test_name = test.pop("test_name") - test_args = test + if "test_name" in data_test.keys(): + test_name = data_test.pop("test_name") + test_args = data_test # If the test is a nested dictionary with one top-level key, the test name # is the dict name, and nested keys are arguments # {'unique': {'name': 'my_favorite_test', 'config': {'where': '1=1'}}} else: - test = list(test.items()) - if len(test) != 1: - raise TestDefinitionDictLengthError(test) - test_name, test_args = test[0] + data_test = list(data_test.items()) + if len(data_test) != 1: + raise TestDefinitionDictLengthError(data_test) + test_name, test_args = data_test[0] if not isinstance(test_args, dict): raise TestArgsNotDictError(test_args) @@ -242,6 +238,10 @@ def severity(self) -> Optional[str]: def store_failures(self) -> Optional[bool]: return self.config.get("store_failures") + @property + def store_failures_as(self) -> Optional[bool]: + return self.config.get("store_failures_as") + @property def where(self) -> Optional[str]: return self.config.get("where") @@ -294,6 +294,8 
@@ def get_static_config(self): config["fail_calc"] = self.fail_calc if self.store_failures is not None: config["store_failures"] = self.store_failures + if self.store_failures_as is not None: + config["store_failures_as"] = self.store_failures_as if self.meta is not None: config["meta"] = self.meta if self.database is not None: diff --git a/core/dbt/parser/hooks.py b/core/dbt/parser/hooks.py index d96257a0e71..bcc25c0d937 100644 --- a/core/dbt/parser/hooks.py +++ b/core/dbt/parser/hooks.py @@ -1,14 +1,14 @@ from dataclasses import dataclass -from typing import Iterable, Iterator, Union, List, Tuple +from typing import Iterable, Iterator, List, Tuple, Union from dbt.context.context_config import ContextConfig from dbt.contracts.files import FilePath from dbt.contracts.graph.nodes import HookNode -from dbt.exceptions import DbtInternalError from dbt.node_types import NodeType, RunHookType from dbt.parser.base import SimpleParser from dbt.parser.search import FileBlock from dbt.utils import get_pseudo_hook_path +from dbt_common.exceptions import DbtInternalError @dataclass @@ -66,8 +66,6 @@ def __iter__(self) -> Iterator[HookBlock]: class HookParser(SimpleParser[HookBlock, HookNode]): - def transform(self, node): - return node # Hooks are only in the dbt_project.yml file for the project def get_path(self) -> FilePath: diff --git a/core/dbt/parser/macros.py b/core/dbt/parser/macros.py index 7178222fc1d..f7eaef62b69 100644 --- a/core/dbt/parser/macros.py +++ b/core/dbt/parser/macros.py @@ -2,15 +2,16 @@ import jinja2 -from dbt.clients import jinja -from dbt.contracts.graph.unparsed import UnparsedMacro -from dbt.contracts.graph.nodes import Macro +from dbt.clients.jinja import get_supported_languages from dbt.contracts.files import FilePath, SourceFile +from dbt.contracts.graph.nodes import Macro +from dbt.contracts.graph.unparsed import UnparsedMacro from dbt.exceptions import ParsingError from dbt.node_types import NodeType from dbt.parser.base import BaseParser from dbt.parser.search import FileBlock, filesystem_search -from dbt.utils import MACRO_PREFIX +from dbt_common.clients import jinja +from dbt_common.utils import MACRO_PREFIX class MacroParser(BaseParser[Macro]): @@ -31,10 +32,11 @@ def get_compiled_path(cls, block: FileBlock): def parse_macro(self, block: jinja.BlockTag, base_node: UnparsedMacro, name: str) -> Macro: unique_id = self.generate_unique_id(name) + macro_sql = block.full_block or "" return Macro( path=base_node.path, - macro_sql=block.full_block, + macro_sql=macro_sql, original_file_path=base_node.original_file_path, package_name=base_node.package_name, resource_type=base_node.resource_type, @@ -48,7 +50,7 @@ def parse_unparsed_macros(self, base_node: UnparsedMacro) -> Iterable[Macro]: t for t in jinja.extract_toplevel_blocks( base_node.raw_code, - allowed_blocks={"macro", "materialization", "test"}, + allowed_blocks={"macro", "materialization", "test", "data_test"}, collect_raw_data=False, ) if isinstance(t, jinja.BlockTag) @@ -64,16 +66,28 @@ def parse_unparsed_macros(self, base_node: UnparsedMacro) -> Iterable[Macro]: e.add_node(base_node) raise - macro_nodes = list(ast.find_all(jinja2.nodes.Macro)) - - if len(macro_nodes) != 1: - # things have gone disastrously wrong, we thought we only - # parsed one block! 
- raise ParsingError( - f"Found multiple macros in {block.full_block}, expected 1", node=base_node - ) - - macro = macro_nodes[0] + if ( + isinstance(ast, jinja2.nodes.Template) + and hasattr(ast, "body") + and len(ast.body) == 1 + and isinstance(ast.body[0], jinja2.nodes.Macro) + ): + # If the top level node in the Template is a Macro, things look + # good and this is much faster than traversing the full ast, as + # in the following else clause. It's not clear if that traversal + # is ever really needed. + macro = ast.body[0] + else: + macro_nodes = list(ast.find_all(jinja2.nodes.Macro)) + + if len(macro_nodes) != 1: + # things have gone disastrously wrong, we thought we only + # parsed one block! + raise ParsingError( + f"Found multiple macros in {block.full_block}, expected 1", node=base_node + ) + + macro = macro_nodes[0] if not macro.name.startswith(MACRO_PREFIX): continue @@ -82,7 +96,7 @@ def parse_unparsed_macros(self, base_node: UnparsedMacro) -> Iterable[Macro]: node = self.parse_macro(block, base_node, name) # get supported_languages for materialization macro if block.block_type_name == "materialization": - node.supported_languages = jinja.get_supported_languages(macro) + node.supported_languages = get_supported_languages(macro) yield node def parse_file(self, block: FileBlock): diff --git a/core/dbt/parser/manifest.py b/core/dbt/parser/manifest.py index 259413dde3d..d54aa898713 100644 --- a/core/dbt/parser/manifest.py +++ b/core/dbt/parser/manifest.py @@ -1,133 +1,125 @@ -from copy import deepcopy -from dataclasses import dataclass -from dataclasses import field import datetime +import json import os +import pprint +import time import traceback -from typing import ( - Dict, - Optional, - Mapping, - Callable, - Any, - List, - Type, - Union, - Tuple, - Set, -) +from copy import deepcopy +from dataclasses import dataclass, field from itertools import chain -import time +from typing import Any, Callable, Dict, List, Mapping, Optional, Set, Tuple, Type, Union -from dbt.contracts.graph.semantic_manifest import SemanticManifest -from dbt.events.base_types import EventLevel -import json -import pprint import msgpack +import dbt.deprecations import dbt.exceptions import dbt.tracking import dbt.utils -from dbt.flags import get_flags - +import dbt_common.utils +from dbt import plugins from dbt.adapters.factory import ( get_adapter, - get_relation_class_by_name, get_adapter_package_names, + get_relation_class_by_name, + register_adapter, ) +from dbt.artifacts.resources import FileHash, NodeRelation, NodeVersion +from dbt.artifacts.schemas.base import Writable +from dbt.clients.jinja import MacroStack, get_rendered +from dbt.clients.jinja_static import statically_extract_macro_calls +from dbt.config import Project, RuntimeConfig from dbt.constants import ( MANIFEST_FILE_NAME, PARTIAL_PARSE_FILE_NAME, SEMANTIC_MANIFEST_FILE_NAME, ) -from dbt.helper_types import PathSet -from dbt.events.functions import fire_event, get_invocation_id, warn_or_error -from dbt.events.types import ( - PartialParsingErrorProcessingFile, - PartialParsingError, - ParsePerfInfoPath, - PartialParsingSkipParsing, - UnableToPartialParse, - PartialParsingNotEnabled, - ParsedFileLoadFailed, - InvalidDisabledTargetInTestNode, - NodeNotFoundOrDisabled, - StateCheckVarsHash, - Note, - DeprecatedModel, - DeprecatedReference, - UpcomingReferenceDeprecation, -) -from dbt.logger import DbtProcessState -from dbt.node_types import NodeType, AccessType -from dbt.clients.jinja import get_rendered, MacroStack -from 
dbt.clients.jinja_static import statically_extract_macro_calls -from dbt.clients.system import ( - make_directory, - path_exists, - read_json, - write_file, -) -from dbt.config import Project, RuntimeConfig +from dbt.context.configured import generate_macro_context from dbt.context.docs import generate_runtime_docs_context from dbt.context.macro_resolver import MacroResolver, TestMacroNamespace -from dbt.context.configured import generate_macro_context -from dbt.context.providers import ParseProvider -from dbt.contracts.files import FileHash, ParseFileType, SchemaSourceFile -from dbt.parser.read_files import ( - ReadFilesFromFileSystem, - load_source_file, - FileDiff, - ReadFilesFromDiff, -) -from dbt.parser.partial import PartialParsing, special_override_macros +from dbt.context.providers import ParseProvider, generate_runtime_macro_context +from dbt.context.query_header import generate_query_header_context +from dbt.contracts.files import ParseFileType, SchemaSourceFile from dbt.contracts.graph.manifest import ( - Manifest, Disabled, MacroManifest, + Manifest, ManifestStateCheck, ParsingInfo, ) from dbt.contracts.graph.nodes import ( - SourceDefinition, - Macro, Exposure, - Metric, - SeedNode, + GenericTestNode, + Macro, ManifestNode, - ResultNode, + Metric, ModelNode, - NodeRelation, + ResultNode, + SavedQuery, + SeedNode, + SemanticModel, + SourceDefinition, +) +from dbt.contracts.graph.semantic_manifest import SemanticManifest +from dbt.events.types import ( + DeprecatedModel, + DeprecatedReference, + InvalidDisabledTargetInTestNode, + NodeNotFoundOrDisabled, + ParsedFileLoadFailed, + ParsePerfInfoPath, + PartialParsingError, + PartialParsingErrorProcessingFile, + PartialParsingNotEnabled, + PartialParsingSkipParsing, + SpacesInResourceNameDeprecation, + StateCheckVarsHash, + UnableToPartialParse, + UpcomingReferenceDeprecation, ) -from dbt.contracts.graph.unparsed import NodeVersion -from dbt.contracts.util import Writable from dbt.exceptions import ( - TargetNotFoundError, AmbiguousAliasError, InvalidAccessTypeError, + TargetNotFoundError, + scrub_secrets, ) -from dbt.parser.base import Parser +from dbt.flags import get_flags +from dbt.mp_context import get_mp_context +from dbt.node_types import AccessType, NodeType from dbt.parser.analysis import AnalysisParser -from dbt.parser.generic_test import GenericTestParser -from dbt.parser.singular_test import SingularTestParser +from dbt.parser.base import Parser from dbt.parser.docs import DocumentationParser +from dbt.parser.fixtures import FixtureParser +from dbt.parser.generic_test import GenericTestParser from dbt.parser.hooks import HookParser from dbt.parser.macros import MacroParser from dbt.parser.models import ModelParser +from dbt.parser.partial import PartialParsing, special_override_macros +from dbt.parser.read_files import ( + FileDiff, + ReadFiles, + ReadFilesFromDiff, + ReadFilesFromFileSystem, + load_source_file, +) from dbt.parser.schemas import SchemaParser from dbt.parser.search import FileBlock from dbt.parser.seeds import SeedParser +from dbt.parser.singular_test import SingularTestParser from dbt.parser.snapshots import SnapshotParser from dbt.parser.sources import SourcePatcher +from dbt.parser.unit_tests import process_models_for_unit_test from dbt.version import __version__ - -from dbt.dataclass_schema import StrEnum, dbtClassMixin -from dbt.plugins import get_plugin_manager - +from dbt_common.clients.system import make_directory, path_exists, read_json, write_file +from dbt_common.constants import 
SECRET_ENV_PREFIX +from dbt_common.dataclass_schema import StrEnum, dbtClassMixin +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event, get_invocation_id, warn_or_error +from dbt_common.events.types import Note +from dbt_common.exceptions.base import DbtValidationError +from dbt_common.helper_types import PathSet from dbt_semantic_interfaces.enum_extension import assert_values_exhausted from dbt_semantic_interfaces.type_enums import MetricType -PARSING_STATE = DbtProcessState("parsing") PERF_INFO_FILE_NAME = "perf_info.json" @@ -218,7 +210,7 @@ class ManifestLoaderInfo(dbtClassMixin, Writable): projects: List[ProjectLoaderInfo] = field(default_factory=list) _project_index: Dict[str, ProjectLoaderInfo] = field(default_factory=dict) - def __post_serialize__(self, dct): + def __post_serialize__(self, dct: Dict, context: Optional[Dict] = None): del dct["_project_index"] return dct @@ -230,12 +222,12 @@ class ManifestLoader: def __init__( self, root_project: RuntimeConfig, - all_projects: Mapping[str, Project], + all_projects: Mapping[str, RuntimeConfig], macro_hook: Optional[Callable[[Manifest], Any]] = None, file_diff: Optional[FileDiff] = None, ) -> None: self.root_project: RuntimeConfig = root_project - self.all_projects: Mapping[str, Project] = all_projects + self.all_projects: Mapping[str, RuntimeConfig] = all_projects self.file_diff = file_diff self.manifest: Manifest = Manifest() self.new_manifest = self.manifest @@ -259,7 +251,8 @@ def __init__( # We need to know if we're actually partially parsing. It could # have been enabled, but not happening because of some issue. self.partially_parsing = False - self.partial_parser = None + self.partial_parser: Optional[PartialParsing] = None + self.skip_parsing = False # This is a saved manifest from a previous run that's used for partial parsing self.saved_manifest: Optional[Manifest] = self.read_manifest_for_partial_parse() @@ -275,54 +268,61 @@ def get_full_manifest( reset: bool = False, write_perf_info=False, ) -> Manifest: - adapter = get_adapter(config) # type: ignore # reset is set in a TaskManager load_manifest call, since # the config and adapter may be persistent. 
if reset: config.clear_dependencies() - adapter.clear_macro_manifest() + adapter.clear_macro_resolver() macro_hook = adapter.connections.set_query_header + flags = get_flags() + if not flags.PARTIAL_PARSE_FILE_DIFF: + file_diff = FileDiff.from_dict( + { + "deleted": [], + "changed": [], + "added": [], + } + ) # Hack to test file_diffs - if os.environ.get("DBT_PP_FILE_DIFF_TEST"): + elif os.environ.get("DBT_PP_FILE_DIFF_TEST"): file_diff_path = "file_diff.json" if path_exists(file_diff_path): file_diff_dct = read_json(file_diff_path) file_diff = FileDiff.from_dict(file_diff_dct) - with PARSING_STATE: # set up logbook.Processor for parsing - # Start performance counting - start_load_all = time.perf_counter() + # Start performance counting + start_load_all = time.perf_counter() - projects = config.load_dependencies() - loader = cls( - config, - projects, - macro_hook=macro_hook, - file_diff=file_diff, - ) + projects = config.load_dependencies() + loader = cls( + config, + projects, + macro_hook=macro_hook, + file_diff=file_diff, + ) - manifest = loader.load() + manifest = loader.load() - _check_manifest(manifest, config) - manifest.build_flat_graph() + _check_manifest(manifest, config) + manifest.build_flat_graph() - # This needs to happen after loading from a partial parse, - # so that the adapter has the query headers from the macro_hook. - loader.save_macros_to_adapter(adapter) + # This needs to happen after loading from a partial parse, + # so that the adapter has the query headers from the macro_hook. + loader.save_macros_to_adapter(adapter) - # Save performance info - loader._perf_info.load_all_elapsed = time.perf_counter() - start_load_all - loader.track_project_load() + # Save performance info + loader._perf_info.load_all_elapsed = time.perf_counter() - start_load_all + loader.track_project_load() - if write_perf_info: - loader.write_perf_info(config.project_target_path) + if write_perf_info: + loader.write_perf_info(config.project_target_path) return manifest # This is where the main action happens - def load(self): + def load(self) -> Manifest: start_read_files = time.perf_counter() # This updates the "files" dictionary in self.manifest, and creates @@ -331,6 +331,7 @@ def load(self): # of parsers to lists of file strings. The file strings are # used to get the SourceFiles from the manifest files. saved_files = self.saved_manifest.files if self.saved_manifest else {} + file_reader: Optional[ReadFiles] = None if self.file_diff: # We're getting files from a file diff file_reader = ReadFilesFromDiff( @@ -355,71 +356,15 @@ def load(self): self._perf_info.path_count = len(self.manifest.files) self._perf_info.read_files_elapsed = time.perf_counter() - start_read_files - skip_parsing = False - if self.saved_manifest is not None: - self.partial_parser = PartialParsing(self.saved_manifest, self.manifest.files) - skip_parsing = self.partial_parser.skip_parsing() - if skip_parsing: - # nothing changed, so we don't need to generate project_parser_files - self.manifest = self.saved_manifest - else: - # create child_map and parent_map - self.saved_manifest.build_parent_and_child_maps() - # create group_map - self.saved_manifest.build_group_map() - # files are different, we need to create a new set of - # project_parser_files. 
- try: - project_parser_files = self.partial_parser.get_parsing_files() - self.partially_parsing = True - self.manifest = self.saved_manifest - except Exception as exc: - # pp_files should still be the full set and manifest is new manifest, - # since get_parsing_files failed - fire_event( - UnableToPartialParse( - reason="an error occurred. Switching to full reparse." - ) - ) - - # Get traceback info - tb_info = traceback.format_exc() - formatted_lines = tb_info.splitlines() - (_, line, method) = formatted_lines[-3].split(", ") - exc_info = { - "traceback": tb_info, - "exception": formatted_lines[-1], - "code": formatted_lines[-2], - "location": f"{line} {method}", - } - - # get file info for local logs - parse_file_type = None - file_id = self.partial_parser.processing_file - if file_id: - source_file = None - if file_id in self.saved_manifest.files: - source_file = self.saved_manifest.files[file_id] - elif file_id in self.manifest.files: - source_file = self.manifest.files[file_id] - if source_file: - parse_file_type = source_file.parse_file_type - fire_event(PartialParsingErrorProcessingFile(file=file_id)) - exc_info["parse_file_type"] = parse_file_type - fire_event(PartialParsingError(exc_info=exc_info)) - - # Send event - if dbt.tracking.active_user is not None: - exc_info["full_reparse_reason"] = ReparseReason.exception - dbt.tracking.track_partial_parser(exc_info) - - if os.environ.get("DBT_PP_TEST"): - raise exc + self.skip_parsing = False + project_parser_files = self.safe_update_project_parser_files_partially( + project_parser_files + ) if self.manifest._parsing_info is None: self.manifest._parsing_info = ParsingInfo() - if skip_parsing: + if self.skip_parsing: fire_event(PartialParsingSkipParsing()) else: # Load Macros and tests @@ -458,6 +403,7 @@ def load(self): SeedParser, DocumentationParser, HookParser, + FixtureParser, ] for project in self.all_projects.values(): if project.project_name not in project_parser_files: @@ -475,7 +421,7 @@ def load(self): self.manifest.rebuild_disabled_lookup() # Load yaml files - parser_types = [SchemaParser] + parser_types = [SchemaParser] # type: ignore for project in self.all_projects.values(): if project.project_name not in project_parser_files: continue @@ -503,6 +449,7 @@ def load(self): self.manifest.selectors = self.root_project.manifest_selectors # inject any available external nodes + self.manifest.build_parent_and_child_maps() external_nodes_modified = self.inject_external_nodes() if external_nodes_modified: self.manifest.rebuild_ref_lookup() @@ -513,10 +460,14 @@ def load(self): start_process = time.perf_counter() self.process_sources(self.root_project.project_name) self.process_refs(self.root_project.project_name, self.root_project.dependencies) + self.process_unit_tests(self.root_project.project_name) self.process_docs(self.root_project) self.process_metrics(self.root_project) + self.process_saved_queries(self.root_project) + self.process_model_inferred_primary_keys() self.check_valid_group_config() self.check_valid_access_property() + self.check_valid_snapshot_config() semantic_manifest = SemanticManifest(self.manifest) if not semantic_manifest.validate(): @@ -533,7 +484,7 @@ def load(self): # Inject any available external nodes, reprocess refs if changes to the manifest were made. external_nodes_modified = False - if skip_parsing: + if self.skip_parsing: # If we didn't skip parsing, this will have already run because it must run # before process_refs. 
If we did skip parsing, then it's possible that only # external nodes have changed and we need to run this to capture that. @@ -547,36 +498,94 @@ def load(self): ) # parent and child maps will be rebuilt by write_manifest - if not skip_parsing: + if not self.skip_parsing or external_nodes_modified: # write out the fully parsed manifest self.write_manifest_for_partial_parse() self.check_for_model_deprecations() + self.check_for_spaces_in_resource_names() return self.manifest + def safe_update_project_parser_files_partially(self, project_parser_files: Dict) -> Dict: + if self.saved_manifest is None: + return project_parser_files + + self.partial_parser = PartialParsing(self.saved_manifest, self.manifest.files) # type: ignore[arg-type] + self.skip_parsing = self.partial_parser.skip_parsing() + if self.skip_parsing: + # nothing changed, so we don't need to generate project_parser_files + self.manifest = self.saved_manifest # type: ignore[assignment] + else: + # create child_map and parent_map + self.saved_manifest.build_parent_and_child_maps() # type: ignore[union-attr] + # create group_map + self.saved_manifest.build_group_map() # type: ignore[union-attr] + # files are different, we need to create a new set of + # project_parser_files. + try: + project_parser_files = self.partial_parser.get_parsing_files() + self.partially_parsing = True + self.manifest = self.saved_manifest # type: ignore[assignment] + except Exception as exc: + # pp_files should still be the full set and manifest is new manifest, + # since get_parsing_files failed + fire_event( + UnableToPartialParse(reason="an error occurred. Switching to full reparse.") + ) + + # Get traceback info + tb_info = traceback.format_exc() + # index last stack frame in traceback (i.e. lastest exception and its context) + tb_last_frame = traceback.extract_tb(exc.__traceback__)[-1] + exc_info = { + "traceback": tb_info, + "exception": tb_info.splitlines()[-1], + "code": tb_last_frame.line, # if the source is not available, it is None + "location": f"line {tb_last_frame.lineno} in {tb_last_frame.name}", + } + + # get file info for local logs + parse_file_type: str = "" + file_id = self.partial_parser.processing_file + if file_id: + source_file = None + if file_id in self.saved_manifest.files: + source_file = self.saved_manifest.files[file_id] + elif file_id in self.manifest.files: + source_file = self.manifest.files[file_id] + if source_file: + parse_file_type = source_file.parse_file_type + fire_event(PartialParsingErrorProcessingFile(file=file_id)) + exc_info["parse_file_type"] = parse_file_type + fire_event(PartialParsingError(exc_info=exc_info)) + # Send event + if dbt.tracking.active_user is not None: + exc_info["full_reparse_reason"] = ReparseReason.exception + dbt.tracking.track_partial_parser(exc_info) + + if os.environ.get("DBT_PP_TEST"): + raise exc + + return project_parser_files + def check_for_model_deprecations(self): for node in self.manifest.nodes.values(): - if isinstance(node, ModelNode): - if ( - node.deprecation_date - and node.deprecation_date < datetime.datetime.now().astimezone() - ): - warn_or_error( - DeprecatedModel( - model_name=node.name, - model_version=version_to_str(node.version), - deprecation_date=node.deprecation_date.isoformat(), - ) + if isinstance(node, ModelNode) and node.is_past_deprecation_date: + warn_or_error( + DeprecatedModel( + model_name=node.name, + model_version=version_to_str(node.version), + deprecation_date=node.deprecation_date.isoformat(), ) + ) resolved_refs = self.manifest.resolve_refs(node, 
self.root_project.project_name) resolved_model_refs = [r for r in resolved_refs if isinstance(r, ModelNode)] node.depends_on for resolved_ref in resolved_model_refs: if resolved_ref.deprecation_date: - - if resolved_ref.deprecation_date < datetime.datetime.now().astimezone(): + if resolved_ref.is_past_deprecation_date: event_cls = DeprecatedReference else: event_cls = UpcomingReferenceDeprecation @@ -592,6 +601,44 @@ def check_for_model_deprecations(self): ) ) + def check_for_spaces_in_resource_names(self): + """Validates that resource names do not contain spaces + + If `DEBUG` flag is `False`, logs only first bad model name + If `DEBUG` flag is `True`, logs every bad model name + If `REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES` is `True`, logs are `ERROR` level and an exception is raised if any names are bad + If `REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES` is `False`, logs are `WARN` level + """ + improper_resource_names = 0 + level = ( + EventLevel.ERROR + if self.root_project.args.REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES + else EventLevel.WARN + ) + + for node in self.manifest.nodes.values(): + if " " in node.name: + if improper_resource_names == 0 or self.root_project.args.DEBUG: + fire_event( + SpacesInResourceNameDeprecation( + unique_id=node.unique_id, + level=level.value, + ), + level=level, + ) + improper_resource_names += 1 + + if improper_resource_names > 0: + if level == EventLevel.WARN: + flags = get_flags() + dbt.deprecations.warn( + "resource-names-with-spaces", + count_invalid_names=improper_resource_names, + show_debug_hint=(not flags.DEBUG), + ) + else: # ERROR level + raise DbtValidationError("Resource names cannot contain spaces") + def load_and_parse_macros(self, project_parser_files): for project in self.all_projects.values(): if project.project_name not in project_parser_files: @@ -622,7 +669,7 @@ def load_and_parse_macros(self, project_parser_files): # 'parser_types' def parse_project( self, - project: Project, + project: RuntimeConfig, parser_files, parser_types: List[Type[Parser]], ) -> None: @@ -745,18 +792,25 @@ def write_manifest_for_partial_parse(self): def inject_external_nodes(self) -> bool: # Remove previously existing external nodes since we are regenerating them manifest_nodes_modified = False + # Remove all dependent nodes before removing referencing nodes for unique_id in self.manifest.external_node_unique_ids: - self.manifest.nodes.pop(unique_id) remove_dependent_project_references(self.manifest, unique_id) manifest_nodes_modified = True + for unique_id in self.manifest.external_node_unique_ids: + # remove external nodes from manifest only after dependent project references safely removed + self.manifest.nodes.pop(unique_id) # Inject any newly-available external nodes - pm = get_plugin_manager(self.root_project.project_name) + pm = plugins.get_plugin_manager(self.root_project.project_name) plugin_model_nodes = pm.get_nodes().models for node_arg in plugin_model_nodes.values(): node = ModelNode.from_args(node_arg) - # node may already exist from package or running project - in which case we should avoid clobbering it with an external node - if node.unique_id not in self.manifest.nodes: + # node may already exist from package or running project (even if it is disabled), + # in which case we should avoid clobbering it with an external node + if ( + node.unique_id not in self.manifest.nodes + and node.unique_id not in self.manifest.disabled + ): self.manifest.add_node_nofile(node) manifest_nodes_modified = True @@ -803,13 +857,6 @@ def is_partial_parsable(self, 
manifest: Manifest) -> Tuple[bool, Optional[str]]: ) valid = False reparse_reason = ReparseReason.proj_env_vars_changed - if ( - self.manifest.state_check.profile_env_vars_hash - != manifest.state_check.profile_env_vars_hash - ): - fire_event(UnableToPartialParse(reason="env vars used in profiles.yml have changed")) - valid = False - reparse_reason = ReparseReason.prof_env_vars_changed missing_keys = { k @@ -919,6 +966,9 @@ def build_manifest_state_check(self): # of env_vars, that would need to change. # We are using the parsed cli_vars instead of config.args.vars, in order # to sort them and avoid reparsing because of ordering issues. + secret_vars = [ + v for k, v in config.cli_vars.items() if k.startswith(SECRET_ENV_PREFIX) and v.strip() + ] stringified_cli_vars = pprint.pformat(config.cli_vars) vars_hash = FileHash.from_contents( "\x00".join( @@ -933,7 +983,7 @@ def build_manifest_state_check(self): fire_event( StateCheckVarsHash( checksum=vars_hash.checksum, - vars=stringified_cli_vars, + vars=scrub_secrets(stringified_cli_vars, secret_vars), profile=config.args.profile, target=config.args.target, version=__version__, @@ -948,18 +998,18 @@ def build_manifest_state_check(self): env_var_str += f"{key}:{config.project_env_vars[key]}|" project_env_vars_hash = FileHash.from_contents(env_var_str) - # Create a FileHash of the env_vars in the project - key_list = list(config.profile_env_vars.keys()) - key_list.sort() - env_var_str = "" - for key in key_list: - env_var_str += f"{key}:{config.profile_env_vars[key]}|" - profile_env_vars_hash = FileHash.from_contents(env_var_str) + # Create a hash of the connection_info, which user has access to in + # jinja context. Thus attributes here may affect the parsing result. + # Ideally we should not expose all of the connection info to the jinja. - # Create a FileHash of the profile file - profile_path = os.path.join(get_flags().PROFILES_DIR, "profiles.yml") - with open(profile_path) as fp: - profile_hash = FileHash.from_contents(fp.read()) + # Renaming this variable mean that we will have to do a whole lot more + # change to make sure the previous manifest can be loaded correctly. + # This is an example of naming should be chosen based on the functionality + # rather than the implementation details. + connection_keys = list(config.credentials.connection_info()) + # avoid reparsing because of ordering issues + connection_keys.sort() + profile_hash = FileHash.from_contents(pprint.pformat(connection_keys)) # Create a FileHashes for dbt_project for all dependencies project_hashes = {} @@ -971,7 +1021,6 @@ def build_manifest_state_check(self): # Create the ManifestStateCheck object state_check = ManifestStateCheck( project_env_vars_hash=project_env_vars_hash, - profile_env_vars_hash=profile_env_vars_hash, vars_hash=vars_hash, profile_hash=profile_hash, project_hashes=project_hashes, @@ -979,11 +1028,12 @@ def build_manifest_state_check(self): return state_check def save_macros_to_adapter(self, adapter): - macro_manifest = MacroManifest(self.manifest.macros) - adapter._macro_manifest_lazy = macro_manifest + adapter.set_macro_resolver(self.manifest) # This executes the callable macro_hook and sets the # query headers - self.macro_hook(macro_manifest) + # This executes the callable macro_hook and sets the query headers + query_header_context = generate_query_header_context(adapter.config, self.manifest) + self.macro_hook(query_header_context) # This creates a MacroManifest which contains the macros in # the adapter. 
Only called by the load_macros call from the @@ -1015,18 +1065,16 @@ def load_macros( macro_hook: Callable[[Manifest], Any], base_macros_only=False, ) -> Manifest: - with PARSING_STATE: - # base_only/base_macros_only: for testing only, - # allows loading macros without running 'dbt deps' first - projects = root_config.load_dependencies(base_only=base_macros_only) + # base_only/base_macros_only: for testing only, + # allows loading macros without running 'dbt deps' first + projects = root_config.load_dependencies(base_only=base_macros_only) - # This creates a loader object, including result, - # and then throws it away, returning only the - # manifest - loader = cls(root_config, projects, macro_hook) - macro_manifest = loader.create_macro_manifest() + # This creates a loader object, including result, + # and then throws it away, returning only the + # manifest + loader = cls(root_config, projects, macro_hook) - return macro_manifest + return loader.create_macro_manifest() # Create tracking event for saving performance info def track_project_load(self): @@ -1052,7 +1100,7 @@ def track_project_load(self): # Takes references in 'refs' array of nodes and exposures, finds the target # node, and updates 'depends_on.nodes' with the unique id - def process_refs(self, current_project: str, dependencies: Optional[Dict[str, Project]]): + def process_refs(self, current_project: str, dependencies: Optional[Mapping[str, Project]]): for node in self.manifest.nodes.values(): if node.created_at < self.started_at: continue @@ -1089,6 +1137,24 @@ def process_metrics(self, config: RuntimeConfig): continue _process_metrics_for_node(self.manifest, current_project, exposure) + def process_saved_queries(self, config: RuntimeConfig): + """Processes SavedQuery nodes to populate their `depends_on`.""" + current_project = config.project_name + for saved_query in self.manifest.saved_queries.values(): + # TODO: + # 1. process `where` of SavedQuery for `depends_on`s + # 2. process `group_by` of SavedQuery for `depends_on`` + _process_metrics_for_node(self.manifest, current_project, saved_query) + + def process_model_inferred_primary_keys(self): + """Processes Model nodes to populate their `primary_key`.""" + for node in self.manifest.nodes.values(): + if not isinstance(node, ModelNode): + continue + generic_tests = self._get_generic_tests_for_model(node) + primary_key = node.infer_primary_key(generic_tests) + node.primary_key = sorted(primary_key) + def update_semantic_model(self, semantic_model) -> None: # This has to be done at the end of parsing because the referenced model # might have alias/schema/database fields that are updated by yaml config. 
@@ -1101,10 +1167,12 @@ def update_semantic_model(self, semantic_model) -> None: database=refd_node.database, ) - # nodes: node and column descriptions + # nodes: node and column descriptions, version columns descriptions # sources: source and table descriptions, column descriptions # macros: macro argument descriptions # exposures: exposure descriptions + # metrics: metric descriptions + # semantic_models: semantic model descriptions def process_docs(self, config: RuntimeConfig): for node in self.manifest.nodes.values(): if node.created_at < self.started_at: @@ -1156,6 +1224,23 @@ def process_docs(self, config: RuntimeConfig): config.project_name, ) _process_docs_for_metrics(ctx, metric) + for semantic_model in self.manifest.semantic_models.values(): + if semantic_model.created_at < self.started_at: + continue + ctx = generate_runtime_docs_context( + config, + semantic_model, + self.manifest, + config.project_name, + ) + _process_docs_for_semantic_model(ctx, semantic_model) + for saved_query in self.manifest.saved_queries.values(): + if saved_query.created_at < self.started_at: + continue + ctx = generate_runtime_docs_context( + config, saved_query, self.manifest, config.project_name + ) + _process_docs_for_saved_query(ctx, saved_query) # Loops through all nodes and exposures, for each element in # 'sources' array finds the source node and updates the @@ -1173,6 +1258,27 @@ def process_sources(self, current_project: str): continue _process_sources_for_exposure(self.manifest, current_project, exposure) + # Loops through all nodes, for each element in + # 'unit_test' array finds the node and updates the + # 'depends_on.nodes' array with the unique id + def process_unit_tests(self, current_project: str): + models_to_versions = None + unit_test_unique_ids = list(self.manifest.unit_tests.keys()) + for unit_test_unique_id in unit_test_unique_ids: + # This is because some unit tests will be removed when processing + # and the list of unit_test_unique_ids won't have changed + if unit_test_unique_id in self.manifest.unit_tests: + unit_test = self.manifest.unit_tests[unit_test_unique_id] + else: + continue + if unit_test.created_at < self.started_at: + continue + if not models_to_versions: + models_to_versions = _build_model_names_to_versions(self.manifest) + process_models_for_unit_test( + self.manifest, current_project, unit_test, models_to_versions + ) + def cleanup_disabled(self): # make sure the nodes are in the manifest.nodes or the disabled dict, # correctly now that the schema files are also parsed @@ -1205,11 +1311,19 @@ def check_valid_group_config(self): for metric in manifest.metrics.values(): self.check_valid_group_config_node(metric, group_names) + for semantic_model in manifest.semantic_models.values(): + self.check_valid_group_config_node(semantic_model, group_names) + + for saved_query in manifest.saved_queries.values(): + self.check_valid_group_config_node(saved_query, group_names) + for node in manifest.nodes.values(): self.check_valid_group_config_node(node, group_names) def check_valid_group_config_node( - self, groupable_node: Union[Metric, ManifestNode], valid_group_names: Set[str] + self, + groupable_node: Union[Metric, SavedQuery, SemanticModel, ManifestNode], + valid_group_names: Set[str], ): groupable_node_group = groupable_node.group if groupable_node_group and groupable_node_group not in valid_group_names: @@ -1231,11 +1345,39 @@ def check_valid_access_property(self): materialization=node.get_materialization(), ) + def check_valid_snapshot_config(self): + # Snapshot 
config can be set in either SQL files or yaml files, + # so we need to validate afterward. + for node in self.manifest.nodes.values(): + if node.resource_type != NodeType.Snapshot: + continue + if node.created_at < self.started_at: + continue + node.config.final_validate() + def write_perf_info(self, target_path: str): path = os.path.join(target_path, PERF_INFO_FILE_NAME) write_file(path, json.dumps(self._perf_info, cls=dbt.utils.JSONEncoder, indent=4)) fire_event(ParsePerfInfoPath(path=path)) + def _get_generic_tests_for_model( + self, + model: ModelNode, + ) -> List[GenericTestNode]: + """Return a list of generic tests that are attached to the given model, including disabled tests""" + tests = [] + for _, node in self.manifest.nodes.items(): + if isinstance(node, GenericTestNode) and node.attached_node == model.unique_id: + tests.append(node) + for _, nodes in self.manifest.disabled.items(): + for disabled_node in nodes: + if ( + isinstance(disabled_node, GenericTestNode) + and disabled_node.attached_node == model.unique_id + ): + tests.append(disabled_node) + return tests + def invalid_target_fail_unless_test( node, @@ -1281,6 +1423,21 @@ def invalid_target_fail_unless_test( ) +def _build_model_names_to_versions(manifest: Manifest) -> Dict[str, Dict]: + model_names_to_versions: Dict[str, Dict] = {} + for node in manifest.nodes.values(): + if node.resource_type != NodeType.Model: + continue + if not node.is_versioned: + continue + if node.package_name not in model_names_to_versions: + model_names_to_versions[node.package_name] = {} + if node.name not in model_names_to_versions[node.package_name]: + model_names_to_versions[node.package_name][node.name] = [] + model_names_to_versions[node.package_name][node.name].append(node.unique_id) + return model_names_to_versions + + def _check_resource_uniqueness( manifest: Manifest, config: RuntimeConfig, @@ -1301,7 +1458,7 @@ def _check_resource_uniqueness( # the full node name is really defined by the adapter's relation relation_cls = get_relation_class_by_name(config.credentials.type) - relation = relation_cls.create_from(config=config, node=node) + relation = relation_cls.create_from(quoting=config, relation_config=node) # type: ignore[arg-type] full_node_name = str(relation) existing_alias = alias_resources.get(full_node_name) @@ -1385,6 +1542,30 @@ def _process_docs_for_metrics(context: Dict[str, Any], metric: Metric) -> None: metric.description = get_rendered(metric.description, context) +def _process_docs_for_semantic_model( + context: Dict[str, Any], semantic_model: SemanticModel +) -> None: + if semantic_model.description: + semantic_model.description = get_rendered(semantic_model.description, context) + + for dimension in semantic_model.dimensions: + if dimension.description: + dimension.description = get_rendered(dimension.description, context) + + for measure in semantic_model.measures: + if measure.description: + measure.description = get_rendered(measure.description, context) + + for entity in semantic_model.entities: + if entity.description: + entity.description = get_rendered(entity.description, context) + + +def _process_docs_for_saved_query(context: Dict[str, Any], saved_query: SavedQuery) -> None: + if saved_query.description: + saved_query.description = get_rendered(saved_query.description, context) + + def _process_refs( manifest: Manifest, current_project: str, node, dependencies: Optional[Mapping[str, Project]] ) -> None: @@ -1435,7 +1616,7 @@ def _process_refs( unique_id=node.unique_id, ref_unique_id=target_model.unique_id, 
access=AccessType.Private, - scope=dbt.utils.cast_to_str(target_model.group), + scope=dbt_common.utils.cast_to_str(target_model.group), ) elif manifest.is_invalid_protected_ref(node, target_model, dependencies): raise dbt.exceptions.DbtReferenceError( @@ -1449,6 +1630,34 @@ def _process_refs( node.depends_on.add_node(target_model_id) +def _process_metric_depends_on( + manifest: Manifest, + current_project: str, + metric: Metric, +) -> None: + """For a given metric, set the `depends_on` property""" + + assert len(metric.type_params.input_measures) > 0 + for input_measure in metric.type_params.input_measures: + target_semantic_model = manifest.resolve_semantic_model_for_measure( + target_measure_name=input_measure.name, + current_project=current_project, + node_package=metric.package_name, + ) + if target_semantic_model is None: + raise dbt.exceptions.ParsingError( + f"A semantic model having a measure `{input_measure.name}` does not exist but was referenced.", + node=metric, + ) + if target_semantic_model.config.enabled is False: + raise dbt.exceptions.ParsingError( + f"The measure `{input_measure.name}` is referenced on disabled semantic model `{target_semantic_model.name}`.", + node=metric, + ) + + metric.depends_on.add_node(target_semantic_model.unique_id) + + def _process_metric_node( manifest: Manifest, current_project: str, @@ -1467,20 +1676,20 @@ def _process_metric_node( assert ( metric.type_params.measure is not None ), f"{metric} should have a measure defined, but it does not." - metric.type_params.input_measures.append(metric.type_params.measure) - target_semantic_model = manifest.resolve_semantic_model_for_measure( - target_measure_name=metric.type_params.measure.name, - current_project=current_project, - node_package=metric.package_name, + metric.add_input_measure(metric.type_params.measure) + _process_metric_depends_on( + manifest=manifest, current_project=current_project, metric=metric + ) + elif metric.type is MetricType.CONVERSION: + conversion_type_params = metric.type_params.conversion_type_params + assert ( + conversion_type_params + ), f"{metric.name} is a conversion metric and must have conversion_type_params defined." 
+ metric.add_input_measure(conversion_type_params.base_measure) + metric.add_input_measure(conversion_type_params.conversion_measure) + _process_metric_depends_on( + manifest=manifest, current_project=current_project, metric=metric ) - if target_semantic_model is None: - raise dbt.exceptions.ParsingError( - f"A semantic model having a measure `{metric.type_params.measure.name}` does not exist but was referenced.", - node=metric, - ) - - metric.depends_on.add_node(target_semantic_model.unique_id) - elif metric.type is MetricType.DERIVED or metric.type is MetricType.RATIO: input_metrics = metric.input_metrics if metric.type is MetricType.RATIO: @@ -1513,7 +1722,8 @@ def _process_metric_node( _process_metric_node( manifest=manifest, current_project=current_project, metric=target_metric ) - metric.type_params.input_measures.extend(target_metric.type_params.input_measures) + for input_measure in target_metric.type_params.input_measures: + metric.add_input_measure(input_measure) metric.depends_on.add_node(target_metric.unique_id) else: assert_values_exhausted(metric.type) @@ -1522,14 +1732,19 @@ def _process_metric_node( def _process_metrics_for_node( manifest: Manifest, current_project: str, - node: Union[ManifestNode, Metric, Exposure], + node: Union[ManifestNode, Metric, Exposure, SavedQuery], ): """Given a manifest and a node in that manifest, process its metrics""" + metrics: List[List[str]] if isinstance(node, SeedNode): return + elif isinstance(node, SavedQuery): + metrics = [[metric] for metric in node.metrics] + else: + metrics = node.metrics - for metric in node.metrics: + for metric in metrics: target_metric: Optional[Union[Disabled, Metric]] = None target_metric_name: str target_metric_package: Optional[str] = None @@ -1622,7 +1837,6 @@ def _process_sources_for_metric(manifest: Manifest, current_project: str, metric def _process_sources_for_node(manifest: Manifest, current_project: str, node: ManifestNode): - if isinstance(node, SeedNode): return @@ -1636,7 +1850,7 @@ def _process_sources_for_node(manifest: Manifest, current_project: str, node: Ma ) if target_source is None or isinstance(target_source, Disabled): - # this folows the same pattern as refs + # this follows the same pattern as refs node.config.enabled = False invalid_target_fail_unless_test( node=node, @@ -1664,7 +1878,6 @@ def process_macro(config: RuntimeConfig, manifest: Manifest, macro: Macro) -> No # This is called in task.rpc.sql_commands when a "dynamic" node is # created in the manifest, in 'add_refs' def process_node(config: RuntimeConfig, manifest: Manifest, node: ManifestNode): - _process_sources_for_node(manifest, config.project_name, node) _process_refs(manifest, config.project_name, node, config.dependencies) ctx = generate_runtime_docs_context(config, node, manifest, config.project_name) @@ -1677,8 +1890,33 @@ def write_semantic_manifest(manifest: Manifest, target_path: str) -> None: semantic_manifest.write_json_to_file(path) -def write_manifest(manifest: Manifest, target_path: str): - path = os.path.join(target_path, MANIFEST_FILE_NAME) +def write_manifest(manifest: Manifest, target_path: str, which: Optional[str] = None): + file_name = MANIFEST_FILE_NAME + path = os.path.join(target_path, file_name) manifest.write(path) write_semantic_manifest(manifest=manifest, target_path=target_path) + + +def parse_manifest( + runtime_config: RuntimeConfig, + write_perf_info: bool, + write: bool, + write_json: bool, +) -> Manifest: + register_adapter(runtime_config, get_mp_context()) + adapter = 
get_adapter(runtime_config) + adapter.set_macro_context_generator(generate_runtime_macro_context) + manifest = ManifestLoader.get_full_manifest( + runtime_config, + write_perf_info=write_perf_info, + ) + + # If we should (over)write the manifest in the target path, do that now + if write and write_json: + write_manifest(manifest, runtime_config.project_target_path) + pm = plugins.get_plugin_manager(runtime_config.project_name) + plugin_artifacts = pm.get_manifest_artifacts(manifest) + for path, plugin_artifact in plugin_artifacts.items(): + plugin_artifact.write(path) + return manifest diff --git a/core/dbt/parser/models.py b/core/dbt/parser/models.py index 4fe6cd56082..06e11a89649 100644 --- a/core/dbt/parser/models.py +++ b/core/dbt/parser/models.py @@ -1,41 +1,40 @@ +# New for Python models :p +import ast +import random from copy import deepcopy -from dbt.context.context_config import ContextConfig -from dbt.contracts.graph.nodes import ModelNode, RefArgs -from dbt.events.base_types import EventLevel -from dbt.events.types import Note -from dbt.events.functions import fire_event_if_test -from dbt.flags import get_flags -from dbt.node_types import NodeType, ModelLanguage -from dbt.parser.base import SimpleSQLParser -from dbt.parser.search import FileBlock -from dbt.clients.jinja import get_rendered -import dbt.tracking as tracking -from dbt import utils -from dbt_extractor import ExtractionError, py_extract_from_source # type: ignore from functools import reduce from itertools import chain -import random from typing import Any, Dict, Iterator, List, Optional, Tuple, Union -# New for Python models :p -import ast -from dbt.dataclass_schema import ValidationError +import dbt.tracking as tracking +from dbt import utils +from dbt.artifacts.resources import RefArgs +from dbt.clients.jinja import get_rendered +from dbt.context.context_config import ContextConfig +from dbt.contracts.graph.nodes import ModelNode from dbt.exceptions import ( ModelConfigError, ParsingError, PythonLiteralEvalError, PythonParsingError, - UndefinedMacroError, ) +from dbt.flags import get_flags +from dbt.node_types import ModelLanguage, NodeType +from dbt.parser.base import SimpleSQLParser +from dbt.parser.search import FileBlock +from dbt_common.contracts.config.base import merge_config_dicts +from dbt_common.dataclass_schema import ValidationError +from dbt_common.exceptions.macros import UndefinedMacroError +from dbt_extractor import ExtractionError, py_extract_from_source # type: ignore dbt_function_key_words = set(["ref", "source", "config", "get"]) dbt_function_full_names = set(["dbt.ref", "dbt.source", "dbt.config", "dbt.config.get"]) class PythonValidationVisitor(ast.NodeVisitor): - def __init__(self): + def __init__(self) -> None: super().__init__() - self.dbt_errors = [] + self.dbt_errors: List[str] = [] self.num_model_def = 0 def visit_FunctionDef(self, node: ast.FunctionDef) -> None: @@ -133,6 +132,12 @@ def visit_Call(self, node: ast.Call) -> None: for value in obj.values: if isinstance(value, ast.Call): self.visit_Call(value) + # support dbt function calls in f-strings + elif isinstance(obj, ast.JoinedStr): + for value in obj.values: + if isinstance(value, ast.FormattedValue) and isinstance(value.value, ast.Call): + self.visit_Call(value.value) + # visit node.func.value if we are at an call attr if isinstance(node.func, ast.Attribute): self.attribute_helper(node.func) @@ -200,7 +205,7 @@ def parse_python_model(self, node, config, context): dbt_parser = PythonParseVisitor(node) dbt_parser.visit(tree) - for 
(func, args, kwargs) in dbt_parser.dbt_function_calls: + for func, args, kwargs in dbt_parser.dbt_function_calls: if func == "get": num_args = len(args) if num_args == 0: @@ -246,12 +251,6 @@ def render_update(self, node: ModelNode, config: ContextConfig) -> None: elif not flags.STATIC_PARSER: # jinja rendering super().render_update(node, config) - fire_event_if_test( - lambda: Note( - msg=f"1605: jinja rendering because of STATIC_PARSER flag. file: {node.path}" - ), - EventLevel.DEBUG, - ) return # only sample for experimental parser correctness on normal runs, @@ -285,10 +284,6 @@ def render_update(self, node: ModelNode, config: ContextConfig) -> None: # sample the experimental parser only during a normal run if exp_sample and not flags.USE_EXPERIMENTAL_PARSER: - fire_event_if_test( - lambda: Note(msg=f"1610: conducting experimental parser sample on {node.path}"), - EventLevel.DEBUG, - ) experimental_sample = self.run_experimental_parser(node) # if the experimental parser succeeded, make a full copy of model parser # and populate _everything_ into it so it can be compared apples-to-apples @@ -318,12 +313,6 @@ def render_update(self, node: ModelNode, config: ContextConfig) -> None: # sampling rng here, but the effect would be the same since we would only roll # it 40% of the time. So I've opted to keep all the rng code colocated above. if stable_sample and not flags.USE_EXPERIMENTAL_PARSER: - fire_event_if_test( - lambda: Note( - msg=f"1611: conducting full jinja rendering sample on {node.path}" - ), - EventLevel.DEBUG, - ) # if this will _never_ mutate anything `self` we could avoid these deep copies, # but we can't really guarantee that going forward. model_parser_copy = self.partial_deepcopy() @@ -358,11 +347,6 @@ def render_update(self, node: ModelNode, config: ContextConfig) -> None: else: # jinja rendering super().render_update(node, config) - # only for test purposes - fire_event_if_test( - lambda: Note(msg=f"1602: parser fallback to jinja rendering on {node.path}"), - EventLevel.DEBUG, - ) # if sampling, add the correct messages for tracking if exp_sample and isinstance(experimental_sample, str): @@ -395,33 +379,16 @@ def render_update(self, node: ModelNode, config: ContextConfig) -> None: def run_static_parser(self, node: ModelNode) -> Optional[Union[str, Dict[str, List[Any]]]]: # if any banned macros have been overridden by the user, we cannot use the static parser. if self._has_banned_macro(node): - # this log line is used for integration testing. If you change - # the code at the beginning of the line change the tests in - # test/integration/072_experimental_parser_tests/test_all_experimental_parser.py - fire_event_if_test( - lambda: Note( - msg=f"1601: detected macro override of ref/source/config in the scope of {node.path}" - ), - EventLevel.DEBUG, - ) return "has_banned_macro" # run the stable static parser and return the results try: statically_parsed = py_extract_from_source(node.raw_code) - fire_event_if_test( - lambda: Note(msg=f"1699: static parser successfully parsed {node.path}"), - EventLevel.DEBUG, - ) return _shift_sources(statically_parsed) # if we want information on what features are barring the static # parser from reading model files, this is where we would add that # since that information is stored in the `ExtractionError`. 
except ExtractionError: - fire_event_if_test( - lambda: Note(msg=f"1603: static parser failed on {node.path}"), - EventLevel.DEBUG, - ) return "cannot_parse" def run_experimental_parser( @@ -429,15 +396,6 @@ def run_experimental_parser( ) -> Optional[Union[str, Dict[str, List[Any]]]]: # if any banned macros have been overridden by the user, we cannot use the static parser. if self._has_banned_macro(node): - # this log line is used for integration testing. If you change - # the code at the beginning of the line change the tests in - # test/integration/072_experimental_parser_tests/test_all_experimental_parser.py - fire_event_if_test( - lambda: Note( - msg=f"1601: detected macro override of ref/source/config in the scope of {node.path}" - ), - EventLevel.DEBUG, - ) return "has_banned_macro" # run the experimental parser and return the results @@ -446,19 +404,11 @@ def run_experimental_parser( # experimental features. Change `py_extract_from_source` to the new # experimental call when we add additional features. experimentally_parsed = py_extract_from_source(node.raw_code) - fire_event_if_test( - lambda: Note(msg=f"1698: experimental parser successfully parsed {node.path}"), - EventLevel.DEBUG, - ) return _shift_sources(experimentally_parsed) # if we want information on what features are barring the experimental # parser from reading model files, this is where we would add that # since that information is stored in the `ExtractionError`. except ExtractionError: - fire_event_if_test( - lambda: Note(msg=f"1604: experimental parser failed on {node.path}"), - EventLevel.DEBUG, - ) return "cannot_parse" # checks for banned macros @@ -518,7 +468,7 @@ def _get_config_call_dict(static_parser_result: Dict[str, Any]) -> Dict[str, Any config_call_dict: Dict[str, Any] = {} for c in static_parser_result["configs"]: - ContextConfig._add_config_call(config_call_dict, {c[0]: c[1]}) + merge_config_dicts(config_call_dict, {c[0]: c[1]}) return config_call_dict diff --git a/core/dbt/parser/partial.py b/core/dbt/parser/partial.py index edcae83574a..db3400dbc58 100644 --- a/core/dbt/parser/partial.py +++ b/core/dbt/parser/partial.py @@ -1,22 +1,20 @@ import os from copy import deepcopy -from typing import MutableMapping, Dict, List, Callable -from dbt.contracts.graph.manifest import Manifest +from typing import Callable, Dict, List, MutableMapping + +from dbt.constants import DEFAULT_ENV_PLACEHOLDER from dbt.contracts.files import ( AnySourceFile, ParseFileType, - parse_file_type_to_parser, SchemaSourceFile, + parse_file_type_to_parser, ) -from dbt.events.functions import fire_event -from dbt.events.base_types import EventLevel -from dbt.events.types import ( - PartialParsingEnabled, - PartialParsingFile, -) -from dbt.constants import DEFAULT_ENV_PLACEHOLDER +from dbt.contracts.graph.manifest import Manifest +from dbt.events.types import PartialParsingEnabled, PartialParsingFile from dbt.node_types import NodeType - +from dbt_common.context import get_invocation_context +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event mssat_files = ( ParseFileType.Model, @@ -68,7 +66,9 @@ # to preserve an unchanged file object in case we need to drop back to a # a full parse (such as for certain macro changes) class PartialParsing: - def __init__(self, saved_manifest: Manifest, new_files: MutableMapping[str, AnySourceFile]): + def __init__( + self, saved_manifest: Manifest, new_files: MutableMapping[str, AnySourceFile] + ) -> None: self.saved_manifest = saved_manifest 
self.new_files = new_files self.project_parser_files: Dict = {} @@ -157,7 +157,8 @@ def build_file_diff(self): deleted = len(deleted) + len(deleted_schema_files) changed = len(changed) + len(changed_schema_files) event = PartialParsingEnabled(deleted=deleted, added=len(added), changed=changed) - if os.environ.get("DBT_PP_TEST"): + + if get_invocation_context().env.get("DBT_PP_TEST"): fire_event(event, level=EventLevel.INFO) else: fire_event(event) @@ -206,6 +207,7 @@ def add_to_pp_files(self, source_file): if ( file_id not in self.project_parser_files[project_name][parser_name] and file_id not in self.file_diff["deleted"] + and file_id not in self.file_diff["deleted_schema_files"] ): self.project_parser_files[project_name][parser_name].append(file_id) @@ -278,6 +280,10 @@ def delete_from_saved(self, file_id): if saved_source_file.parse_file_type == ParseFileType.Documentation: self.delete_doc_node(saved_source_file) + # fixtures + if saved_source_file.parse_file_type == ParseFileType.Fixture: + self.delete_fixture_node(saved_source_file) + fire_event(PartialParsingFile(operation="deleted", file_id=file_id)) # Updates for non-schema files @@ -291,6 +297,8 @@ def update_in_saved(self, file_id): self.update_macro_in_saved(new_source_file, old_source_file) elif new_source_file.parse_file_type == ParseFileType.Documentation: self.update_doc_in_saved(new_source_file, old_source_file) + elif new_source_file.parse_file_type == ParseFileType.Fixture: + self.update_fixture_in_saved(new_source_file, old_source_file) else: raise Exception(f"Invalid parse_file_type in source_file {file_id}") fire_event(PartialParsingFile(operation="updated", file_id=file_id)) @@ -336,7 +344,10 @@ def remove_node_in_saved(self, source_file, unique_id): file_id = node.patch_path # it might be changed... then what? if file_id not in self.file_diff["deleted"] and file_id in self.saved_files: - # schema_files should already be updated + # Schema files should already be updated if this comes from a node, + # but this code is also called when updating groups and exposures. 
+ # This might save the old schema file element, so when the schema file + # is processed, it should overwrite it by passing True to "merge_patch" schema_file = self.saved_files[file_id] dict_key = parse_file_type_to_key[source_file.parse_file_type] # look for a matching list dictionary @@ -372,6 +383,13 @@ def update_doc_in_saved(self, new_source_file, old_source_file): self.saved_files[new_source_file.file_id] = deepcopy(new_source_file) self.add_to_pp_files(new_source_file) + def update_fixture_in_saved(self, new_source_file, old_source_file): + if self.already_scheduled_for_parsing(old_source_file): + return + self.delete_fixture_node(old_source_file) + self.saved_files[new_source_file.file_id] = deepcopy(new_source_file) + self.add_to_pp_files(new_source_file) + def remove_mssat_file(self, source_file): # nodes [unique_ids] -- SQL files # There should always be a node for a SQL file @@ -425,6 +443,11 @@ def schedule_nodes_for_parsing(self, unique_ids): semantic_model.name, self.delete_schema_semantic_model, ) + elif unique_id in self.saved_manifest.saved_queries: + saved_query = self.saved_manifest.saved_queries[unique_id] + self._schedule_for_parsing( + "saved_queries", saved_query, saved_query.name, self.delete_schema_saved_query + ) elif unique_id in self.saved_manifest.macros: macro = self.saved_manifest.macros[unique_id] file_id = macro.file_id @@ -433,10 +456,19 @@ def schedule_nodes_for_parsing(self, unique_ids): self.delete_macro_file(source_file) self.saved_files[file_id] = deepcopy(self.new_files[file_id]) self.add_to_pp_files(self.saved_files[file_id]) + elif unique_id in self.saved_manifest.unit_tests: + unit_test = self.saved_manifest.unit_tests[unique_id] + self._schedule_for_parsing( + "unit_tests", unit_test, unit_test.name, self.delete_schema_unit_test + ) def _schedule_for_parsing(self, dict_key: str, element, name, delete: Callable) -> None: file_id = element.file_id - if file_id in self.saved_files and file_id not in self.file_diff["deleted"]: + if ( + file_id in self.saved_files + and file_id not in self.file_diff["deleted"] + and file_id not in self.file_diff["deleted_schema_files"] + ): schema_file = self.saved_files[file_id] elements = [] assert isinstance(schema_file, SchemaSourceFile) @@ -569,6 +601,20 @@ def delete_doc_node(self, source_file): # Remove the file object self.saved_manifest.files.pop(source_file.file_id) + def delete_fixture_node(self, source_file): + # remove fixtures from the "fixtures" dictionary + fixture_unique_id = source_file.fixture + self.saved_manifest.fixtures.pop(fixture_unique_id) + unit_tests = source_file.unit_tests.copy() + for unique_id in unit_tests: + unit_test = self.saved_manifest.unit_tests.pop(unique_id) + # schedule unit_test for parsing + self._schedule_for_parsing( + "unit_tests", unit_test, unit_test.name, self.delete_schema_unit_test + ) + source_file.unit_tests.remove(unique_id) + self.saved_manifest.files.pop(source_file.file_id) + # Schema files ----------------------- # Changed schema files def change_schema_file(self, file_id): @@ -598,7 +644,7 @@ def delete_schema_file(self, file_id): self.saved_manifest.files.pop(file_id) # For each key in a schema file dictionary, process the changed, deleted, and added - # elemnts for the key lists + # elements for the key lists def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict): # loop through comparing previous dict_from_yaml with current dict_from_yaml # Need to do the deleted/added/changed thing, just like the files lists @@ -613,13 +659,13 
@@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict if key_diff["changed"]: for elem in key_diff["changed"]: self.delete_schema_mssa_links(schema_file, dict_key, elem) - self.merge_patch(schema_file, dict_key, elem) + self.merge_patch(schema_file, dict_key, elem, True) if key_diff["deleted"]: for elem in key_diff["deleted"]: self.delete_schema_mssa_links(schema_file, dict_key, elem) if key_diff["added"]: for elem in key_diff["added"]: - self.merge_patch(schema_file, dict_key, elem) + self.merge_patch(schema_file, dict_key, elem, True) # Handle schema file updates due to env_var changes if dict_key in env_var_changes and dict_key in new_yaml_dict: for name in env_var_changes[dict_key]: @@ -628,7 +674,7 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict elem = self.get_schema_element(new_yaml_dict[dict_key], name) if elem: self.delete_schema_mssa_links(schema_file, dict_key, elem) - self.merge_patch(schema_file, dict_key, elem) + self.merge_patch(schema_file, dict_key, elem, True) # sources dict_key = "sources" @@ -638,7 +684,7 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict if "overrides" in source: # This is a source patch; need to re-parse orig source self.remove_source_override_target(source) self.delete_schema_source(schema_file, source) - self.merge_patch(schema_file, dict_key, source) + self.merge_patch(schema_file, dict_key, source, True) if source_diff["deleted"]: for source in source_diff["deleted"]: if "overrides" in source: # This is a source patch; need to re-parse orig source @@ -648,7 +694,7 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict for source in source_diff["added"]: if "overrides" in source: # This is a source patch; need to re-parse orig source self.remove_source_override_target(source) - self.merge_patch(schema_file, dict_key, source) + self.merge_patch(schema_file, dict_key, source, True) # Handle schema file updates due to env_var changes if dict_key in env_var_changes and dict_key in new_yaml_dict: for name in env_var_changes[dict_key]: @@ -659,7 +705,7 @@ def handle_schema_file_changes(self, schema_file, saved_yaml_dict, new_yaml_dict if "overrides" in source: self.remove_source_override_target(source) self.delete_schema_source(schema_file, source) - self.merge_patch(schema_file, dict_key, source) + self.merge_patch(schema_file, dict_key, source, True) def handle_change(key: str, delete: Callable): self._handle_element_change( @@ -671,6 +717,8 @@ def handle_change(key: str, delete: Callable): handle_change("metrics", self.delete_schema_metric) handle_change("groups", self.delete_schema_group) handle_change("semantic_models", self.delete_schema_semantic_model) + handle_change("unit_tests", self.delete_schema_unit_test) + handle_change("saved_queries", self.delete_schema_saved_query) def _handle_element_change( self, schema_file, saved_yaml_dict, new_yaml_dict, env_var_changes, dict_key: str, delete @@ -679,13 +727,13 @@ def _handle_element_change( if element_diff["changed"]: for element in element_diff["changed"]: delete(schema_file, element) - self.merge_patch(schema_file, dict_key, element) + self.merge_patch(schema_file, dict_key, element, True) if element_diff["deleted"]: for element in element_diff["deleted"]: delete(schema_file, element) if element_diff["added"]: for element in element_diff["added"]: - self.merge_patch(schema_file, dict_key, element) + self.merge_patch(schema_file, dict_key, element, True) # Handle schema 
file updates due to env_var changes if dict_key in env_var_changes and dict_key in new_yaml_dict: for name in env_var_changes[dict_key]: @@ -694,7 +742,7 @@ def _handle_element_change( elem = self.get_schema_element(new_yaml_dict[dict_key], name) if elem: delete(schema_file, elem) - self.merge_patch(schema_file, dict_key, elem) + self.merge_patch(schema_file, dict_key, elem, True) # Take a "section" of the schema file yaml dictionary from saved and new schema files # and determine which parts have changed @@ -737,8 +785,10 @@ def get_diff_for(self, key, saved_yaml_dict, new_yaml_dict): } return diff - # Merge a patch file into the pp_dict in a schema file - def merge_patch(self, schema_file, key, patch): + # Merge a patch file into the pp_dict in a schema file. The "new_patch" + # flag indicates that we're processing a schema file, so if a matching + # patch has already been scheduled, replace it. + def merge_patch(self, schema_file, key, patch, new_patch=False): if schema_file.pp_dict is None: schema_file.pp_dict = {} pp_dict = schema_file.pp_dict @@ -746,12 +796,17 @@ def merge_patch(self, schema_file, key, patch): pp_dict[key] = [patch] else: # check that this patch hasn't already been saved - found = False + found_elem = None for elem in pp_dict[key]: if elem["name"] == patch["name"]: - found = True - if not found: + found_elem = elem + if not found_elem: + pp_dict[key].append(patch) + elif found_elem and new_patch: + # remove patch and replace with new one + pp_dict[key].remove(found_elem) pp_dict[key].append(patch) + schema_file.delete_from_env_vars(key, patch["name"]) self.add_to_pp_files(schema_file) @@ -793,8 +848,9 @@ def delete_schema_mssa_links(self, schema_file, dict_key, elem): # if the node's group has changed - need to reparse all referencing nodes to ensure valid ref access if node.group != elem.get("group"): self.schedule_referencing_nodes_for_parsing(node.unique_id) - # if the node's latest version has changed - need to reparse all referencing nodes to ensure correct ref resolution - if node.is_versioned and node.latest_version != elem.get("latest_version"): + # If the latest version has changed or a version has been removed we need to + # reparse referencing nodes. 
+ if node.is_versioned: self.schedule_referencing_nodes_for_parsing(node.unique_id) # remove from patches schema_file.node_patches.remove(elem_unique_id) @@ -883,6 +939,20 @@ def delete_schema_metric(self, schema_file, metric_dict): elif unique_id in self.saved_manifest.disabled: self.delete_disabled(unique_id, schema_file.file_id) + def delete_schema_saved_query(self, schema_file, saved_query_dict): + saved_query_name = saved_query_dict["name"] + saved_queries = schema_file.saved_queries.copy() + for unique_id in saved_queries: + if unique_id in self.saved_manifest.saved_queries: + saved_query = self.saved_manifest.saved_queries[unique_id] + if saved_query.name == saved_query_name: + # Need to find everything that referenced this saved_query and schedule for parsing + if unique_id in self.saved_manifest.child_map: + self.schedule_nodes_for_parsing(self.saved_manifest.child_map[unique_id]) + self.saved_manifest.saved_queries.pop(unique_id) + elif unique_id in self.saved_manifest.disabled: + self.delete_disabled(unique_id, schema_file.file_id) + def delete_schema_semantic_model(self, schema_file, semantic_model_dict): semantic_model_name = semantic_model_dict["name"] semantic_models = schema_file.semantic_models.copy() @@ -890,11 +960,37 @@ def delete_schema_semantic_model(self, schema_file, semantic_model_dict): if unique_id in self.saved_manifest.semantic_models: semantic_model = self.saved_manifest.semantic_models[unique_id] if semantic_model.name == semantic_model_name: + # Need to find everything that referenced this semantic model and schedule for parsing + if unique_id in self.saved_manifest.child_map: + self.schedule_nodes_for_parsing(self.saved_manifest.child_map[unique_id]) self.saved_manifest.semantic_models.pop(unique_id) schema_file.semantic_models.remove(unique_id) elif unique_id in self.saved_manifest.disabled: self.delete_disabled(unique_id, schema_file.file_id) + if schema_file.generated_metrics: + # If this partial parse file has an old "generated_metrics" list, + # call code to fix it up before processing. + schema_file.fix_metrics_from_measures() + if semantic_model_name in schema_file.metrics_from_measures: + for unique_id in schema_file.metrics_from_measures[semantic_model_name]: + if unique_id in self.saved_manifest.metrics: + self.saved_manifest.metrics.pop(unique_id) + elif unique_id in self.saved_manifest.disabled: + self.delete_disabled(unique_id, schema_file.file_id) + del schema_file.metrics_from_measures[semantic_model_name] + + def delete_schema_unit_test(self, schema_file, unit_test_dict): + unit_test_name = unit_test_dict["name"] + unit_tests = schema_file.unit_tests.copy() + for unique_id in unit_tests: + if unique_id in self.saved_manifest.unit_tests: + unit_test = self.saved_manifest.unit_tests[unique_id] + if unit_test.name == unit_test_name: + self.saved_manifest.unit_tests.pop(unique_id) + schema_file.unit_tests.remove(unique_id) + # No disabled unit tests yet + def get_schema_element(self, elem_list, elem_name): for element in elem_list: if "name" in element and element["name"] == elem_name: @@ -966,6 +1062,8 @@ def build_env_vars_to_files(self): # Create a list of file_ids for source_files that need to be reparsed, and # a dictionary of file_ids to yaml_keys to names. 
for source_file in self.saved_files.values(): + if source_file.parse_file_type == ParseFileType.Fixture: + continue file_id = source_file.file_id if not source_file.env_vars: continue diff --git a/core/dbt/parser/read_files.py b/core/dbt/parser/read_files.py index 3dc2c0fe44c..e5e25841f06 100644 --- a/core/dbt/parser/read_files.py +++ b/core/dbt/parser/read_files.py @@ -1,24 +1,27 @@ import os -import pathspec # type: ignore import pathlib from dataclasses import dataclass, field -from dbt.clients.system import load_file_contents +from typing import Dict, List, Mapping, MutableMapping, Optional, Protocol + +import pathspec # type: ignore + +from dbt.config import Project from dbt.contracts.files import ( + AnySourceFile, + FileHash, FilePath, + FixtureSourceFile, ParseFileType, - SourceFile, - FileHash, - AnySourceFile, SchemaSourceFile, + SourceFile, ) -from dbt.config import Project -from dbt.dataclass_schema import dbtClassMixin -from dbt.parser.schemas import yaml_from_file, schema_file_keys +from dbt.events.types import InputFileDiffError from dbt.exceptions import ParsingError +from dbt.parser.schemas import schema_file_keys, yaml_from_file from dbt.parser.search import filesystem_search -from typing import Optional, Dict, List, Mapping -from dbt.events.types import InputFileDiffError -from dbt.events.functions import fire_event +from dbt_common.clients.system import load_file_contents +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.events.functions import fire_event @dataclass @@ -45,7 +48,13 @@ def load_source_file( saved_files, ) -> Optional[AnySourceFile]: - sf_cls = SchemaSourceFile if parse_file_type == ParseFileType.Schema else SourceFile + if parse_file_type == ParseFileType.Schema: + sf_cls = SchemaSourceFile + elif parse_file_type == ParseFileType.Fixture: + sf_cls = FixtureSourceFile # type:ignore[assignment] + else: + sf_cls = SourceFile # type:ignore[assignment] + source_file = sf_cls( path=path, checksum=FileHash.empty(), @@ -73,7 +82,7 @@ def load_source_file( # the checksum to match the stored file contents file_contents = load_file_contents(path.absolute_path, strip=True) source_file.contents = file_contents - source_file.checksum = FileHash.from_contents(source_file.contents) + source_file.checksum = FileHash.from_contents(file_contents) if parse_file_type == ParseFileType.Schema and source_file.contents: dfy = yaml_from_file(source_file) @@ -137,11 +146,11 @@ def get_source_files(project, paths, extension, parse_file_type, saved_files, ig if parse_file_type == ParseFileType.Seed: fb_list.append(load_seed_source_file(fp, project.project_name)) # singular tests live in /tests but only generic tests live - # in /tests/generic so we want to skip those + # in /tests/generic and fixtures in /tests/fixture so we want to skip those else: if parse_file_type == ParseFileType.SingularTest: path = pathlib.Path(fp.relative_path) - if path.parts[0] == "generic": + if path.parts[0] in ["generic", "fixtures"]: continue file = load_source_file(fp, parse_file_type, project.project_name, saved_files) # only append the list if it has contents. added to fix #3568 @@ -173,12 +182,21 @@ def generate_dbt_ignore_spec(project_root): return ignore_spec +# Protocol for the ReadFiles... 
classes +class ReadFiles(Protocol): + files: MutableMapping[str, AnySourceFile] + project_parser_files: Dict + + def read_files(self): + pass + + @dataclass class ReadFilesFromFileSystem: all_projects: Mapping[str, Project] - files: Dict[str, AnySourceFile] = field(default_factory=dict) + files: MutableMapping[str, AnySourceFile] = field(default_factory=dict) # saved_files is only used to compare schema files - saved_files: Dict[str, AnySourceFile] = field(default_factory=dict) + saved_files: MutableMapping[str, AnySourceFile] = field(default_factory=dict) # project_parser_files = { # "my_project": { # "ModelParser": ["my_project://models/my_model.sql"] @@ -212,10 +230,10 @@ class ReadFilesFromDiff: root_project_name: str all_projects: Mapping[str, Project] file_diff: FileDiff - files: Dict[str, AnySourceFile] = field(default_factory=dict) + files: MutableMapping[str, AnySourceFile] = field(default_factory=dict) # saved_files is used to construct a fresh copy of files, without # additional information from parsing - saved_files: Dict[str, AnySourceFile] = field(default_factory=dict) + saved_files: MutableMapping[str, AnySourceFile] = field(default_factory=dict) project_parser_files: Dict = field(default_factory=dict) project_file_types: Dict = field(default_factory=dict) local_package_dirs: Optional[List[str]] = None @@ -412,5 +430,10 @@ def get_file_types_for_project(project): "extensions": [".yml", ".yaml"], "parser": "SchemaParser", }, + ParseFileType.Fixture: { + "paths": project.fixture_paths, + "extensions": [".csv", ".sql"], + "parser": "FixtureParser", + }, } return file_types diff --git a/core/dbt/parser/schema_generic_tests.py b/core/dbt/parser/schema_generic_tests.py index 29b71c21e92..14e2dbc862a 100644 --- a/core/dbt/parser/schema_generic_tests.py +++ b/core/dbt/parser/schema_generic_tests.py @@ -1,37 +1,43 @@ -import pathlib import itertools import os +import pathlib +from typing import Any, Dict, List, Optional, Union -from typing import List, Dict, Optional, Union, Any +from dbt.adapters.factory import get_adapter, get_adapter_package_names +from dbt.artifacts.resources import NodeVersion, RefArgs +from dbt.clients.jinja import add_rendered_test_kwargs, get_rendered +from dbt.context.configured import SchemaYamlVars, generate_schema_yml_context +from dbt.context.context_config import ContextConfig +from dbt.context.macro_resolver import MacroResolver +from dbt.context.providers import generate_test_context +from dbt.contracts.files import FileHash +from dbt.contracts.graph.nodes import ( + GenericTestNode, + GraphMemberNode, + ManifestNode, + UnpatchedSourceDefinition, +) +from dbt.contracts.graph.unparsed import UnparsedColumn, UnparsedNodeUpdate +from dbt.exceptions import ( + CompilationError, + ParsingError, + SchemaConfigError, + TestConfigError, +) +from dbt.node_types import NodeType from dbt.parser.base import SimpleParser -from dbt.parser.generic_test_builders import TestBuilder -from dbt.parser.search import FileBlock -from dbt.context.providers import RefArgs, generate_test_context from dbt.parser.common import ( - TestBlock, + GenericTestBlock, Testable, + TestBlock, TestDef, - GenericTestBlock, VersionedTestBlock, trimmed, ) -from dbt.contracts.graph.unparsed import UnparsedNodeUpdate, NodeVersion, UnparsedColumn -from dbt.contracts.graph.nodes import ( - GenericTestNode, - UnpatchedSourceDefinition, - ManifestNode, - GraphMemberNode, -) -from dbt.context.context_config import ContextConfig -from dbt.context.configured import generate_schema_yml_context, 
SchemaYamlVars -from dbt.dataclass_schema import ValidationError -from dbt.exceptions import SchemaConfigError, CompilationError, ParsingError, TestConfigError -from dbt.contracts.files import FileHash -from dbt.utils import md5, get_pseudo_test_path -from dbt.clients.jinja import get_rendered, add_rendered_test_kwargs -from dbt.adapters.factory import get_adapter, get_adapter_package_names -from dbt.node_types import NodeType -from dbt.context.macro_resolver import MacroResolver +from dbt.parser.generic_test_builders import TestBuilder +from dbt.parser.search import FileBlock +from dbt.utils import get_pseudo_test_path, md5 +from dbt_common.dataclass_schema import ValidationError # This parser handles the tests that are defined in "schema" (yaml) files, on models, @@ -72,11 +78,11 @@ def parse_from_dict(self, dct, validate=True) -> GenericTestNode: def parse_column_tests( self, block: TestBlock, column: UnparsedColumn, version: Optional[NodeVersion] ) -> None: - if not column.tests: + if not column.data_tests: return - for test in column.tests: - self.parse_test(block, test, column, version) + for data_test in column.data_tests: + self.parse_test(block, data_test, column, version) def create_test_node( self, @@ -148,7 +154,7 @@ def get_hashable_md(data: Union[str, int, float, List, Dict]) -> Union[str, List def parse_generic_test( self, target: Testable, - test: Dict[str, Any], + data_test: Dict[str, Any], tags: List[str], column_name: Optional[str], schema_file_id: str, @@ -156,7 +162,7 @@ def parse_generic_test( ) -> GenericTestNode: try: builder = TestBuilder( - test=test, + data_test=data_test, target=target, column_name=column_name, version=version, @@ -233,7 +239,7 @@ def _lookup_attached_node( attached_node = None # type: Optional[Union[ManifestNode, GraphMemberNode]] if not isinstance(target, UnpatchedSourceDefinition): attached_node_unique_id = self.manifest.ref_lookup.get_unique_id( - target.name, None, version + target.name, target.package_name, version ) if attached_node_unique_id: attached_node = self.manifest.nodes[attached_node_unique_id] @@ -321,7 +327,7 @@ def parse_node(self, block: GenericTestBlock) -> GenericTestNode: """ node = self.parse_generic_test( target=block.target, - test=block.test, + data_test=block.data_test, tags=block.tags, column_name=block.column_name, schema_file_id=block.file.file_id, @@ -357,12 +363,12 @@ def render_with_context( def parse_test( self, target_block: TestBlock, - test: TestDef, + data_test: TestDef, column: Optional[UnparsedColumn], version: Optional[NodeVersion], ) -> None: - if isinstance(test, str): - test = {test: {}} + if isinstance(data_test, str): + data_test = {data_test: {}} if column is None: column_name: Optional[str] = None @@ -376,7 +382,7 @@ def parse_test( block = GenericTestBlock.from_test_block( src=target_block, - test=test, + data_test=data_test, column_name=column_name, tags=column_tags, version=version, @@ -387,8 +393,8 @@ def parse_tests(self, block: TestBlock) -> None: for column in block.columns: self.parse_column_tests(block, column, None) - for test in block.tests: - self.parse_test(block, test, None, None) + for data_test in block.data_tests: + self.parse_test(block, data_test, None, None) def parse_versioned_tests(self, block: VersionedTestBlock) -> None: if not block.target.versions: diff --git a/core/dbt/parser/schema_renderer.py b/core/dbt/parser/schema_renderer.py index e0c54f247da..b187c4f673f 100644 --- a/core/dbt/parser/schema_renderer.py +++ b/core/dbt/parser/schema_renderer.py @@ -1,4 +1,4 @@ -from 
typing import Dict, Any +from typing import Any, Dict from dbt.config.renderer import BaseRenderer, Keypath @@ -11,6 +11,7 @@ # keyword args are rendered to capture refs in render_test_update. # Keyword args are finally rendered at compilation time. # Descriptions are not rendered until 'process_docs'. +# Pre- and post-hooks in configs are late-rendered. class SchemaYamlRenderer(BaseRenderer): def __init__(self, context: Dict[str, Any], key: str) -> None: super().__init__(context) @@ -25,25 +26,40 @@ def _is_norender_key(self, keypath: Keypath) -> bool: models: - name: blah description: blah - tests: ... + data_tests: ... columns: - name: description: blah - tests: ... + data_tests: ... - Return True if it's tests or description - those aren't rendered now + Return True if it's tests, data_tests or description - those aren't rendered now because they're rendered later in parse_generic_tests or process_docs. + "tests" and "data_tests" are both currently supported but "tests" has been deprecated """ - if len(keypath) >= 1 and keypath[0] in ("tests", "description"): + # top level descriptions and data_tests + if len(keypath) >= 1 and keypath[0] in ("tests", "data_tests", "description"): return True - if len(keypath) == 2 and keypath[1] in ("tests", "description"): + # columns descriptions and data_tests + if len(keypath) == 2 and keypath[1] in ("tests", "data_tests", "description"): + return True + + # pre- and post-hooks + if ( + len(keypath) >= 2 + and keypath[0] == "config" + and keypath[1] in ("pre_hook", "post_hook") + ): + return True + + # versions + if len(keypath) == 5 and keypath[4] == "description": return True if ( len(keypath) >= 3 - and keypath[0] == "columns" - and keypath[2] in ("tests", "description") + and keypath[0] in ("columns", "dimensions", "measures", "entities") + and keypath[2] in ("tests", "data_tests", "description") ): return True @@ -67,8 +83,14 @@ def should_render_keypath(self, keypath: Keypath) -> bool: elif self._is_norender_key(keypath[0:]): return False elif self.key == "metrics": - # This ensures all key paths that end in 'filter' for a metric are skipped - if keypath[-1] == "filter": + # This ensures that metric filters are skipped + if keypath[-1] == "filter" or len(keypath) > 1 and keypath[-2] == "filter": + return False + elif self._is_norender_key(keypath[0:]): + return False + elif self.key == "saved_queries": + # This ensures that saved query filters are skipped + if keypath[0] == "query_params" and len(keypath) > 1 and keypath[1] == "where": return False elif self._is_norender_key(keypath[0:]): return False diff --git a/core/dbt/parser/schema_yaml_readers.py b/core/dbt/parser/schema_yaml_readers.py index 2f2a3eb18e6..dc99e87a218 100644 --- a/core/dbt/parser/schema_yaml_readers.py +++ b/core/dbt/parser/schema_yaml_readers.py @@ -1,10 +1,45 @@ -from dbt.parser.schemas import YamlReader, SchemaParser -from dbt.parser.common import YamlBlock -from dbt.node_types import NodeType +from typing import Any, Dict, List, Optional, Union + +from dbt.artifacts.resources import ( + ConversionTypeParams, + CumulativeTypeParams, + Dimension, + DimensionTypeParams, + Entity, + Export, + ExportConfig, + ExposureConfig, + Measure, + MetricConfig, + MetricInput, + MetricInputMeasure, + MetricTimeWindow, + MetricTypeParams, + NonAdditiveDimension, + QueryParams, + SavedQueryConfig, + WhereFilter, + WhereFilterIntersection, +) +from dbt.clients.jinja import get_rendered +from dbt.context.context_config import ( + BaseContextConfigGenerator, + ContextConfigGenerator, + 
UnrenderedConfigGenerator, +) +from dbt.context.providers import ( + generate_parse_exposure, + generate_parse_semantic_models, +) +from dbt.contracts.files import SchemaSourceFile +from dbt.contracts.graph.nodes import Exposure, Group, Metric, SavedQuery, SemanticModel from dbt.contracts.graph.unparsed import ( + UnparsedConversionTypeParams, + UnparsedCumulativeTypeParams, UnparsedDimension, UnparsedDimensionTypeParams, UnparsedEntity, + UnparsedExport, UnparsedExposure, UnparsedGroup, UnparsedMeasure, @@ -13,54 +48,45 @@ UnparsedMetricInputMeasure, UnparsedMetricTypeParams, UnparsedNonAdditiveDimension, + UnparsedQueryParams, + UnparsedSavedQuery, UnparsedSemanticModel, ) -from dbt.contracts.graph.nodes import ( - Exposure, - Group, - Metric, - MetricInput, - MetricInputMeasure, - MetricTimeWindow, - MetricTypeParams, - SemanticModel, - WhereFilter, -) -from dbt.contracts.graph.semantic_models import ( - Dimension, - DimensionTypeParams, - Entity, - Measure, - NonAdditiveDimension, -) -from dbt.exceptions import DbtInternalError, YamlParseDictError, JSONValidationError -from dbt.context.providers import generate_parse_exposure, generate_parse_semantic_models - -from dbt.contracts.graph.model_config import MetricConfig, ExposureConfig -from dbt.context.context_config import ( - BaseContextConfigGenerator, - ContextConfigGenerator, - UnrenderedConfigGenerator, -) -from dbt.clients.jinja import get_rendered -from dbt.dataclass_schema import ValidationError +from dbt.exceptions import JSONValidationError, YamlParseDictError +from dbt.node_types import NodeType +from dbt.parser.common import YamlBlock +from dbt.parser.schemas import ParseResult, SchemaParser, YamlReader +from dbt_common.dataclass_schema import ValidationError +from dbt_common.exceptions import DbtInternalError from dbt_semantic_interfaces.type_enums import ( AggregationType, + ConversionCalculationType, DimensionType, EntityType, MetricType, + PeriodAggregation, TimeGranularity, ) -from typing import List, Optional, Union + + +def parse_where_filter( + where: Optional[Union[List[str], str]] +) -> Optional[WhereFilterIntersection]: + if where is None: + return None + elif isinstance(where, str): + return WhereFilterIntersection([WhereFilter(where)]) + else: + return WhereFilterIntersection([WhereFilter(where_str) for where_str in where]) class ExposureParser(YamlReader): - def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock): + def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock) -> None: super().__init__(schema_parser, yaml, NodeType.Exposure.pluralize()) self.schema_parser = schema_parser self.yaml = yaml - def parse_exposure(self, unparsed: UnparsedExposure): + def parse_exposure(self, unparsed: UnparsedExposure) -> None: package_name = self.project.project_name unique_id = f"{NodeType.Exposure}.{package_name}.{unparsed.name}" path = self.yaml.path.relative_path @@ -118,6 +144,7 @@ def parse_exposure(self, unparsed: UnparsedExposure): get_rendered(depends_on_jinja, ctx, parsed, capture_macros=True) # parsed now has a populated refs/sources/metrics + assert isinstance(self.yaml.file, SchemaSourceFile) if parsed.config.enabled: self.manifest.add_exposure(self.yaml.file, parsed) else: @@ -146,7 +173,7 @@ def _generate_exposure_config( patch_config_dict=precedence_configs, ) - def parse(self): + def parse(self) -> None: for data in self.get_key_dicts(): try: UnparsedExposure.validate(data) @@ -158,7 +185,7 @@ def parse(self): class MetricParser(YamlReader): - def __init__(self, schema_parser: 
SchemaParser, yaml: YamlBlock): + def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock) -> None: super().__init__(schema_parser, yaml, NodeType.Metric.pluralize()) self.schema_parser = schema_parser self.yaml = yaml @@ -170,14 +197,12 @@ def _get_input_measure( if isinstance(unparsed_input_measure, str): return MetricInputMeasure(name=unparsed_input_measure) else: - filter: Optional[WhereFilter] = None - if unparsed_input_measure.filter is not None: - filter = WhereFilter(where_sql_template=unparsed_input_measure.filter) - return MetricInputMeasure( name=unparsed_input_measure.name, - filter=filter, + filter=parse_where_filter(unparsed_input_measure.filter), alias=unparsed_input_measure.alias, + join_to_timespine=unparsed_input_measure.join_to_timespine, + fill_nulls_with=unparsed_input_measure.fill_nulls_with, ) def _get_optional_input_measure( @@ -200,9 +225,19 @@ def _get_input_measures( return input_measures - def _get_time_window( - self, - unparsed_window: Optional[str], + def _get_period_agg(self, unparsed_period_agg: str) -> PeriodAggregation: + return PeriodAggregation(unparsed_period_agg) + + def _get_optional_grain_to_date( + self, unparsed_grain_to_date: Optional[str] + ) -> Optional[TimeGranularity]: + if not unparsed_grain_to_date: + return None + + return TimeGranularity(unparsed_grain_to_date) + + def _get_optional_time_window( + self, unparsed_window: Optional[str] ) -> Optional[MetricTimeWindow]: if unparsed_window is not None: parts = unparsed_window.split(" ") @@ -211,7 +246,7 @@ def _get_time_window( self.yaml.path, "window", {"window": unparsed_window}, - f"Invalid window ({unparsed_window}) in cumulative metric. Should be of the form ` `, " + f"Invalid window ({unparsed_window}) in cumulative/conversion metric. Should be of the form ` `, " "e.g., `28 days`", ) @@ -225,7 +260,7 @@ def _get_time_window( self.yaml.path, "window", {"window": unparsed_window}, - f"Invalid time granularity {granularity} in cumulative metric window string: ({unparsed_window})", + f"Invalid time granularity {granularity} in cumulative/conversion metric window string: ({unparsed_window})", ) count = parts[0] @@ -234,7 +269,7 @@ def _get_time_window( self.yaml.path, "window", {"window": unparsed_window}, - f"Invalid count ({count}) in cumulative metric window string: ({unparsed_window})", + f"Invalid count ({count}) in cumulative/conversion metric window string: ({unparsed_window})", ) return MetricTimeWindow( @@ -252,15 +287,11 @@ def _get_metric_input(self, unparsed: Union[UnparsedMetricInput, str]) -> Metric if unparsed.offset_to_grain is not None: offset_to_grain = TimeGranularity(unparsed.offset_to_grain) - filter: Optional[WhereFilter] = None - if unparsed.filter is not None: - filter = WhereFilter(where_sql_template=unparsed.filter) - return MetricInput( name=unparsed.name, - filter=filter, + filter=parse_where_filter(unparsed.filter), alias=unparsed.alias, - offset_window=self._get_time_window(unparsed.offset_window), + offset_window=self._get_optional_time_window(unparsed.offset_window), offset_to_grain=offset_to_grain, ) @@ -284,7 +315,58 @@ def _get_metric_inputs( return metric_inputs - def _get_metric_type_params(self, type_params: UnparsedMetricTypeParams) -> MetricTypeParams: + def _get_optional_conversion_type_params( + self, unparsed: Optional[UnparsedConversionTypeParams] + ) -> Optional[ConversionTypeParams]: + if unparsed is None: + return None + return ConversionTypeParams( + base_measure=self._get_input_measure(unparsed.base_measure), + 
conversion_measure=self._get_input_measure(unparsed.conversion_measure), + entity=unparsed.entity, + calculation=ConversionCalculationType(unparsed.calculation), + window=self._get_optional_time_window(unparsed.window), + constant_properties=unparsed.constant_properties, + ) + + def _get_optional_cumulative_type_params( + self, unparsed_metric: UnparsedMetric + ) -> Optional[CumulativeTypeParams]: + unparsed_type_params = unparsed_metric.type_params + if unparsed_metric.type.lower() == MetricType.CUMULATIVE.value: + if not unparsed_type_params.cumulative_type_params: + unparsed_type_params.cumulative_type_params = UnparsedCumulativeTypeParams() + + if ( + unparsed_type_params.window + and not unparsed_type_params.cumulative_type_params.window + ): + unparsed_type_params.cumulative_type_params.window = unparsed_type_params.window + if ( + unparsed_type_params.grain_to_date + and not unparsed_type_params.cumulative_type_params.grain_to_date + ): + unparsed_type_params.cumulative_type_params.grain_to_date = ( + unparsed_type_params.grain_to_date + ) + + return CumulativeTypeParams( + window=self._get_optional_time_window( + unparsed_type_params.cumulative_type_params.window + ), + grain_to_date=self._get_optional_grain_to_date( + unparsed_type_params.cumulative_type_params.grain_to_date + ), + period_agg=self._get_period_agg( + unparsed_type_params.cumulative_type_params.period_agg + ), + ) + + return None + + def _get_metric_type_params(self, unparsed_metric: UnparsedMetric) -> MetricTypeParams: + type_params = unparsed_metric.type_params + grain_to_date: Optional[TimeGranularity] = None if type_params.grain_to_date is not None: grain_to_date = TimeGranularity(type_params.grain_to_date) @@ -294,16 +376,20 @@ def _get_metric_type_params(self, type_params: UnparsedMetricTypeParams) -> Metr numerator=self._get_optional_metric_input(type_params.numerator), denominator=self._get_optional_metric_input(type_params.denominator), expr=str(type_params.expr) if type_params.expr is not None else None, - window=self._get_time_window(type_params.window), + window=self._get_optional_time_window(type_params.window), grain_to_date=grain_to_date, metrics=self._get_metric_inputs(type_params.metrics), - # TODO This is a compiled list of measure/numerator/denominator as - # well as the `input_measures` of included metrics. We're planning - # on doing this as part of CT-2707 + conversion_type_params=self._get_optional_conversion_type_params( + type_params.conversion_type_params + ), + cumulative_type_params=self._get_optional_cumulative_type_params( + unparsed_metric=unparsed_metric, + ), + # input measures are calculated via metric processing post parsing # input_measures=?, ) - def parse_metric(self, unparsed: UnparsedMetric): + def parse_metric(self, unparsed: UnparsedMetric, generated_from: Optional[str] = None) -> None: package_name = self.project.project_name unique_id = f"{NodeType.Metric}.{package_name}.{unparsed.name}" path = self.yaml.path.relative_path @@ -332,9 +418,10 @@ def parse_metric(self, unparsed: UnparsedMetric): f"Calculated a {type(config)} for a metric, but expected a MetricConfig" ) - filter: Optional[WhereFilter] = None - if unparsed.filter is not None: - filter = WhereFilter(where_sql_template=unparsed.filter) + # If we have meta in the config, copy to node level, for backwards + # compatibility with earlier node-only config. 
+ if "meta" in config and config["meta"]: + unparsed.meta = config["meta"] parsed = Metric( resource_type=NodeType.Metric, @@ -347,8 +434,11 @@ def parse_metric(self, unparsed: UnparsedMetric): description=unparsed.description, label=unparsed.label, type=MetricType(unparsed.type), - type_params=self._get_metric_type_params(unparsed.type_params), - filter=filter, + type_params=self._get_metric_type_params(unparsed), + time_granularity=( + TimeGranularity(unparsed.time_granularity) if unparsed.time_granularity else None + ), + filter=parse_where_filter(unparsed.filter), meta=unparsed.meta, tags=unparsed.tags, config=config, @@ -357,8 +447,9 @@ def parse_metric(self, unparsed: UnparsedMetric): ) # if the metric is disabled we do not want it included in the manifest, only in the disabled dict + assert isinstance(self.yaml.file, SchemaSourceFile) if parsed.config.enabled: - self.manifest.add_metric(self.yaml.file, parsed) + self.manifest.add_metric(self.yaml.file, parsed, generated_from) else: self.manifest.add_disabled(self.yaml.file, parsed) @@ -386,7 +477,7 @@ def _generate_metric_config( ) return config - def parse(self): + def parse(self) -> None: for data in self.get_key_dicts(): try: UnparsedMetric.validate(data) @@ -398,12 +489,12 @@ def parse(self): class GroupParser(YamlReader): - def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock): + def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock) -> None: super().__init__(schema_parser, yaml, NodeType.Group.pluralize()) self.schema_parser = schema_parser self.yaml = yaml - def parse_group(self, unparsed: UnparsedGroup): + def parse_group(self, unparsed: UnparsedGroup) -> None: package_name = self.project.project_name unique_id = f"{NodeType.Group}.{package_name}.{unparsed.name}" path = self.yaml.path.relative_path @@ -418,6 +509,7 @@ def parse_group(self, unparsed: UnparsedGroup): owner=unparsed.owner, ) + assert isinstance(self.yaml.file, SchemaSourceFile) self.manifest.add_group(self.yaml.file, parsed) def parse(self): @@ -432,7 +524,7 @@ def parse(self): class SemanticModelParser(YamlReader): - def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock): + def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock) -> None: super().__init__(schema_parser, yaml, "semantic_models") self.schema_parser = schema_parser self.yaml = yaml @@ -456,6 +548,7 @@ def _get_dimensions(self, unparsed_dimensions: List[UnparsedDimension]) -> List[ name=unparsed.name, type=DimensionType(unparsed.type), description=unparsed.description, + label=unparsed.label, is_partition=unparsed.is_partition, type_params=self._get_dimension_type_params(unparsed=unparsed.type_params), expr=unparsed.expr, @@ -472,6 +565,7 @@ def _get_entities(self, unparsed_entities: List[UnparsedEntity]) -> List[Entity] name=unparsed.name, type=EntityType(unparsed.type), description=unparsed.description, + label=unparsed.label, role=unparsed.role, expr=unparsed.expr, ) @@ -499,6 +593,7 @@ def _get_measures(self, unparsed_measures: List[UnparsedMeasure]) -> List[Measur name=unparsed.name, agg=AggregationType(unparsed.agg), description=unparsed.description, + label=unparsed.label, expr=str(unparsed.expr) if unparsed.expr is not None else None, agg_params=unparsed.agg_params, non_additive_dimension=self._get_non_additive_dimension( @@ -509,7 +604,50 @@ def _get_measures(self, unparsed_measures: List[UnparsedMeasure]) -> List[Measur ) return measures - def parse_semantic_model(self, unparsed: UnparsedSemanticModel): + def _create_metric( + self, + measure: 
UnparsedMeasure, + enabled: bool, + semantic_model_name: str, + ) -> None: + unparsed_metric = UnparsedMetric( + name=measure.name, + label=measure.label or measure.name, + type="simple", + type_params=UnparsedMetricTypeParams(measure=measure.name, expr=measure.name), + description=measure.description or f"Metric created from measure {measure.name}", + config={"enabled": enabled}, + ) + + parser = MetricParser(self.schema_parser, yaml=self.yaml) + parser.parse_metric(unparsed=unparsed_metric, generated_from=semantic_model_name) + + def _generate_semantic_model_config( + self, target: UnparsedSemanticModel, fqn: List[str], package_name: str, rendered: bool + ): + generator: BaseContextConfigGenerator + if rendered: + generator = ContextConfigGenerator(self.root_project) + else: + generator = UnrenderedConfigGenerator(self.root_project) + + # configs with precendence set + precedence_configs = dict() + # first apply semantic model configs + precedence_configs.update(target.config) + + config = generator.calculate_node_config( + config_call_dict={}, + fqn=fqn, + resource_type=NodeType.SemanticModel, + project_name=package_name, + base=False, + patch_config_dict=precedence_configs, + ) + + return config + + def parse_semantic_model(self, unparsed: UnparsedSemanticModel) -> None: package_name = self.project.project_name unique_id = f"{NodeType.SemanticModel}.{package_name}.{unparsed.name}" path = self.yaml.path.relative_path @@ -517,8 +655,25 @@ def parse_semantic_model(self, unparsed: UnparsedSemanticModel): fqn = self.schema_parser.get_fqn_prefix(path) fqn.append(unparsed.name) + config = self._generate_semantic_model_config( + target=unparsed, + fqn=fqn, + package_name=package_name, + rendered=True, + ) + + config = config.finalize_and_validate() + + unrendered_config = self._generate_semantic_model_config( + target=unparsed, + fqn=fqn, + package_name=package_name, + rendered=False, + ) + parsed = SemanticModel( description=unparsed.description, + label=unparsed.label, fqn=fqn, model=unparsed.model, name=unparsed.name, @@ -533,6 +688,9 @@ def parse_semantic_model(self, unparsed: UnparsedSemanticModel): dimensions=self._get_dimensions(unparsed.dimensions), defaults=unparsed.defaults, primary_entity=unparsed.primary_entity, + config=config, + unrendered_config=unrendered_config, + group=config.group, ) ctx = generate_parse_semantic_models( @@ -544,13 +702,25 @@ def parse_semantic_model(self, unparsed: UnparsedSemanticModel): if parsed.model is not None: model_ref = "{{ " + parsed.model + " }}" - # This sets the "refs" in the SemanticModel from the MetricRefResolver in context/providers.py + # This sets the "refs" in the SemanticModel from the SemanticModelRefResolver in context/providers.py get_rendered(model_ref, ctx, parsed) - # No ability to disable a semantic model at this time - self.manifest.add_semantic_model(self.yaml.file, parsed) + # if the semantic model is disabled we do not want it included in the manifest, + # only in the disabled dict + assert isinstance(self.yaml.file, SchemaSourceFile) + if parsed.config.enabled: + self.manifest.add_semantic_model(self.yaml.file, parsed) + else: + self.manifest.add_disabled(self.yaml.file, parsed) + + # Create a metric for each measure with `create_metric = True` + for measure in unparsed.measures: + if measure.create_metric is True: + self._create_metric( + measure=measure, enabled=parsed.config.enabled, semantic_model_name=parsed.name + ) - def parse(self): + def parse(self) -> None: for data in self.get_key_dicts(): try: 
UnparsedSemanticModel.validate(data) @@ -559,3 +729,141 @@ def parse(self): raise YamlParseDictError(self.yaml.path, self.key, data, exc) self.parse_semantic_model(unparsed) + + +class SavedQueryParser(YamlReader): + def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock) -> None: + super().__init__(schema_parser, yaml, "saved_queries") + self.schema_parser = schema_parser + self.yaml = yaml + + def _generate_saved_query_config( + self, target: UnparsedSavedQuery, fqn: List[str], package_name: str, rendered: bool + ): + generator: BaseContextConfigGenerator + if rendered: + generator = ContextConfigGenerator(self.root_project) + else: + generator = UnrenderedConfigGenerator(self.root_project) + + # configs with precendence set + precedence_configs = dict() + # first apply semantic model configs + precedence_configs.update(target.config) + + config = generator.calculate_node_config( + config_call_dict={}, + fqn=fqn, + resource_type=NodeType.SavedQuery, + project_name=package_name, + base=False, + patch_config_dict=precedence_configs, + ) + + return config + + def _get_export_config( + self, unparsed_export_config: Dict[str, Any], saved_query_config: SavedQueryConfig + ) -> ExportConfig: + # Combine the two dictionaries using dictionary unpacking + # the second dictionary is the one whose keys take priority + combined = {**saved_query_config.__dict__, **unparsed_export_config} + # `schema` is the user facing attribute, but for DSI protocol purposes we track it as `schema_name` + if combined.get("schema") is not None and combined.get("schema_name") is None: + combined["schema_name"] = combined["schema"] + + return ExportConfig.from_dict(combined) + + def _get_export( + self, unparsed: UnparsedExport, saved_query_config: SavedQueryConfig + ) -> Export: + return Export( + name=unparsed.name, + config=self._get_export_config(unparsed.config, saved_query_config), + unrendered_config=unparsed.config, + ) + + def _get_query_params(self, unparsed: UnparsedQueryParams) -> QueryParams: + return QueryParams( + group_by=unparsed.group_by, + metrics=unparsed.metrics, + where=parse_where_filter(unparsed.where), + ) + + def parse_saved_query(self, unparsed: UnparsedSavedQuery) -> None: + package_name = self.project.project_name + unique_id = f"{NodeType.SavedQuery}.{package_name}.{unparsed.name}" + path = self.yaml.path.relative_path + + fqn = self.schema_parser.get_fqn_prefix(path) + fqn.append(unparsed.name) + + config = self._generate_saved_query_config( + target=unparsed, + fqn=fqn, + package_name=package_name, + rendered=True, + ) + + config = config.finalize_and_validate() + + unrendered_config = self._generate_saved_query_config( + target=unparsed, + fqn=fqn, + package_name=package_name, + rendered=False, + ) + + parsed = SavedQuery( + description=unparsed.description, + label=unparsed.label, + fqn=fqn, + name=unparsed.name, + original_file_path=self.yaml.path.original_file_path, + package_name=package_name, + path=path, + resource_type=NodeType.SavedQuery, + unique_id=unique_id, + query_params=self._get_query_params(unparsed.query_params), + exports=[self._get_export(export, config) for export in unparsed.exports], + config=config, + unrendered_config=unrendered_config, + group=config.group, + ) + + for export in parsed.exports: + self.schema_parser.update_parsed_node_relation_names(export, export.config.to_dict()) # type: ignore + + if not export.config.schema_name: + export.config.schema_name = getattr(export, "schema", None) + delattr(export, "schema") + + export.config.database = 
getattr(export, "database", None) or export.config.database + delattr(export, "database") + + if not export.config.alias: + export.config.alias = getattr(export, "alias", None) + delattr(export, "alias") + + delattr(export, "relation_name") + + # Only add thes saved query if it's enabled, otherwise we track it with other diabled nodes + assert isinstance(self.yaml.file, SchemaSourceFile) + if parsed.config.enabled: + self.manifest.add_saved_query(self.yaml.file, parsed) + else: + self.manifest.add_disabled(self.yaml.file, parsed) + + def parse(self) -> ParseResult: + for data in self.get_key_dicts(): + try: + UnparsedSavedQuery.validate(data) + unparsed = UnparsedSavedQuery.from_dict(data) + except (ValidationError, JSONValidationError) as exc: + raise YamlParseDictError(self.yaml.path, self.key, data, exc) + + self.parse_saved_query(unparsed) + + # The supertype (YamlReader) requires `parse` to return a ParseResult, so + # we return an empty one because we don't have one to actually return. + return ParseResult() diff --git a/core/dbt/parser/schemas.py b/core/dbt/parser/schemas.py index fbc95a73df7..3a06756e355 100644 --- a/core/dbt/parser/schemas.py +++ b/core/dbt/parser/schemas.py @@ -1,25 +1,23 @@ import datetime import time - from abc import ABCMeta, abstractmethod -from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Type, TypeVar from dataclasses import dataclass, field +from typing import Any, Callable, Dict, Generic, Iterable, List, Optional, Type, TypeVar -from dbt.dataclass_schema import ValidationError, dbtClassMixin - +from dbt.artifacts.resources import RefArgs +from dbt.artifacts.resources.v1.model import CustomGranularity, TimeSpine +from dbt.clients.jinja_static import statically_parse_ref_or_source from dbt.clients.yaml_helper import load_yaml_text -from dbt.parser.schema_renderer import SchemaYamlRenderer -from dbt.parser.schema_generic_tests import SchemaGenericTestParser +from dbt.config import RuntimeConfig +from dbt.context.configured import SchemaYamlVars, generate_schema_yml_context from dbt.context.context_config import ContextConfig -from dbt.context.configured import generate_schema_yml_context, SchemaYamlVars -from dbt.contracts.files import SchemaSourceFile +from dbt.contracts.files import SchemaSourceFile, SourceFile +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( - ParsedNodePatch, + ModelNode, ParsedMacroPatch, + ParsedNodePatch, UnpatchedSourceDefinition, - ConstraintType, - ModelNode, - ModelLevelConstraint, ) from dbt.contracts.graph.unparsed import ( HasColumnDocs, @@ -27,56 +25,63 @@ SourcePatch, UnparsedAnalysisUpdate, UnparsedMacroUpdate, - UnparsedNodeUpdate, UnparsedModelUpdate, + UnparsedNodeUpdate, UnparsedSourceDefinition, ) +from dbt.events.types import ( + MacroNotFoundForPatch, + NoNodeForYamlKey, + UnsupportedConstraintMaterialization, + ValidationWarning, + WrongResourceSchemaFile, +) from dbt.exceptions import ( + DbtInternalError, DuplicateMacroPatchNameError, DuplicatePatchPathError, DuplicateSourcePatchNameError, + InvalidAccessTypeError, JSONValidationError, - DbtInternalError, ParsingError, - DbtValidationError, YamlLoadError, YamlParseDictError, YamlParseListError, - InvalidAccessTypeError, ) -from dbt.events.functions import warn_or_error -from dbt.events.types import ( - MacroNotFoundForPatch, - NoNodeForYamlKey, - ValidationWarning, - UnsupportedConstraintMaterialization, - WrongResourceSchemaFile, -) -from dbt.node_types import NodeType, AccessType +from 
dbt.node_types import AccessType, NodeType from dbt.parser.base import SimpleParser -from dbt.parser.search import FileBlock from dbt.parser.common import ( - YamlBlock, + ParserRef, TargetBlock, TestBlock, VersionedTestBlock, - ParserRef, + YamlBlock, trimmed, ) -from dbt.utils import coerce_dict_str, deep_merge - - -schema_file_keys = ( - "models", - "seeds", - "snapshots", - "sources", - "macros", - "analyses", - "exposures", - "metrics", - "semantic_models", -) +from dbt.parser.schema_generic_tests import SchemaGenericTestParser +from dbt.parser.schema_renderer import SchemaYamlRenderer +from dbt.parser.search import FileBlock +from dbt.utils import coerce_dict_str +from dbt_common.contracts.constraints import ConstraintType, ModelLevelConstraint +from dbt_common.dataclass_schema import ValidationError, dbtClassMixin +from dbt_common.events.functions import warn_or_error +from dbt_common.exceptions import DbtValidationError +from dbt_common.utils import deep_merge + +schema_file_keys_to_resource_types = { + "models": NodeType.Model, + "seeds": NodeType.Seed, + "snapshots": NodeType.Snapshot, + "sources": NodeType.Source, + "macros": NodeType.Macro, + "analyses": NodeType.Analysis, + "exposures": NodeType.Exposure, + "metrics": NodeType.Metric, + "semantic_models": NodeType.SemanticModel, + "saved_queries": NodeType.SavedQuery, +} + +schema_file_keys = list(schema_file_keys_to_resource_types.keys()) # =============================================================================== @@ -109,11 +114,29 @@ # =============================================================================== -def yaml_from_file(source_file: SchemaSourceFile) -> Dict[str, Any]: +def yaml_from_file(source_file: SchemaSourceFile) -> Optional[Dict[str, Any]]: """If loading the yaml fails, raise an exception.""" try: # source_file.contents can sometimes be None - return load_yaml_text(source_file.contents or "", source_file.path) + contents = load_yaml_text(source_file.contents or "", source_file.path) + + if contents is None: + return contents + + if not isinstance(contents, dict): + raise DbtValidationError( + f"Contents of file '{source_file.original_file_path}' are not valid. Dictionary expected." + ) + + # When loaded_at_field is defined as None or null, it shows up in + # the dict but when it is not defined, it does not show up in the dict + # We need to capture this to be able to override source level settings later. + for source in contents.get("sources", []): + for table in source.get("tables", []): + if "loaded_at_field" in table: + table["loaded_at_field_present"] = True + + return contents except DbtValidationError as e: raise YamlLoadError( project_name=source_file.project_name, path=source_file.path.relative_path, exc=e @@ -125,9 +148,9 @@ def yaml_from_file(source_file: SchemaSourceFile) -> Dict[str, Any]: class SchemaParser(SimpleParser[YamlBlock, ModelNode]): def __init__( self, - project, - manifest, - root_project, + project: RuntimeConfig, + manifest: Manifest, + root_project: RuntimeConfig, ) -> None: super().__init__(project, manifest, root_project) @@ -138,6 +161,11 @@ def __init__( self.root_project, self.project.project_name, self.schema_yaml_vars ) + # This is unnecessary, but mypy was requiring it. Clean up parser code so + # we don't have to do this. + def parse_from_dict(self, dct): + pass + @classmethod def get_compiled_path(cls, block: FileBlock) -> str: # should this raise an error?
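# --- Illustrative sketch (editor addition, not part of the patch) -------------
# Behavior of the loaded_at_field_present marker added to yaml_from_file above:
# a table that spells out `loaded_at_field: null` is flagged, while a table that
# omits the key is not, which lets later source patching distinguish an explicit
# null from "not set". PyYAML's safe_load stands in for dbt's load_yaml_text,
# and the YAML snippets are invented for illustration.
import yaml

explicit_null = yaml.safe_load(
    "sources:\n"
    "  - name: raw\n"
    "    tables:\n"
    "      - name: orders\n"
    "        loaded_at_field: null\n"
)
omitted = yaml.safe_load(
    "sources:\n"
    "  - name: raw\n"
    "    tables:\n"
    "      - name: orders\n"
)

def mark_loaded_at_field(contents):
    # mirrors the loop added to yaml_from_file
    for source in contents.get("sources", []):
        for table in source.get("tables", []):
            if "loaded_at_field" in table:
                table["loaded_at_field_present"] = True
    return contents

print("loaded_at_field_present" in mark_loaded_at_field(explicit_null)["sources"][0]["tables"][0])  # True
print("loaded_at_field_present" in mark_loaded_at_field(omitted)["sources"][0]["tables"][0])        # False
# ------------------------------------------------------------------------------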
@@ -225,6 +253,18 @@ def parse_file(self, block: FileBlock, dct: Optional[Dict] = None) -> None: semantic_model_parser = SemanticModelParser(self, yaml_block) semantic_model_parser.parse() + if "unit_tests" in dct: + from dbt.parser.unit_tests import UnitTestParser + + unit_test_parser = UnitTestParser(self, yaml_block) + unit_test_parser.parse() + + if "saved_queries" in dct: + from dbt.parser.schema_yaml_readers import SavedQueryParser + + saved_query_parser = SavedQueryParser(self, yaml_block) + saved_query_parser.parse() + Parsed = TypeVar("Parsed", UnpatchedSourceDefinition, ParsedNodePatch, ParsedMacroPatch) NodeTarget = TypeVar("NodeTarget", UnparsedNodeUpdate, UnparsedAnalysisUpdate, UnparsedModelUpdate) @@ -244,36 +284,37 @@ class ParseResult: # abstract base class (ABCMeta) -# Four subclasses: MetricParser, ExposureParser, GroupParser, SourceParser, PatchParser +# Many subclasses: MetricParser, ExposureParser, GroupParser, SourceParser, +# PatchParser, SemanticModelParser, SavedQueryParser, UnitTestParser class YamlReader(metaclass=ABCMeta): def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock, key: str) -> None: - self.schema_parser = schema_parser + self.schema_parser: SchemaParser = schema_parser # key: models, seeds, snapshots, sources, macros, - # analyses, exposures - self.key = key - self.yaml = yaml - self.schema_yaml_vars = SchemaYamlVars() + # analyses, exposures, unit_tests + self.key: str = key + self.yaml: YamlBlock = yaml + self.schema_yaml_vars: SchemaYamlVars = SchemaYamlVars() self.render_ctx = generate_schema_yml_context( self.schema_parser.root_project, self.schema_parser.project.project_name, self.schema_yaml_vars, ) - self.renderer = SchemaYamlRenderer(self.render_ctx, self.key) + self.renderer: SchemaYamlRenderer = SchemaYamlRenderer(self.render_ctx, self.key) @property - def manifest(self): + def manifest(self) -> Manifest: return self.schema_parser.manifest @property - def project(self): + def project(self) -> RuntimeConfig: return self.schema_parser.project @property - def default_database(self): + def default_database(self) -> str: return self.schema_parser.default_database @property - def root_project(self): + def root_project(self) -> RuntimeConfig: return self.schema_parser.root_project # for the different schema subparsers ('models', 'source', etc) @@ -297,10 +338,10 @@ def get_key_dicts(self) -> Iterable[Dict[str, Any]]: if coerce_dict_str(entry) is None: raise YamlParseListError(path, self.key, data, "expected a dict with string keys") - if "name" not in entry: + if "name" not in entry and "model" not in entry: raise ParsingError("Entry did not contain a name") - # Render the data (except for tests and descriptions). + # Render the data (except for tests, data_tests and descriptions). 
# See the SchemaYamlRenderer entry = self.render_entry(entry) if self.schema_yaml_vars.env_vars: @@ -325,7 +366,7 @@ def render_entry(self, dct): return dct @abstractmethod - def parse(self) -> ParseResult: + def parse(self) -> Optional[ParseResult]: raise NotImplementedError("parse is abstract") @@ -390,7 +431,9 @@ def add_source_definitions(self, source: UnparsedSourceDefinition) -> None: fqn=fqn, name=f"{source.name}_{table.name}", ) - self.manifest.add_source(self.yaml.file, source_def) + assert isinstance(self.yaml.file, SchemaSourceFile) + source_file: SchemaSourceFile = self.yaml.file + self.manifest.add_source(source_file, source_def) # This class has two subclasses: NodePatchParser and MacroPatchParser @@ -469,6 +512,9 @@ def get_unparsed_target(self) -> Iterable[NonSourceTarget]: self.normalize_docs_attribute(data, path) self.normalize_group_attribute(data, path) self.normalize_contract_attribute(data, path) + self.normalize_access_attribute(data, path) + # `tests` has been deprecated, convert to `data_tests` here if present + self.validate_data_tests(data) node = self._target_type().from_dict(data) except (ValidationError, JSONValidationError) as exc: raise YamlParseDictError(path, self.key, data, exc) @@ -477,7 +523,7 @@ def get_unparsed_target(self) -> Iterable[NonSourceTarget]: # We want to raise an error if some attributes are in two places, and move them # from toplevel to config if necessary - def normalize_attribute(self, data, path, attribute): + def normalize_attribute(self, data, path, attribute) -> None: if attribute in data: if "config" in data and attribute in data["config"]: raise ParsingError( @@ -491,19 +537,63 @@ def normalize_attribute(self, data, path, attribute): data["config"] = {} data["config"][attribute] = data.pop(attribute) - def normalize_meta_attribute(self, data, path): + def normalize_meta_attribute(self, data, path) -> None: return self.normalize_attribute(data, path, "meta") - def normalize_docs_attribute(self, data, path): + def normalize_docs_attribute(self, data, path) -> None: return self.normalize_attribute(data, path, "docs") - def normalize_group_attribute(self, data, path): + def normalize_group_attribute(self, data, path) -> None: return self.normalize_attribute(data, path, "group") - def normalize_contract_attribute(self, data, path): + def normalize_contract_attribute(self, data, path) -> None: return self.normalize_attribute(data, path, "contract") - def patch_node_config(self, node, patch): + def normalize_access_attribute(self, data, path) -> None: + return self.normalize_attribute(data, path, "access") + + @property + def is_root_project(self) -> bool: + if self.root_project.project_name == self.project.project_name: + return True + return False + + def validate_data_tests(self, data) -> None: + # Rename 'tests' -> 'data_tests' at both model-level and column-level + # Raise a validation error if the user has defined both names + def validate_and_rename(data, is_root_project: bool) -> None: + if data.get("tests"): + if "tests" in data and "data_tests" in data: + raise ValidationError( + "Invalid test config: cannot have both 'tests' and 'data_tests' defined" + ) + data["data_tests"] = data.pop("tests") + + # model-level tests + validate_and_rename(data, self.is_root_project) + + # column-level tests + if data.get("columns"): + for column in data["columns"]: + validate_and_rename(column, self.is_root_project) + + # versioned models + if data.get("versions"): + for version in data["versions"]: + validate_and_rename(version, 
self.is_root_project) + if version.get("columns"): + for column in version["columns"]: + validate_and_rename(column, self.is_root_project) + + def patch_node_config(self, node, patch) -> None: + if "access" in patch.config: + if AccessType.is_valid(patch.config["access"]): + patch.config["access"] = AccessType(patch.config["access"]) + else: + raise InvalidAccessTypeError( + unique_id=node.unique_id, + field_value=patch.config["access"], + ) # Get the ContextConfig that's used in calculating the config # This must match the model resource_type that's being patched config = ContextConfig( @@ -525,9 +615,23 @@ def parse_patch(self, block: TargetBlock[NodeTarget], refs: ParserRef) -> None: # could possibly skip creating one. Leaving here for now for # code consistency. deprecation_date: Optional[datetime.datetime] = None + time_spine: Optional[TimeSpine] = None if isinstance(block.target, UnparsedModelUpdate): deprecation_date = block.target.deprecation_date - + time_spine = ( + TimeSpine( + standard_granularity_column=block.target.time_spine.standard_granularity_column, + custom_granularities=[ + CustomGranularity( + name=custom_granularity.name, + column_name=custom_granularity.column_name, + ) + for custom_granularity in block.target.time_spine.custom_granularities + ], + ) + if block.target.time_spine + else None + ) patch = ParsedNodePatch( name=block.target.name, original_file_path=block.target.original_file_path, @@ -543,6 +647,7 @@ def parse_patch(self, block: TargetBlock[NodeTarget], refs: ParserRef) -> None: latest_version=None, constraints=block.target.constraints, deprecation_date=deprecation_date, + time_spine=time_spine, ) assert isinstance(self.yaml.file, SchemaSourceFile) source_file: SchemaSourceFile = self.yaml.file @@ -575,7 +680,10 @@ def parse_patch(self, block: TargetBlock[NodeTarget], refs: ParserRef) -> None: # handle disabled nodes if unique_id is None: # Node might be disabled. Following call returns list of matching disabled nodes - found_nodes = self.manifest.disabled_lookup.find(patch.name, patch.package_name) + resource_type = schema_file_keys_to_resource_types[patch.yaml_key] + found_nodes = self.manifest.disabled_lookup.find( + patch.name, patch.package_name, resource_types=[resource_type] + ) if found_nodes: if len(found_nodes) > 1 and patch.config.get("enabled"): # There are multiple disabled nodes for this model and the schema file wants to enable one. 
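A short sketch of the `tests` -> `data_tests` rename that `validate_data_tests` above applies at the model, column, and version level; the dicts are shaped like rendered schema.yml entries and the helper name is made up.

def rename_tests_to_data_tests(entry: dict) -> None:
    # Defining both keys is an error; a legacy `tests` key is moved to `data_tests`.
    if entry.get("tests"):
        if "data_tests" in entry:
            raise ValueError("cannot have both 'tests' and 'data_tests' defined")
        entry["data_tests"] = entry.pop("tests")

model_entry = {"name": "orders", "columns": [{"name": "id", "tests": ["unique", "not_null"]}]}
rename_tests_to_data_tests(model_entry)           # no model-level `tests`, left unchanged
for column in model_entry["columns"]:
    rename_tests_to_data_tests(column)
# model_entry["columns"][0] is now {"name": "id", "data_tests": ["unique", "not_null"]}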
@@ -625,7 +733,7 @@ def parse_patch(self, block: TargetBlock[NodeTarget], refs: ParserRef) -> None: self.patch_node_properties(node, patch) - def patch_node_properties(self, node, patch: "ParsedNodePatch"): + def patch_node_properties(self, node, patch: "ParsedNodePatch") -> None: """Given a ParsedNodePatch, add the new information to the node.""" # explicitly pick out the parts to update so we don't inadvertently # step on the model name or anything @@ -693,21 +801,23 @@ def parse_patch(self, block: TargetBlock[UnparsedModelUpdate], refs: ParserRef) ) # ref lookup without version - version is not set yet versioned_model_unique_id = self.manifest.ref_lookup.get_unique_id( - versioned_model_name, None, None + versioned_model_name, target.package_name, None ) - versioned_model_node = None + versioned_model_node: Optional[ModelNode] = None add_node_nofile_fn: Callable # If this is the latest version, it's allowed to define itself in a model file name that doesn't have a suffix if versioned_model_unique_id is None and unparsed_version.v == latest_version: versioned_model_unique_id = self.manifest.ref_lookup.get_unique_id( - block.name, None, None + block.name, target.package_name, None ) if versioned_model_unique_id is None: # Node might be disabled. Following call returns list of matching disabled nodes - found_nodes = self.manifest.disabled_lookup.find(versioned_model_name, None) + found_nodes = self.manifest.disabled_lookup.find( + versioned_model_name, None, resource_types=[NodeType.Model] + ) if found_nodes: if len(found_nodes) > 1 and target.config.get("enabled"): # There are multiple disabled nodes for this model and the schema file wants to enable one. @@ -720,12 +830,17 @@ def parse_patch(self, block: TargetBlock[UnparsedModelUpdate], refs: ParserRef) "in `dbt_project.yml` or in the sql files." 
) raise ParsingError(msg) - versioned_model_node = self.manifest.disabled.pop( - found_nodes[0].unique_id - )[0] + # We know that there's only one node in the disabled list because + # otherwise we would have raised the error above + found_node = found_nodes[0] + self.manifest.disabled.pop(found_node.unique_id) + assert isinstance(found_node, ModelNode) + versioned_model_node = found_node add_node_nofile_fn = self.manifest.add_disabled_nofile else: - versioned_model_node = self.manifest.nodes.pop(versioned_model_unique_id) + found_node = self.manifest.nodes.pop(versioned_model_unique_id) + assert isinstance(found_node, ModelNode) + versioned_model_node = found_node add_node_nofile_fn = self.manifest.add_node_nofile if versioned_model_node is None: @@ -744,12 +859,12 @@ def parse_patch(self, block: TargetBlock[UnparsedModelUpdate], refs: ParserRef) f"model.{target.package_name}.{target.name}.{unparsed_version.formatted_v}" ) # update source file.nodes with new unique_id - self.manifest.files[versioned_model_node.file_id].nodes.remove( - versioned_model_node_unique_id_old - ) - self.manifest.files[versioned_model_node.file_id].nodes.append( - versioned_model_node.unique_id - ) + model_source_file = self.manifest.files[versioned_model_node.file_id] + assert isinstance(model_source_file, SourceFile) + # because of incomplete test setup, check before removing + if versioned_model_node_unique_id_old in model_source_file.nodes: + model_source_file.nodes.remove(versioned_model_node_unique_id_old) + model_source_file.nodes.append(versioned_model_node.unique_id) # update versioned node fqn versioned_model_node.fqn[-1] = target.name @@ -787,7 +902,10 @@ def parse_patch(self, block: TargetBlock[UnparsedModelUpdate], refs: ParserRef) # Includes alias recomputation self.patch_node_config(versioned_model_node, versioned_model_patch) - # Need to reapply this here, in the case that 'contract: {enforced: true}' was during config-setting + # Need to reapply setting constraints and contract checksum here, because + # they depend on node.contract.enabled, which wouldn't be set when + # patch_node_properties was called if it wasn't set in the model file. 
+ self.patch_constraints(versioned_model_node, versioned_model_patch.constraints) versioned_model_node.build_contract_checksum() source_file.append_patch( versioned_model_patch.yaml_key, versioned_model_node.unique_id @@ -798,8 +916,13 @@ def parse_patch(self, block: TargetBlock[UnparsedModelUpdate], refs: ParserRef) def _target_type(self) -> Type[UnparsedModelUpdate]: return UnparsedModelUpdate - def patch_node_properties(self, node, patch: "ParsedNodePatch"): + def patch_node_properties(self, node, patch: "ParsedNodePatch") -> None: super().patch_node_properties(node, patch) + + # Remaining patch properties are only relevant to ModelNode objects + if not isinstance(node, ModelNode): + return + node.version = patch.version node.latest_version = patch.latest_version node.deprecation_date = patch.deprecation_date @@ -811,10 +934,12 @@ def patch_node_properties(self, node, patch: "ParsedNodePatch"): unique_id=node.unique_id, field_value=patch.access, ) + # These two will have to be reapplied after config is built for versioned models self.patch_constraints(node, patch.constraints) + self.patch_time_spine(node, patch.time_spine) node.build_contract_checksum() - def patch_constraints(self, node, constraints): + def patch_constraints(self, node: ModelNode, constraints: List[Dict[str, Any]]) -> None: contract_config = node.config.get("contract") if contract_config.enforced is True: self._validate_constraint_prerequisites(node) @@ -827,10 +952,68 @@ def patch_constraints(self, node, constraints): f"Type must be one of {[ct.value for ct in ConstraintType]}" ) - node.constraints = [ModelLevelConstraint.from_dict(c) for c in constraints] + self._validate_pk_constraints(node, constraints) + node.constraints = [ModelLevelConstraint.from_dict(c) for c in constraints] + self._process_constraints_refs_and_sources(node) + + def _process_constraints_refs_and_sources(self, model_node: ModelNode) -> None: + """ + Populate model_node.refs and model_node.sources based on foreign-key constraint references, + whether defined at the model-level or column-level. + """ + for constraint in model_node.all_constraints: + if constraint.type == ConstraintType.foreign_key and constraint.to: + try: + ref_or_source = statically_parse_ref_or_source(constraint.to) + except ParsingError: + raise ParsingError( + f"Invalid 'ref' or 'source' syntax on foreign key constraint 'to' on model {model_node.name}: {constraint.to}." + ) + + if isinstance(ref_or_source, RefArgs): + model_node.refs.append(ref_or_source) + else: + model_node.sources.append(ref_or_source) + + def patch_time_spine(self, node: ModelNode, time_spine: Optional[TimeSpine]) -> None: + node.time_spine = time_spine + + def _validate_pk_constraints( + self, model_node: ModelNode, constraints: List[Dict[str, Any]] + ) -> None: + errors = [] + # check for primary key constraints defined at the column level + pk_col: List[str] = [] + for col in model_node.columns.values(): + for constraint in col.constraints: + if constraint.type == ConstraintType.primary_key: + pk_col.append(col.name) + + if len(pk_col) > 1: + errors.append( + f"Found {len(pk_col)} columns ({pk_col}) with primary key constraints defined. " + "Primary keys for multiple columns must be defined as a model level constraint." 
+ ) + + if len(pk_col) > 0 and ( + any( + constraint.type == ConstraintType.primary_key + for constraint in model_node.constraints + ) + or any(constraint["type"] == ConstraintType.primary_key for constraint in constraints) + ): + errors.append( + "Primary key constraints defined at the model level and the columns level. " + "Primary keys can be defined at the model level or the column level, not both." + ) - def _validate_constraint_prerequisites(self, model_node: ModelNode): + if errors: + raise ParsingError( + f"Primary key constraint error: ({model_node.original_file_path})\n" + + "\n".join(errors) + ) + def _validate_constraint_prerequisites(self, model_node: ModelNode) -> None: column_warn_unsupported = [ constraint.warn_unsupported for column in model_node.columns.values() diff --git a/core/dbt/parser/search.py b/core/dbt/parser/search.py index 75e7fa6636c..444fb4439eb 100644 --- a/core/dbt/parser/search.py +++ b/core/dbt/parser/search.py @@ -1,13 +1,24 @@ import os from dataclasses import dataclass -from typing import List, Callable, Iterable, Set, Union, Iterator, TypeVar, Generic, Optional +from typing import ( + Callable, + Generic, + Iterable, + Iterator, + List, + Optional, + Set, + TypeVar, + Union, +) + from pathspec import PathSpec # type: ignore -from dbt.clients.jinja import extract_toplevel_blocks, BlockTag -from dbt.clients.system import find_matching from dbt.config import Project -from dbt.contracts.files import FilePath, AnySourceFile -from dbt.exceptions import ParsingError, DbtInternalError +from dbt.contracts.files import AnySourceFile, FilePath +from dbt.exceptions import DbtInternalError, ParsingError +from dbt_common.clients.jinja import BlockTag, extract_toplevel_blocks +from dbt_common.clients.system import find_matching # What's the point of wrapping a SourceFile with this class? diff --git a/core/dbt/parser/snapshots.py b/core/dbt/parser/snapshots.py index 72aec4ee976..3f9a7326c74 100644 --- a/core/dbt/parser/snapshots.py +++ b/core/dbt/parser/snapshots.py @@ -1,21 +1,18 @@ import os from typing import List -from dbt.dataclass_schema import ValidationError - -from dbt.contracts.graph.nodes import IntermediateSnapshotNode, SnapshotNode -from dbt.exceptions import SnapshopConfigError +from dbt.contracts.graph.nodes import SnapshotNode from dbt.node_types import NodeType from dbt.parser.base import SQLParser from dbt.parser.search import BlockContents, BlockSearcher, FileBlock from dbt.utils import split_path -class SnapshotParser(SQLParser[IntermediateSnapshotNode, SnapshotNode]): - def parse_from_dict(self, dct, validate=True) -> IntermediateSnapshotNode: +class SnapshotParser(SQLParser[SnapshotNode]): + def parse_from_dict(self, dct, validate=True) -> SnapshotNode: if validate: - IntermediateSnapshotNode.validate(dct) - return IntermediateSnapshotNode.from_dict(dct) + SnapshotNode.validate(dct) + return SnapshotNode.from_dict(dct) @property def resource_type(self) -> NodeType: @@ -25,24 +22,6 @@ def resource_type(self) -> NodeType: def get_compiled_path(cls, block: FileBlock): return block.path.relative_path - def set_snapshot_attributes(self, node): - # use the target_database setting if we got it, otherwise the - # `database` value of the node (ultimately sourced from the `database` - # config value), and if that is not set, use the database defined in - # the adapter's credentials. 
- if node.config.target_database: - node.database = node.config.target_database - elif not node.database: - node.database = self.root_project.credentials.database - - # the target schema must be set if we got here, so overwrite the node's - # schema - node.schema = node.config.target_schema - # We need to set relation_name again, since database/schema might have changed - self._update_node_relation_name(node) - - return node - def get_fqn(self, path: str, name: str) -> List[str]: """Get the FQN for the node. This impacts node selection and config application. @@ -55,21 +34,6 @@ def get_fqn(self, path: str, name: str) -> List[str]: fqn.append(name) return fqn - def transform(self, node: IntermediateSnapshotNode) -> SnapshotNode: - try: - # The config_call_dict is not serialized, because normally - # it is not needed after parsing. But since the snapshot node - # does this extra to_dict, save and restore it, to keep - # the model config when there is also schema config. - config_call_dict = node.config_call_dict - dct = node.to_dict(omit_none=True) - parsed_node = SnapshotNode.from_dict(dct) - parsed_node.config_call_dict = config_call_dict - self.set_snapshot_attributes(parsed_node) - return parsed_node - except ValidationError as exc: - raise SnapshopConfigError(exc, node) - def parse_file(self, file_block: FileBlock) -> None: blocks = BlockSearcher( source=[file_block], diff --git a/core/dbt/parser/sources.py b/core/dbt/parser/sources.py index 4095599a9c4..68dbed94ce5 100644 --- a/core/dbt/parser/sources.py +++ b/core/dbt/parser/sources.py @@ -1,7 +1,11 @@ import itertools +from dataclasses import replace from pathlib import Path -from typing import Iterable, Dict, Optional, Set, Any, List +from typing import Any, Dict, Iterable, List, Optional, Set + +from dbt.adapters.capability import Capability from dbt.adapters.factory import get_adapter +from dbt.artifacts.resources import FreshnessThreshold, SourceConfig, Time from dbt.config import RuntimeConfig from dbt.context.context_config import ( BaseContextConfigGenerator, @@ -9,28 +13,24 @@ UnrenderedConfigGenerator, ) from dbt.contracts.graph.manifest import Manifest, SourceKey -from dbt.contracts.graph.model_config import SourceConfig from dbt.contracts.graph.nodes import ( - UnpatchedSourceDefinition, - SourceDefinition, GenericTestNode, + SourceDefinition, + UnpatchedSourceDefinition, ) from dbt.contracts.graph.unparsed import ( - UnparsedSourceDefinition, SourcePatch, SourceTablePatch, - UnparsedSourceTableDefinition, - FreshnessThreshold, UnparsedColumn, - Time, + UnparsedSourceDefinition, + UnparsedSourceTableDefinition, ) -from dbt.events.functions import warn_or_error -from dbt.events.types import UnusedTables -from dbt.exceptions import DbtInternalError +from dbt.events.types import FreshnessConfigProblem, UnusedTables from dbt.node_types import NodeType - from dbt.parser.common import ParserRef from dbt.parser.schema_generic_tests import SchemaGenericTestParser +from dbt_common.events.functions import fire_event, warn_or_error +from dbt_common.exceptions import DbtInternalError # An UnparsedSourceDefinition is taken directly from the yaml @@ -117,7 +117,7 @@ def patch_source( source = UnparsedSourceDefinition.from_dict(source_dct) table = UnparsedSourceTableDefinition.from_dict(table_dct) - return unpatched.replace(source=source, table=table, patch_path=patch_path) + return replace(unpatched, source=source, table=table, patch_path=patch_path) # This converts an UnpatchedSourceDefinition to a SourceDefinition def 
parse_source(self, target: UnpatchedSourceDefinition) -> SourceDefinition: @@ -126,14 +126,22 @@ def parse_source(self, target: UnpatchedSourceDefinition) -> SourceDefinition: refs = ParserRef.from_target(table) unique_id = target.unique_id description = table.description or "" - meta = table.meta or {} source_description = source.description or "" - loaded_at_field = table.loaded_at_field or source.loaded_at_field + + # We need to be able to tell the difference between explicitly setting the loaded_at_field to None/null + # and when it's simply not set. This allows a user to override the source level loaded_at_field so that + # specific table can default to metadata-based freshness. + if table.loaded_at_field_present or table.loaded_at_field is not None: + loaded_at_field = table.loaded_at_field + else: + loaded_at_field = source.loaded_at_field # may be None, that's okay freshness = merge_freshness(source.freshness, table.freshness) quoting = source.quoting.merged(table.quoting) # path = block.path.original_file_path + table_meta = table.meta or {} source_meta = source.meta or {} + meta = {**source_meta, **table_meta} # make sure we don't do duplicate tags from source + table tags = sorted(set(itertools.chain(source.tags, table.tags))) @@ -184,6 +192,21 @@ def parse_source(self, target: UnpatchedSourceDefinition) -> SourceDefinition: unrendered_config=unrendered_config, ) + if ( + parsed_source.freshness + and not parsed_source.loaded_at_field + and not get_adapter(self.root_project).supports(Capability.TableLastModifiedMetadata) + ): + # Metadata-based freshness is being used by default for this node, + # but is not available through the configured adapter, so warn the + # user that freshness info will not be collected for this node at + # runtime. + fire_event( + FreshnessConfigProblem( + msg=f"The configured adapter does not support metadata-based freshness. A loaded_at_field must be specified for source '{source.name}.{table.name}'." 
+ ) + ) + # relation name is added after instantiation because the adapter does # not provide the relation name for a UnpatchedSourceDefinition object parsed_source.relation_name = self._get_relation_name(parsed_source) @@ -203,10 +226,12 @@ def get_generic_test_parser_for(self, package_name: str) -> "SchemaGenericTestPa return generic_test_parser def get_source_tests(self, target: UnpatchedSourceDefinition) -> Iterable[GenericTestNode]: - for test, column in target.get_tests(): + is_root_project = True if self.root_project.project_name == target.package_name else False + target.validate_data_tests(is_root_project) + for data_test, column in target.get_tests(): yield self.parse_source_test( target=target, - test=test, + data_test=data_test, column=column, ) @@ -231,7 +256,7 @@ def get_patch_for( def parse_source_test( self, target: UnpatchedSourceDefinition, - test: Dict[str, Any], + data_test: Dict[str, Any], column: Optional[UnparsedColumn], ) -> GenericTestNode: column_name: Optional[str] @@ -251,7 +276,7 @@ def parse_source_test( generic_test_parser = self.get_generic_test_parser_for(target.package_name) node = generic_test_parser.parse_generic_test( target=target, - test=test, + data_test=data_test, tags=tags, column_name=column_name, schema_file_id=target.file_id, diff --git a/core/dbt/parser/sql.py b/core/dbt/parser/sql.py index 98e28aadc19..ffd16ccc015 100644 --- a/core/dbt/parser/sql.py +++ b/core/dbt/parser/sql.py @@ -3,13 +3,13 @@ from typing import Iterable from dbt.contracts.graph.manifest import SourceFile -from dbt.contracts.graph.nodes import SqlNode, Macro +from dbt.contracts.graph.nodes import Macro, SqlNode from dbt.contracts.graph.unparsed import UnparsedMacro -from dbt.exceptions import DbtInternalError from dbt.node_types import NodeType from dbt.parser.base import SimpleSQLParser from dbt.parser.macros import MacroParser from dbt.parser.search import FileBlock +from dbt_common.exceptions import DbtInternalError @dataclass diff --git a/core/dbt/parser/unit_tests.py b/core/dbt/parser/unit_tests.py new file mode 100644 index 00000000000..0e60c90274b --- /dev/null +++ b/core/dbt/parser/unit_tests.py @@ -0,0 +1,600 @@ +import csv +import os +from copy import deepcopy +from csv import DictReader +from io import StringIO +from pathlib import Path +from typing import Any, Dict, List, Optional, Set + +from dbt import utils +from dbt.artifacts.resources import ModelConfig, UnitTestConfig, UnitTestFormat +from dbt.config import RuntimeConfig +from dbt.context.context_config import ContextConfig +from dbt.context.providers import generate_parse_exposure, get_rendered +from dbt.contracts.files import FileHash, SchemaSourceFile +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.model_config import UnitTestNodeConfig +from dbt.contracts.graph.nodes import ( + DependsOn, + ModelNode, + UnitTestDefinition, + UnitTestNode, + UnitTestSourceDefinition, +) +from dbt.contracts.graph.unparsed import UnparsedUnitTest +from dbt.exceptions import InvalidUnitTestGivenInput, ParsingError +from dbt.graph import UniqueId +from dbt.node_types import NodeType +from dbt.parser.schemas import ( + JSONValidationError, + ParseResult, + SchemaParser, + ValidationError, + YamlBlock, + YamlParseDictError, + YamlReader, +) +from dbt.utils import get_pseudo_test_path +from dbt_common.events.functions import fire_event +from dbt_common.events.types import SystemStdErr +from dbt_extractor import ExtractionError, py_extract_from_source # type: ignore + + +class UnitTestManifestLoader: 
+ def __init__(self, manifest, root_project, selected) -> None: + self.manifest: Manifest = manifest + self.root_project: RuntimeConfig = root_project + # selected comes from the initial selection against a "regular" manifest + self.selected: Set[UniqueId] = selected + self.unit_test_manifest = Manifest(macros=manifest.macros) + + def load(self) -> Manifest: + for unique_id in self.selected: + if unique_id in self.manifest.unit_tests: + unit_test_case: UnitTestDefinition = self.manifest.unit_tests[unique_id] + self.parse_unit_test_case(unit_test_case) + return self.unit_test_manifest + + def parse_unit_test_case(self, test_case: UnitTestDefinition): + # Create unit test node based on the node being tested + # The tested_node has already been resolved and is in depends_on.nodes + tested_node_unique_id = test_case.depends_on.nodes[0] + tested_node = self.manifest.nodes[tested_node_unique_id] + assert isinstance(tested_node, ModelNode) + + # Create UnitTestNode based on model being tested. Since selection has + # already been done, we don't have to care about fields that are necessary + # for selection. + # Note: no depends_on, that's added later using input nodes + name = test_case.name + if tested_node.is_versioned: + name = name + f"_v{tested_node.version}" + expected_sql: Optional[str] = None + if test_case.expect.format == UnitTestFormat.SQL: + expected_rows: List[Dict[str, Any]] = [] + expected_sql = test_case.expect.rows # type: ignore + else: + assert isinstance(test_case.expect.rows, List) + expected_rows = deepcopy(test_case.expect.rows) + + assert isinstance(expected_rows, List) + unit_test_node = UnitTestNode( + name=name, + resource_type=NodeType.Unit, + package_name=test_case.package_name, + path=get_pseudo_test_path(name, test_case.original_file_path), + original_file_path=test_case.original_file_path, + unique_id=test_case.unique_id, + config=UnitTestNodeConfig( + materialized="unit", expected_rows=expected_rows, expected_sql=expected_sql + ), + raw_code=tested_node.raw_code, + database=tested_node.database, + schema=tested_node.schema, + alias=name, + fqn=test_case.unique_id.split("."), + checksum=FileHash.empty(), + tested_node_unique_id=tested_node.unique_id, + overrides=test_case.overrides, + ) + + ctx = generate_parse_exposure( + unit_test_node, # type: ignore + self.root_project, + self.manifest, + test_case.package_name, + ) + get_rendered(unit_test_node.raw_code, ctx, unit_test_node, capture_macros=True) + # unit_test_node now has a populated refs/sources + + self.unit_test_manifest.nodes[unit_test_node.unique_id] = unit_test_node + + # Now create input_nodes for the test inputs + """ + given: + - input: ref('my_model_a') + rows: [] + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + """ + # Add the model "input" nodes, consisting of all referenced models in the unit test. + # This creates an ephemeral model for every input in every test, so there may be multiple + # input models substituting for the same input ref'd model. Note that since these are + # always "ephemeral" they just wrap the tested_node SQL in additional CTEs. No actual table + # or view is created. 
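To make the comment above concrete, here is a hedged sketch of the fields the loader derives for one `given` entry: the fixture stands in for the ref'd model as an ephemeral node whose raw code just calls the fixture macro, so nothing is ever materialized. The project name, fixture rows, and the dict-shaped summary are illustrative only.

given = {"input": "ref('my_model_b')", "rows": [{"id": 1, "b": 2}, {"id": 2, "b": 2}]}

input_name = "my_model_b"
ephemeral_input_summary = {
    "resource_type": "model",
    "config": {"materialized": "ephemeral"},        # wraps a CTE, never builds a table or view
    "unique_id": f"model.my_project.{input_name}",  # package name assumed
    "name": input_name,
    "path": f"{input_name}.sql",
    # Mirrors _build_fixture_raw_code below for dict-format fixtures.
    "raw_code": "{{ get_fixture_sql(" + repr(given["rows"]) + ", None) }}",
}
print(ephemeral_input_summary["unique_id"])  # model.my_project.my_model_b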
+ for given in test_case.given: + # extract the original_input_node from the ref in the "input" key of the given list + original_input_node = self._get_original_input_node( + given.input, tested_node, test_case.name + ) + input_name = original_input_node.name + + common_fields = { + "resource_type": NodeType.Model, + # root directory for input and output fixtures + "original_file_path": unit_test_node.original_file_path, + "config": ModelConfig(materialized="ephemeral"), + "database": original_input_node.database, + "alias": original_input_node.identifier, + "schema": original_input_node.schema, + "fqn": original_input_node.fqn, + "checksum": FileHash.empty(), + "raw_code": self._build_fixture_raw_code(given.rows, None, given.format), + "package_name": original_input_node.package_name, + "unique_id": f"model.{original_input_node.package_name}.{input_name}", + "name": input_name, + "path": f"{input_name}.sql", + } + + if original_input_node.resource_type in ( + NodeType.Model, + NodeType.Seed, + NodeType.Snapshot, + ): + input_node = ModelNode( + **common_fields, + defer_relation=original_input_node.defer_relation, + ) + if ( + original_input_node.resource_type == NodeType.Model + and original_input_node.version + ): + input_node.version = original_input_node.version + + elif original_input_node.resource_type == NodeType.Source: + # We are reusing the database/schema/identifier from the original source, + # but that shouldn't matter since this acts as an ephemeral model which just + # wraps a CTE around the unit test node. + input_node = UnitTestSourceDefinition( + **common_fields, + source_name=original_input_node.source_name, # needed for source lookup + ) + # Sources need to go in the sources dictionary in order to create the right lookup + self.unit_test_manifest.sources[input_node.unique_id] = input_node # type: ignore + + # Both ModelNode and UnitTestSourceDefinition need to go in nodes dictionary + self.unit_test_manifest.nodes[input_node.unique_id] = input_node + + # Populate this_input_node_unique_id if input fixture represents node being tested + if original_input_node == tested_node: + unit_test_node.this_input_node_unique_id = input_node.unique_id + + # Add unique ids of input_nodes to depends_on + unit_test_node.depends_on.nodes.append(input_node.unique_id) + + def _build_fixture_raw_code(self, rows, column_name_to_data_types, fixture_format) -> str: + # We're not currently using column_name_to_data_types, but leaving here for + # possible future use. + if fixture_format == UnitTestFormat.SQL: + return rows + else: + return ("{{{{ get_fixture_sql({rows}, {column_name_to_data_types}) }}}}").format( + rows=rows, column_name_to_data_types=column_name_to_data_types + ) + + def _get_original_input_node(self, input: str, tested_node: ModelNode, test_case_name: str): + """ + Returns the original input node as defined in the project given an input reference + and the node being tested. 
+ + input: str representing how input node is referenced in tested model sql + * examples: + - "ref('my_model_a')" + - "source('my_source_schema', 'my_source_name')" + - "this" + tested_node: ModelNode of representing node being tested + """ + if input.strip() == "this": + original_input_node = tested_node + else: + try: + statically_parsed = py_extract_from_source(f"{{{{ {input} }}}}") + except ExtractionError: + raise InvalidUnitTestGivenInput(input=input) + + if statically_parsed["refs"]: + ref = list(statically_parsed["refs"])[0] + name = ref.get("name") + package = ref.get("package") + version = ref.get("version") + # TODO: disabled lookup, versioned lookup, public models + original_input_node = self.manifest.ref_lookup.find( + name, package, version, self.manifest + ) + elif statically_parsed["sources"]: + source = list(statically_parsed["sources"])[0] + input_source_name, input_name = source + original_input_node = self.manifest.source_lookup.find( + f"{input_source_name}.{input_name}", + None, + self.manifest, + ) + else: + raise InvalidUnitTestGivenInput(input=input) + + if not original_input_node: + msg = f"Unit test '{test_case_name}' had an input ({input}) which was not found in the manifest." + raise ParsingError(msg) + + return original_input_node + + +class UnitTestParser(YamlReader): + def __init__(self, schema_parser: SchemaParser, yaml: YamlBlock) -> None: + super().__init__(schema_parser, yaml, "unit_tests") + self.schema_parser = schema_parser + self.yaml = yaml + + def parse(self) -> ParseResult: + for data in self.get_key_dicts(): + unit_test: UnparsedUnitTest = self._get_unit_test(data) + tested_model_node = find_tested_model_node( + self.manifest, self.project.project_name, unit_test.model + ) + unit_test_case_unique_id = ( + f"{NodeType.Unit}.{self.project.project_name}.{unit_test.model}.{unit_test.name}" + ) + unit_test_fqn = self._build_fqn( + self.project.project_name, + self.yaml.path.original_file_path, + unit_test.model, + unit_test.name, + ) + unit_test_config = self._build_unit_test_config(unit_test_fqn, unit_test.config) + + unit_test_definition = UnitTestDefinition( + name=unit_test.name, + model=unit_test.model, + resource_type=NodeType.Unit, + package_name=self.project.project_name, + path=self.yaml.path.relative_path, + original_file_path=self.yaml.path.original_file_path, + unique_id=unit_test_case_unique_id, + given=unit_test.given, + expect=unit_test.expect, + description=unit_test.description, + overrides=unit_test.overrides, + depends_on=DependsOn(), + fqn=unit_test_fqn, + config=unit_test_config, + versions=unit_test.versions, + ) + + if tested_model_node: + unit_test_definition.depends_on.nodes.append(tested_model_node.unique_id) + unit_test_definition.schema = tested_model_node.schema + + # Check that format and type of rows matches for each given input, + # convert rows to a list of dictionaries, and add the unique_id of + # the unit_test_definition to the fixture source_file for partial parsing. 
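The format checks described in the comment above boil down to coercing every fixture into a list of dicts. Here is a hedged sketch of the csv case, mirroring `_convert_csv_to_list_of_dicts` and the empty-value handling that follow; the helper name and sample data are made up.

import csv
from io import StringIO

def csv_rows_to_dicts(csv_string: str):
    rows = list(csv.DictReader(StringIO(csv_string)))
    # Empty csv cells default to None rather than "".
    return [{k: (None if v == "" else v) for k, v in row.items()} for row in rows]

print(csv_rows_to_dicts("id,name\n1,alice\n2,"))
# [{'id': '1', 'name': 'alice'}, {'id': '2', 'name': None}]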
+ self._validate_and_normalize_given(unit_test_definition) + self._validate_and_normalize_expect(unit_test_definition) + + # for calculating state:modified + unit_test_definition.build_unit_test_checksum() + assert isinstance(self.yaml.file, SchemaSourceFile) + self.manifest.add_unit_test(self.yaml.file, unit_test_definition) + + return ParseResult() + + def _get_unit_test(self, data: Dict[str, Any]) -> UnparsedUnitTest: + try: + UnparsedUnitTest.validate(data) + return UnparsedUnitTest.from_dict(data) + except (ValidationError, JSONValidationError) as exc: + raise YamlParseDictError(self.yaml.path, self.key, data, exc) + + def _build_unit_test_config( + self, unit_test_fqn: List[str], config_dict: Dict[str, Any] + ) -> UnitTestConfig: + config = ContextConfig( + self.schema_parser.root_project, + unit_test_fqn, + NodeType.Unit, + self.schema_parser.project.project_name, + ) + unit_test_config_dict = config.build_config_dict(patch_config_dict=config_dict) + unit_test_config_dict = self.render_entry(unit_test_config_dict) + + return UnitTestConfig.from_dict(unit_test_config_dict) + + def _build_fqn(self, package_name, original_file_path, model_name, test_name): + # This code comes from "get_fqn" and "get_fqn_prefix" in the base parser. + # We need to get the directories underneath the model-path. + path = Path(original_file_path) + relative_path = str(path.relative_to(*path.parts[:1])) + no_ext = os.path.splitext(relative_path)[0] + fqn = [package_name] + fqn.extend(utils.split_path(no_ext)[:-1]) + fqn.append(model_name) + fqn.append(test_name) + return fqn + + def _get_fixture(self, fixture_name: str, project_name: str): + fixture_unique_id = f"{NodeType.Fixture}.{project_name}.{fixture_name}" + if fixture_unique_id in self.manifest.fixtures: + fixture = self.manifest.fixtures[fixture_unique_id] + return fixture + else: + raise ParsingError( + f"File not found for fixture '{fixture_name}' in unit tests in {self.yaml.path.original_file_path}" + ) + + def _validate_and_normalize_given(self, unit_test_definition): + for ut_input in unit_test_definition.given: + self._validate_and_normalize_rows(ut_input, unit_test_definition, "input") + + def _validate_and_normalize_expect(self, unit_test_definition): + self._validate_and_normalize_rows( + unit_test_definition.expect, unit_test_definition, "expected" + ) + + def _validate_and_normalize_rows(self, ut_fixture, unit_test_definition, fixture_type) -> None: + if ut_fixture.format == UnitTestFormat.Dict: + if ut_fixture.rows is None and ut_fixture.fixture is None: # This is a seed + ut_fixture.rows = self._load_rows_from_seed(ut_fixture.input) + if not isinstance(ut_fixture.rows, list): + raise ParsingError( + f"Unit test {unit_test_definition.name} has {fixture_type} rows " + f"which do not match format {ut_fixture.format}" + ) + elif ut_fixture.format == UnitTestFormat.CSV: + if not (isinstance(ut_fixture.rows, str) or isinstance(ut_fixture.fixture, str)): + raise ParsingError( + f"Unit test {unit_test_definition.name} has {fixture_type} rows or fixtures " + f"which do not match format {ut_fixture.format}. Expected string." + ) + + if ut_fixture.fixture: + csv_rows = self.get_fixture_file_rows( + ut_fixture.fixture, self.project.project_name, unit_test_definition.unique_id + ) + else: + csv_rows = self._convert_csv_to_list_of_dicts(ut_fixture.rows) + + # Empty values (e.g. 
,,) in a csv fixture should default to null, not "" + ut_fixture.rows = [ + {k: (None if v == "" else v) for k, v in row.items()} for row in csv_rows + ] + + elif ut_fixture.format == UnitTestFormat.SQL: + if not (isinstance(ut_fixture.rows, str) or isinstance(ut_fixture.fixture, str)): + raise ParsingError( + f"Unit test {unit_test_definition.name} has {fixture_type} rows or fixtures " + f"which do not match format {ut_fixture.format}. Expected string." + ) + + if ut_fixture.fixture: + ut_fixture.rows = self.get_fixture_file_rows( + ut_fixture.fixture, self.project.project_name, unit_test_definition.unique_id + ) + + # sanitize order of input + if ut_fixture.rows and ( + ut_fixture.format == UnitTestFormat.Dict or ut_fixture.format == UnitTestFormat.CSV + ): + self._promote_first_non_none_row(ut_fixture) + + def _promote_first_non_none_row(self, ut_fixture): + """ + Promote the first row with no None values to the top of the ut_fixture.rows list. + + This function modifies the ut_fixture object in place. + + Needed for databases like Redshift which uses the first value in a column to determine + the column type. If the first value is None, the type is assumed to be VARCHAR(1). + This leads to obscure type mismatch errors centered on a unit test fixture's `expect`. + See https://github.com/dbt-labs/dbt-redshift/issues/821 for more info. + """ + non_none_row_index = None + + # Iterate through each row and its index + for index, row in enumerate(ut_fixture.rows): + # Check if all values in the row are not None + if all(value is not None for value in row.values()): + non_none_row_index = index + break + + if non_none_row_index is None: + fire_event( + SystemStdErr( + bmsg="Unit Test fixtures benefit from having at least one row free of Null values to ensure consistent column types. Failure to meet this recommendation can result in type mismatch errors between unit test source models and `expected` fixtures." + ) + ) + else: + ut_fixture.rows[0], ut_fixture.rows[non_none_row_index] = ( + ut_fixture.rows[non_none_row_index], + ut_fixture.rows[0], + ) + + def get_fixture_file_rows(self, fixture_name, project_name, utdef_unique_id): + # find fixture file object and store unit_test_definition unique_id + fixture = self._get_fixture(fixture_name, project_name) + fixture_source_file = self.manifest.files[fixture.file_id] + fixture_source_file.unit_tests.append(utdef_unique_id) + return fixture.rows + + def _convert_csv_to_list_of_dicts(self, csv_string: str) -> List[Dict[str, Any]]: + dummy_file = StringIO(csv_string) + reader = csv.DictReader(dummy_file) + rows = [] + for row in reader: + rows.append(row) + return rows + + def _load_rows_from_seed(self, ref_str: str) -> List[Dict[str, Any]]: + """Read rows from seed file on disk if not specified in YAML config. 
If seed file doesn't exist, return empty list.""" + ref = py_extract_from_source("{{ " + ref_str + " }}")["refs"][0] + + rows: List[Dict[str, Any]] = [] + + seed_name = ref["name"] + package_name = ref.get("package", self.project.project_name) + + seed_node = self.manifest.ref_lookup.find(seed_name, package_name, None, self.manifest) + + if not seed_node or seed_node.resource_type != NodeType.Seed: + # Seed not found in custom package specified + if package_name != self.project.project_name: + raise ParsingError( + f"Unable to find seed '{package_name}.{seed_name}' for unit tests in '{package_name}' package" + ) + else: + raise ParsingError( + f"Unable to find seed '{package_name}.{seed_name}' for unit tests in directories: {self.project.seed_paths}" + ) + + seed_path = Path(seed_node.root_path) / seed_node.original_file_path + with open(seed_path, "r") as f: + for row in DictReader(f): + rows.append(row) + + return rows + + +def find_tested_model_node( + manifest: Manifest, current_project: str, unit_test_model: str +) -> Optional[ModelNode]: + model_name_split = unit_test_model.split() + model_name = model_name_split[0] + model_version = model_name_split[1] if len(model_name_split) == 2 else None + + tested_node = manifest.ref_lookup.find(model_name, current_project, model_version, manifest) + return tested_node + + +# This is called by the ManifestLoader after other processing has been done, +# so that model versions are available. +def process_models_for_unit_test( + manifest: Manifest, current_project: str, unit_test_def: UnitTestDefinition, models_to_versions +): + # If the unit tests doesn't have a depends_on.nodes[0] then we weren't able to resolve + # the model, either because versions hadn't been processed yet, or it's not a valid model name + if not unit_test_def.depends_on.nodes: + tested_node = find_tested_model_node(manifest, current_project, unit_test_def.model) + if not tested_node: + raise ParsingError( + f"Unable to find model '{current_project}.{unit_test_def.model}' for " + f"unit test '{unit_test_def.name}' in {unit_test_def.original_file_path}" + ) + unit_test_def.depends_on.nodes.append(tested_node.unique_id) + unit_test_def.schema = tested_node.schema + + # The UnitTestDefinition should only have one "depends_on" at this point, + # the one that's found by the "model" field. 
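A small sketch of how the unit test's `model:` field is resolved by `find_tested_model_node` above: the string is split on whitespace into a model name and an optional version token, which are then handed to the ref lookup. The helper name and example values are hypothetical, and the exact version syntax accepted is not shown here.

def split_model_ref(unit_test_model: str):
    parts = unit_test_model.split()
    name = parts[0]
    version = parts[1] if len(parts) == 2 else None
    return name, version

assert split_model_ref("stg_orders") == ("stg_orders", None)
assert split_model_ref("stg_orders v2") == ("stg_orders", "v2")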
+ target_model_id = unit_test_def.depends_on.nodes[0] + target_model = manifest.nodes[target_model_id] + assert isinstance(target_model, ModelNode) + + target_model_is_incremental = "macro.dbt.is_incremental" in target_model.depends_on.macros + unit_test_def_has_incremental_override = unit_test_def.overrides and isinstance( + unit_test_def.overrides.macros.get("is_incremental"), bool + ) + + if target_model_is_incremental and (not unit_test_def_has_incremental_override): + raise ParsingError( + f"Boolean override for 'is_incremental' must be provided for unit test '{unit_test_def.name}' in model '{target_model.name}'" + ) + + unit_test_def_incremental_override_true = ( + unit_test_def.overrides and unit_test_def.overrides.macros.get("is_incremental") + ) + unit_test_def_has_this_input = "this" in [i.input for i in unit_test_def.given] + + if ( + target_model_is_incremental + and unit_test_def_incremental_override_true + and (not unit_test_def_has_this_input) + ): + raise ParsingError( + f"Unit test '{unit_test_def.name}' for incremental model '{target_model.name}' must have a 'this' input" + ) + + # unit_test_versions = unit_test_def.versions + # We're setting up unit tests for versioned models, so if + # the model isn't versioned, we don't need to do anything + if not target_model.is_versioned: + if unit_test_def.versions and ( + unit_test_def.versions.include or unit_test_def.versions.exclude + ): + # If model is not versioned, we should not have an include or exclude + msg = ( + f"Unit test '{unit_test_def.name}' should not have a versions include or exclude " + f"when referencing non-versioned model '{target_model.name}'" + ) + raise ParsingError(msg) + else: + return + versioned_models = [] + if ( + target_model.package_name in models_to_versions + and target_model.name in models_to_versions[target_model.package_name] + ): + versioned_models = models_to_versions[target_model.package_name][target_model.name] + + versions_to_test = [] + if unit_test_def.versions is None: + versions_to_test = versioned_models + elif unit_test_def.versions.exclude: + for model_unique_id in versioned_models: + model = manifest.nodes[model_unique_id] + assert isinstance(model, ModelNode) + if model.version in unit_test_def.versions.exclude: + continue + else: + versions_to_test.append(model.unique_id) + elif unit_test_def.versions.include: + for model_unique_id in versioned_models: + model = manifest.nodes[model_unique_id] + assert isinstance(model, ModelNode) + if model.version in unit_test_def.versions.include: + versions_to_test.append(model.unique_id) + else: + continue + + if not versions_to_test: + msg = ( + f"Unit test '{unit_test_def.name}' referenced a version of '{target_model.name}' " + "which was not found." 
+ ) + raise ParsingError(msg) + else: + # Create unit test definitions that match the model versions + original_unit_test_def = manifest.unit_tests.pop(unit_test_def.unique_id) + original_unit_test_dict = original_unit_test_def.to_dict() + schema_file = manifest.files[original_unit_test_def.file_id] + assert isinstance(schema_file, SchemaSourceFile) + schema_file.unit_tests.remove(original_unit_test_def.unique_id) + for versioned_model_unique_id in versions_to_test: + versioned_model = manifest.nodes[versioned_model_unique_id] + assert isinstance(versioned_model, ModelNode) + versioned_unit_test_unique_id = f"{NodeType.Unit}.{unit_test_def.package_name}.{unit_test_def.model}.{unit_test_def.name}_v{versioned_model.version}" + new_unit_test_def = UnitTestDefinition.from_dict(original_unit_test_dict) + new_unit_test_def.unique_id = versioned_unit_test_unique_id + new_unit_test_def.depends_on.nodes[0] = versioned_model_unique_id + new_unit_test_def.version = versioned_model.version + schema_file.unit_tests.append(versioned_unit_test_unique_id) + # fqn? + manifest.unit_tests[versioned_unit_test_unique_id] = new_unit_test_def diff --git a/core/dbt/plugins/__init__.py b/core/dbt/plugins/__init__.py index e6ed7198d80..37744d655e4 100644 --- a/core/dbt/plugins/__init__.py +++ b/core/dbt/plugins/__init__.py @@ -1,10 +1,7 @@ from typing import Optional -from .manager import PluginManager - # these are just exports, they need "noqa" so flake8 will not complain. -from .manager import dbtPlugin, dbt_hook # noqa - +from .manager import PluginManager, dbt_hook, dbtPlugin # noqa PLUGIN_MANAGER: Optional[PluginManager] = None diff --git a/core/dbt/plugins/contracts.py b/core/dbt/plugins/contracts.py index a9db066dfca..f6f5b4d6543 100644 --- a/core/dbt/plugins/contracts.py +++ b/core/dbt/plugins/contracts.py @@ -1,9 +1,9 @@ from typing import Dict # just exports, they need "noqa" so flake8 will not complain. -from dbt.contracts.util import ArtifactMixin as PluginArtifact, schema_version # noqa -from dbt.contracts.util import BaseArtifactMetadata, AdditionalPropertiesMixin # noqa -from dbt.dataclass_schema import dbtClassMixin, ExtensibleDbtClassMixin # noqa - +from dbt.artifacts.schemas.base import ArtifactMixin as PluginArtifact # noqa +from dbt.artifacts.schemas.base import BaseArtifactMetadata # noqa +from dbt.artifacts.schemas.base import schema_version # noqa +from dbt_common.dataclass_schema import ExtensibleDbtClassMixin, dbtClassMixin # noqa PluginArtifacts = Dict[str, PluginArtifact] diff --git a/core/dbt/plugins/manager.py b/core/dbt/plugins/manager.py index b51abc467a3..5ef8b8fb735 100644 --- a/core/dbt/plugins/manager.py +++ b/core/dbt/plugins/manager.py @@ -1,11 +1,15 @@ +import functools import importlib import pkgutil -from typing import Dict, List, Callable +from types import ModuleType +from typing import Callable, Dict, List, Mapping +import dbt.tracking from dbt.contracts.graph.manifest import Manifest -from dbt.exceptions import DbtRuntimeError from dbt.plugins.contracts import PluginArtifacts from dbt.plugins.manifest import PluginNodes +from dbt_common.exceptions import DbtRuntimeError +from dbt_common.tests import test_caching_enabled def dbt_hook(func): @@ -25,7 +29,7 @@ class dbtPlugin: Its interface is **not** stable and will likely change between dbt-core versions. 
""" - def __init__(self, project_name: str): + def __init__(self, project_name: str) -> None: self.project_name = project_name try: self.initialize() @@ -62,11 +66,25 @@ def get_manifest_artifacts(self, manifest: Manifest) -> PluginArtifacts: raise NotImplementedError(f"get_manifest_artifacts hook not implemented for {self.name}") +@functools.lru_cache(maxsize=None) +def _get_dbt_modules() -> Mapping[str, ModuleType]: + # This is an expensive function, especially in the context of testing, when + # it is called repeatedly, so we break it out and cache the result globally. + return { + name: importlib.import_module(name) + for _, name, _ in pkgutil.iter_modules() + if name.startswith(PluginManager.PLUGIN_MODULE_PREFIX) + } + + +_MODULES_CACHE = None + + class PluginManager: PLUGIN_MODULE_PREFIX = "dbt_" PLUGIN_ATTR_NAME = "plugins" - def __init__(self, plugins: List[dbtPlugin]): + def __init__(self, plugins: List[dbtPlugin]) -> None: self._plugins = plugins self._valid_hook_names = set() # default hook implementations from dbtPlugin @@ -90,11 +108,16 @@ def __init__(self, plugins: List[dbtPlugin]): @classmethod def from_modules(cls, project_name: str) -> "PluginManager": - discovered_dbt_modules = { - name: importlib.import_module(name) - for _, name, _ in pkgutil.iter_modules() - if name.startswith(cls.PLUGIN_MODULE_PREFIX) - } + + if test_caching_enabled(): + global _MODULES_CACHE + if _MODULES_CACHE is None: + discovered_dbt_modules = cls.get_prefixed_modules() + _MODULES_CACHE = discovered_dbt_modules + else: + discovered_dbt_modules = _MODULES_CACHE + else: + discovered_dbt_modules = cls.get_prefixed_modules() plugins = [] for name, module in discovered_dbt_modules.items(): @@ -108,6 +131,14 @@ def from_modules(cls, project_name: str) -> "PluginManager": plugins.append(plugin) return cls(plugins=plugins) + @classmethod + def get_prefixed_modules(cls): + return { + name: importlib.import_module(name) + for _, name, _ in pkgutil.iter_modules() + if name.startswith(cls.PLUGIN_MODULE_PREFIX) + } + def get_manifest_artifacts(self, manifest: Manifest) -> PluginArtifacts: all_plugin_artifacts = {} for hook_method in self.hooks.get("get_manifest_artifacts", []): @@ -119,5 +150,14 @@ def get_nodes(self) -> PluginNodes: all_plugin_nodes = PluginNodes() for hook_method in self.hooks.get("get_nodes", []): plugin_nodes = hook_method() + dbt.tracking.track_plugin_get_nodes( + { + "plugin_name": hook_method.__self__.name, # type: ignore + "num_model_nodes": len(plugin_nodes.models), + "num_model_packages": len( + {model.package_name for model in plugin_nodes.models.values()} + ), + } + ) all_plugin_nodes.update(plugin_nodes) return all_plugin_nodes diff --git a/core/dbt/plugins/manifest.py b/core/dbt/plugins/manifest.py index 112cd1565b3..84578df8c23 100644 --- a/core/dbt/plugins/manifest.py +++ b/core/dbt/plugins/manifest.py @@ -1,13 +1,13 @@ from dataclasses import dataclass, field from typing import Dict -from dbt.contracts.graph.node_args import ModelNodeArgs +from dbt.artifacts.resources import NodeVersion # noqa # all these are just exports, they need "noqa" so flake8 will not complain. 
from dbt.contracts.graph.manifest import Manifest # noqa -from dbt.node_types import AccessType, NodeType # noqa -from dbt.contracts.graph.unparsed import NodeVersion # noqa +from dbt.contracts.graph.node_args import ModelNodeArgs from dbt.graph.graph import UniqueId # noqa +from dbt.node_types import AccessType, NodeType # noqa @dataclass @@ -17,5 +17,5 @@ class PluginNodes: def add_model(self, model_args: ModelNodeArgs) -> None: self.models[model_args.unique_id] = model_args - def update(self, other: "PluginNodes"): + def update(self, other: "PluginNodes") -> None: self.models.update(other.models) diff --git a/core/dbt/selected_resources.py b/core/dbt/selected_resources.py index 871cf059beb..74104fa6ef8 100644 --- a/core/dbt/selected_resources.py +++ b/core/dbt/selected_resources.py @@ -1,4 +1,4 @@ -from typing import Set, Any +from typing import Any, Set SELECTED_RESOURCES = [] diff --git a/core/dbt/semver.py b/core/dbt/semver.py deleted file mode 100644 index e4020931f30..00000000000 --- a/core/dbt/semver.py +++ /dev/null @@ -1,473 +0,0 @@ -from dataclasses import dataclass -import re -from typing import List - -from dbt.exceptions import VersionsNotCompatibleError -import dbt.utils - -from dbt.dataclass_schema import dbtClassMixin, StrEnum -from typing import Optional - - -class Matchers(StrEnum): - GREATER_THAN = ">" - GREATER_THAN_OR_EQUAL = ">=" - LESS_THAN = "<" - LESS_THAN_OR_EQUAL = "<=" - EXACT = "=" - - -@dataclass -class VersionSpecification(dbtClassMixin): - major: Optional[str] = None - minor: Optional[str] = None - patch: Optional[str] = None - prerelease: Optional[str] = None - build: Optional[str] = None - matcher: Matchers = Matchers.EXACT - - -_MATCHERS = r"(?P\>=|\>|\<|\<=|=)?" -_NUM_NO_LEADING_ZEROS = r"(0|[1-9]\d*)" -_ALPHA = r"[0-9A-Za-z-]*" -_ALPHA_NO_LEADING_ZEROS = r"(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)" - -_BASE_VERSION_REGEX = r""" -(?P{num_no_leading_zeros})\. -(?P{num_no_leading_zeros})\. -(?P{num_no_leading_zeros}) -""".format( - num_no_leading_zeros=_NUM_NO_LEADING_ZEROS -) - -_VERSION_EXTRA_REGEX = r""" -(\-? - (?P - {alpha_no_leading_zeros}(\.{alpha_no_leading_zeros})*))? -(\+ - (?P - {alpha}(\.{alpha})*))? -""".format( - alpha_no_leading_zeros=_ALPHA_NO_LEADING_ZEROS, alpha=_ALPHA -) - - -_VERSION_REGEX_PAT_STR = r""" -^ -{matchers} -{base_version_regex} -{version_extra_regex} -$ -""".format( - matchers=_MATCHERS, - base_version_regex=_BASE_VERSION_REGEX, - version_extra_regex=_VERSION_EXTRA_REGEX, -) - -_VERSION_REGEX = re.compile(_VERSION_REGEX_PAT_STR, re.VERBOSE) - - -def _cmp(a, b): - """Return negative if ab.""" - return (a > b) - (a < b) - - -@dataclass -class VersionSpecifier(VersionSpecification): - def to_version_string(self, skip_matcher=False): - prerelease = "" - build = "" - matcher = "" - - if self.prerelease: - prerelease = "-" + self.prerelease - - if self.build: - build = "+" + self.build - - if not skip_matcher: - matcher = self.matcher - return "{}{}.{}.{}{}{}".format( - matcher, self.major, self.minor, self.patch, prerelease, build - ) - - @classmethod - def from_version_string(cls, version_string): - match = _VERSION_REGEX.match(version_string) - - if not match: - raise dbt.exceptions.SemverError( - f'"{version_string}" is not a valid semantic version.' 
- ) - - matched = {k: v for k, v in match.groupdict().items() if v is not None} - - return cls.from_dict(matched) - - def __str__(self): - return self.to_version_string() - - def to_range(self): - range_start: VersionSpecifier = UnboundedVersionSpecifier() - range_end: VersionSpecifier = UnboundedVersionSpecifier() - - if self.matcher == Matchers.EXACT: - range_start = self - range_end = self - - elif self.matcher in [Matchers.GREATER_THAN, Matchers.GREATER_THAN_OR_EQUAL]: - range_start = self - - elif self.matcher in [Matchers.LESS_THAN, Matchers.LESS_THAN_OR_EQUAL]: - range_end = self - - return VersionRange(start=range_start, end=range_end) - - def compare(self, other): - if self.is_unbounded or other.is_unbounded: - return 0 - - for key in ["major", "minor", "patch", "prerelease"]: - (a, b) = (getattr(self, key), getattr(other, key)) - if key == "prerelease": - if a is None and b is None: - continue - if a is None: - if self.matcher == Matchers.LESS_THAN: - # If 'a' is not a pre-release but 'b' is, and b must be - # less than a, return -1 to prevent installations of - # pre-releases with greater base version than a - # maximum specified non-pre-release version. - return -1 - # Otherwise, stable releases are considered greater than - # pre-release - return 1 - if b is None: - return -1 - - # Check the prerelease component only - prcmp = self._nat_cmp(a, b) - if prcmp != 0: # either -1 or 1 - return prcmp - # else is equal and will fall through - - else: # major/minor/patch, should all be numbers - if int(a) > int(b): - return 1 - elif int(a) < int(b): - return -1 - # else is equal and will fall through - - equal = ( - self.matcher == Matchers.GREATER_THAN_OR_EQUAL - and other.matcher == Matchers.LESS_THAN_OR_EQUAL - ) or ( - self.matcher == Matchers.LESS_THAN_OR_EQUAL - and other.matcher == Matchers.GREATER_THAN_OR_EQUAL - ) - if equal: - return 0 - - lt = ( - (self.matcher == Matchers.LESS_THAN and other.matcher == Matchers.LESS_THAN_OR_EQUAL) - or ( - other.matcher == Matchers.GREATER_THAN - and self.matcher == Matchers.GREATER_THAN_OR_EQUAL - ) - or (self.is_upper_bound and other.is_lower_bound) - ) - if lt: - return -1 - - gt = ( - (other.matcher == Matchers.LESS_THAN and self.matcher == Matchers.LESS_THAN_OR_EQUAL) - or ( - self.matcher == Matchers.GREATER_THAN - and other.matcher == Matchers.GREATER_THAN_OR_EQUAL - ) - or (self.is_lower_bound and other.is_upper_bound) - ) - if gt: - return 1 - - return 0 - - def __lt__(self, other): - return self.compare(other) == -1 - - def __gt__(self, other): - return self.compare(other) == 1 - - def __eq___(self, other): - return self.compare(other) == 0 - - def __cmp___(self, other): - return self.compare(other) - - @property - def is_unbounded(self): - return False - - @property - def is_lower_bound(self): - return self.matcher in [Matchers.GREATER_THAN, Matchers.GREATER_THAN_OR_EQUAL] - - @property - def is_upper_bound(self): - return self.matcher in [Matchers.LESS_THAN, Matchers.LESS_THAN_OR_EQUAL] - - @property - def is_exact(self): - return self.matcher == Matchers.EXACT - - @classmethod - def _nat_cmp(cls, a, b): - def cmp_prerelease_tag(a, b): - if isinstance(a, int) and isinstance(b, int): - return _cmp(a, b) - elif isinstance(a, int): - return -1 - elif isinstance(b, int): - return 1 - else: - return _cmp(a, b) - - a, b = a or "", b or "" - a_parts, b_parts = a.split("."), b.split(".") - a_parts = [int(x) if re.match(r"^\d+$", x) else x for x in a_parts] - b_parts = [int(x) if re.match(r"^\d+$", x) else x for x in b_parts] - for 
sub_a, sub_b in zip(a_parts, b_parts): - cmp_result = cmp_prerelease_tag(sub_a, sub_b) - if cmp_result != 0: - return cmp_result - else: - return _cmp(len(a), len(b)) - - -@dataclass -class VersionRange: - start: VersionSpecifier - end: VersionSpecifier - - def _try_combine_exact(self, a, b): - if a.compare(b) == 0: - return a - else: - raise VersionsNotCompatibleError() - - def _try_combine_lower_bound_with_exact(self, lower, exact): - comparison = lower.compare(exact) - - if comparison < 0 or (comparison == 0 and lower.matcher == Matchers.GREATER_THAN_OR_EQUAL): - return exact - - raise VersionsNotCompatibleError() - - def _try_combine_lower_bound(self, a, b): - if b.is_unbounded: - return a - elif a.is_unbounded: - return b - - if not (a.is_exact or b.is_exact): - comparison = a.compare(b) < 0 - - if comparison: - return b - else: - return a - - elif a.is_exact: - return self._try_combine_lower_bound_with_exact(b, a) - - elif b.is_exact: - return self._try_combine_lower_bound_with_exact(a, b) - - def _try_combine_upper_bound_with_exact(self, upper, exact): - comparison = upper.compare(exact) - - if comparison > 0 or (comparison == 0 and upper.matcher == Matchers.LESS_THAN_OR_EQUAL): - return exact - - raise VersionsNotCompatibleError() - - def _try_combine_upper_bound(self, a, b): - if b.is_unbounded: - return a - elif a.is_unbounded: - return b - - if not (a.is_exact or b.is_exact): - comparison = a.compare(b) > 0 - - if comparison: - return b - else: - return a - - elif a.is_exact: - return self._try_combine_upper_bound_with_exact(b, a) - - elif b.is_exact: - return self._try_combine_upper_bound_with_exact(a, b) - - def reduce(self, other): - start = None - - if self.start.is_exact and other.start.is_exact: - start = end = self._try_combine_exact(self.start, other.start) - - else: - start = self._try_combine_lower_bound(self.start, other.start) - end = self._try_combine_upper_bound(self.end, other.end) - - if start.compare(end) > 0: - raise VersionsNotCompatibleError() - - return VersionRange(start=start, end=end) - - def __str__(self): - result = [] - - if self.start.is_unbounded and self.end.is_unbounded: - return "ANY" - - if not self.start.is_unbounded: - result.append(self.start.to_version_string()) - - if not self.end.is_unbounded: - result.append(self.end.to_version_string()) - - return ", ".join(result) - - def to_version_string_pair(self): - to_return = [] - - if not self.start.is_unbounded: - to_return.append(self.start.to_version_string()) - - if not self.end.is_unbounded: - to_return.append(self.end.to_version_string()) - - return to_return - - -class UnboundedVersionSpecifier(VersionSpecifier): - def __init__(self, *args, **kwargs): - super().__init__( - matcher=Matchers.EXACT, major=None, minor=None, patch=None, prerelease=None, build=None - ) - - def __str__(self): - return "*" - - @property - def is_unbounded(self): - return True - - @property - def is_lower_bound(self): - return False - - @property - def is_upper_bound(self): - return False - - @property - def is_exact(self): - return False - - -def reduce_versions(*args): - version_specifiers = [] - - for version in args: - if isinstance(version, UnboundedVersionSpecifier) or version is None: - continue - - elif isinstance(version, VersionSpecifier): - version_specifiers.append(version) - - elif isinstance(version, VersionRange): - if not isinstance(version.start, UnboundedVersionSpecifier): - version_specifiers.append(version.start) - - if not isinstance(version.end, UnboundedVersionSpecifier): - 
version_specifiers.append(version.end) - - else: - version_specifiers.append(VersionSpecifier.from_version_string(version)) - - for version_specifier in version_specifiers: - if not isinstance(version_specifier, VersionSpecifier): - raise Exception(version_specifier) - - if not version_specifiers: - return VersionRange(start=UnboundedVersionSpecifier(), end=UnboundedVersionSpecifier()) - - try: - to_return = version_specifiers.pop().to_range() - - for version_specifier in version_specifiers: - to_return = to_return.reduce(version_specifier.to_range()) - except VersionsNotCompatibleError: - raise VersionsNotCompatibleError( - "Could not find a satisfactory version from options: {}".format([str(a) for a in args]) - ) - - return to_return - - -def versions_compatible(*args): - if len(args) == 1: - return True - - try: - reduce_versions(*args) - return True - except VersionsNotCompatibleError: - return False - - -def find_possible_versions(requested_range, available_versions): - possible_versions = [] - - for version_string in available_versions: - version = VersionSpecifier.from_version_string(version_string) - - if versions_compatible(version, requested_range.start, requested_range.end): - possible_versions.append(version) - - sorted_versions = sorted(possible_versions, reverse=True) - return [v.to_version_string(skip_matcher=True) for v in sorted_versions] - - -def resolve_to_specific_version(requested_range, available_versions): - max_version = None - max_version_string = None - - for version_string in available_versions: - version = VersionSpecifier.from_version_string(version_string) - - if versions_compatible(version, requested_range.start, requested_range.end) and ( - max_version is None or max_version.compare(version) < 0 - ): - max_version = version - max_version_string = version_string - - return max_version_string - - -def filter_installable(versions: List[str], install_prerelease: bool) -> List[str]: - installable = [] - installable_dict = {} - for version_string in versions: - version = VersionSpecifier.from_version_string(version_string) - if install_prerelease or not version.prerelease: - installable.append(version) - installable_dict[str(version)] = version_string - sorted_installable = sorted(installable) - sorted_installable_original_versions = [ - str(installable_dict.get(str(version))) for version in sorted_installable - ] - return sorted_installable_original_versions diff --git a/core/dbt/task/README.md b/core/dbt/task/README.md index 9de939e4cc4..2b32f5dbfa8 100644 --- a/core/dbt/task/README.md +++ b/core/dbt/task/README.md @@ -1 +1,43 @@ # Task README + +### Task Hierarchy +``` +BaseTask + ┣ CleanTask + ┣ ConfiguredTask + ┃ ┣ GraphRunnableTask + ┃ ┃ ┣ CloneTask + ┃ ┃ ┣ CompileTask + ┃ ┃ ┃ ┣ GenerateTask + ┃ ┃ ┃ ┣ RunTask + ┃ ┃ ┃ ┃ ┣ BuildTask + ┃ ┃ ┃ ┃ ┣ FreshnessTask + ┃ ┃ ┃ ┃ ┣ SeedTask + ┃ ┃ ┃ ┃ ┣ SnapshotTask + ┃ ┃ ┃ ┃ ┗ TestTask + ┃ ┃ ┃ ┗ ShowTask + ┃ ┃ ┗ ListTask + ┃ ┣ RetryTask + ┃ ┣ RunOperationTask + ┃ ┗ ServeTask + ┣ DebugTask + ┣ DepsTask + ┗ InitTask +``` + +### Runner Hierarchy +``` +BaseRunner + ┣ CloneRunner + ┣ CompileRunner + ┃ ┣ GenericSqlRunner + ┃ ┃ ┣ SqlCompileRunner + ┃ ┃ ┗ SqlExecuteRunner + ┃ ┣ ModelRunner + ┃ ┃ ┣ SeedRunner + ┃ ┃ ┗ SnapshotRunner + ┃ ┣ ShowRunner + ┃ ┗ TestRunner + ┣ FreshnessRunner + ┗ SavedQueryRunner +``` diff --git a/core/dbt/task/base.py b/core/dbt/task/base.py index 0aae0bd8851..b7a3a6a29ab 100644 --- a/core/dbt/task/base.py +++ b/core/dbt/task/base.py @@ -5,55 +5,49 @@ from abc import ABCMeta, abstractmethod from 
contextlib import nullcontext from datetime import datetime -from typing import Type, Union, Dict, Any, Optional +from pathlib import Path +from typing import Any, Dict, List, Optional, Set import dbt.exceptions +import dbt_common.exceptions.base from dbt import tracking -from dbt.adapters.factory import get_adapter -from dbt.config import RuntimeConfig, Project -from dbt.config.profile import read_profile -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.results import ( +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.schemas.results import ( NodeStatus, - RunResult, - collect_timing_info, - RunStatus, RunningStatus, + RunStatus, + TimingInfo, + collect_timing_info, ) -from dbt.events.contextvars import get_node_info -from dbt.events.functions import fire_event +from dbt.artifacts.schemas.run import RunResult +from dbt.cli.flags import Flags +from dbt.compilation import Compiler +from dbt.config import RuntimeConfig +from dbt.config.profile import read_profile +from dbt.constants import DBT_PROJECT_FILE_NAME +from dbt.contracts.graph.manifest import Manifest from dbt.events.types import ( - LogDbtProjectError, - LogDbtProfileError, CatchableExceptionOnRun, - InternalErrorOnRun, GenericExceptionOnRun, - NodeConnectionReleaseError, + InternalErrorOnRun, + LogDbtProfileError, + LogDbtProjectError, LogDebugStackTrace, - SkippingDetails, LogSkipBecauseError, NodeCompiling, + NodeConnectionReleaseError, NodeExecuting, -) -from dbt.exceptions import ( - NotImplementedError, - CompilationError, - DbtRuntimeError, - DbtInternalError, + SkippingDetails, ) from dbt.flags import get_flags from dbt.graph import Graph -from dbt.logger import log_manager -from .printer import print_run_result_error - +from dbt.task.printer import print_run_result_error +from dbt_common.events.contextvars import get_node_info +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtInternalError, DbtRuntimeError, NotImplementedError -class NoneConfig: - @classmethod - def from_args(cls, args): - return None - -def read_profiles(profiles_dir=None): +def read_profiles(profiles_dir: Optional[str] = None) -> Dict[str, Any]: """This is only used for some error handling""" if profiles_dir is None: profiles_dir = get_flags().PROFILES_DIR @@ -69,82 +63,50 @@ def read_profiles(profiles_dir=None): class BaseTask(metaclass=ABCMeta): - ConfigType: Union[Type[NoneConfig], Type[Project]] = NoneConfig - - def __init__(self, args, config, project=None): + def __init__(self, args: Flags) -> None: self.args = args - self.config = config - self.project = config if isinstance(config, Project) else project - @classmethod - def pre_init_hook(cls, args): - """A hook called before the task is initialized.""" - if args.log_format == "json": - log_manager.format_json() - else: - log_manager.format_text() + def __enter__(self): + self.orig_dir = os.getcwd() + return self - @classmethod - def set_log_format(cls): - if get_flags().LOG_FORMAT == "json": - log_manager.format_json() - else: - log_manager.format_text() - - @classmethod - def from_args(cls, args, *pargs, **kwargs): - try: - # This is usually RuntimeConfig - config = cls.ConfigType.from_args(args) - except dbt.exceptions.DbtProjectError as exc: - fire_event(LogDbtProjectError(exc=str(exc))) - - tracking.track_invalid_invocation(args=args, result_type=exc.result_type) - raise dbt.exceptions.DbtRuntimeError("Could not run dbt") from exc - except dbt.exceptions.DbtProfileError as exc: - all_profile_names = 
list(read_profiles(get_flags().PROFILES_DIR).keys()) - fire_event(LogDbtProfileError(exc=str(exc), profiles=all_profile_names)) - tracking.track_invalid_invocation(args=args, result_type=exc.result_type) - raise dbt.exceptions.DbtRuntimeError("Could not run dbt") from exc - return cls(args, config, *pargs, **kwargs) + def __exit__(self, exc_type, exc_value, traceback): + os.chdir(self.orig_dir) @abstractmethod def run(self): - raise dbt.exceptions.NotImplementedError("Not Implemented") + raise dbt_common.exceptions.base.NotImplementedError("Not Implemented") def interpret_results(self, results): return True -def get_nearest_project_dir(project_dir: Optional[str]) -> str: +def get_nearest_project_dir(project_dir: Optional[str]) -> Path: # If the user provides an explicit project directory, use that # but don't look at parent directories. if project_dir: - project_file = os.path.join(project_dir, "dbt_project.yml") - if os.path.exists(project_file): - return project_dir + cur_dir = Path(project_dir) + project_file = Path(project_dir) / DBT_PROJECT_FILE_NAME + if project_file.is_file(): + return cur_dir else: - raise dbt.exceptions.DbtRuntimeError( + raise dbt_common.exceptions.DbtRuntimeError( "fatal: Invalid --project-dir flag. Not a dbt project. " "Missing dbt_project.yml file" ) - root_path = os.path.abspath(os.sep) - cwd = os.getcwd() - - while cwd != root_path: - project_file = os.path.join(cwd, "dbt_project.yml") - if os.path.exists(project_file): - return cwd - cwd = os.path.dirname(cwd) - - raise dbt.exceptions.DbtRuntimeError( - "fatal: Not a dbt project (or any of the parent directories). " - "Missing dbt_project.yml file" - ) + cur_dir = Path.cwd() + project_file = cur_dir / DBT_PROJECT_FILE_NAME + if project_file.is_file(): + return cur_dir + else: + raise dbt_common.exceptions.DbtRuntimeError( + "fatal: Not a dbt project (or any of the parent directories). " + "Missing dbt_project.yml file" + ) -def move_to_nearest_project_dir(project_dir: Optional[str]) -> str: +def move_to_nearest_project_dir(project_dir: Optional[str]) -> Path: nearest_project_dir = get_nearest_project_dir(project_dir) os.chdir(nearest_project_dir) return nearest_project_dir @@ -154,32 +116,44 @@ def move_to_nearest_project_dir(project_dir: Optional[str]) -> str: # produce the same behavior. currently this class only contains manifest compilation, # holding a manifest, and moving direcories. 
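The new `__enter__`/`__exit__` pair on `BaseTask` above is the heart of the working-directory fix: the task records the caller's directory on entry and restores it on exit, so commands that `chdir` into the nearest project directory via `move_to_nearest_project_dir` no longer leave the process somewhere else afterwards. A minimal sketch of the pattern, using a hypothetical task class rather than dbt's real signature:

```
import os
import tempfile


class ChdirRestoringTask:
    """Sketch of the save/restore-cwd pattern; dbt's BaseTask also takes parsed flags."""

    def __enter__(self):
        self.orig_dir = os.getcwd()  # remember where the caller started
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        os.chdir(self.orig_dir)  # always restore, even if run() raised

    def run(self) -> None:
        os.chdir(tempfile.gettempdir())  # stand-in for "move to nearest project dir"


before = os.getcwd()
with ChdirRestoringTask() as task:
    task.run()
assert os.getcwd() == before  # the caller's cwd is untouched
```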
class ConfiguredTask(BaseTask): - ConfigType = RuntimeConfig - - def __init__(self, args, config, manifest: Optional[Manifest] = None): - super().__init__(args, config) + def __init__( + self, args: Flags, config: RuntimeConfig, manifest: Optional[Manifest] = None + ) -> None: + super().__init__(args) + self.config = config self.graph: Optional[Graph] = None self.manifest = manifest + self.compiler = Compiler(self.config) - def compile_manifest(self): + def compile_manifest(self) -> None: if self.manifest is None: raise DbtInternalError("compile_manifest called before manifest was loaded") start_compile_manifest = time.perf_counter() - # we cannot get adapter in init since it will break rpc #5579 - adapter = get_adapter(self.config) - compiler = adapter.get_compiler() - self.graph = compiler.compile(self.manifest) + self.graph = self.compiler.compile(self.manifest) compile_time = time.perf_counter() - start_compile_manifest if dbt.tracking.active_user is not None: dbt.tracking.track_runnable_timing({"graph_compilation_elapsed": compile_time}) @classmethod - def from_args(cls, args, *pargs, **kwargs): + def from_args(cls, args: Flags, *pargs, **kwargs): move_to_nearest_project_dir(args.project_dir) - return super().from_args(args, *pargs, **kwargs) + try: + # This is usually RuntimeConfig + config = RuntimeConfig.from_args(args) + except dbt.exceptions.DbtProjectError as exc: + fire_event(LogDbtProjectError(exc=str(exc))) + + tracking.track_invalid_invocation(args=args, result_type=exc.result_type) + raise dbt_common.exceptions.DbtRuntimeError("Could not run dbt") from exc + except dbt.exceptions.DbtProfileError as exc: + all_profile_names = list(read_profiles(get_flags().PROFILES_DIR).keys()) + fire_event(LogDbtProfileError(exc=str(exc), profiles=all_profile_names)) + tracking.track_invalid_invocation(args=args, result_type=exc.result_type) + raise dbt_common.exceptions.DbtRuntimeError("Could not run dbt") from exc + return cls(args, config, *pargs, **kwargs) class ExecutionContext: @@ -187,14 +161,15 @@ class ExecutionContext: timing information and the newest (compiled vs executed) form of the node. 
""" - def __init__(self, node): - self.timing = [] + def __init__(self, node) -> None: + self.timing: List[TimingInfo] = [] self.node = node class BaseRunner(metaclass=ABCMeta): - def __init__(self, config, adapter, node, node_index, num_nodes): + def __init__(self, config, adapter, node, node_index: int, num_nodes: int) -> None: self.config = config + self.compiler = Compiler(config) self.adapter = adapter self.node = node self.node_index = node_index @@ -209,6 +184,9 @@ def __init__(self, config, adapter, node, node_index, num_nodes): def compile(self, manifest: Manifest) -> Any: pass + def _node_build_path(self) -> Optional[str]: + return self.node.build_path if hasattr(self.node, "build_path") else None + def get_result_status(self, result) -> Dict[str, str]: if result.status == NodeStatus.Error: return {"node_status": "error", "node_error": str(result.message)} @@ -296,9 +274,13 @@ def from_run_result(self, result, start_time, timing_info): failures=result.failures, ) - def compile_and_execute(self, manifest, ctx): + def compile_and_execute(self, manifest: Manifest, ctx: ExecutionContext): result = None - with self.adapter.connection_for(self.node) if get_flags().INTROSPECT else nullcontext(): + with ( + self.adapter.connection_named(self.node.unique_id, self.node) + if get_flags().INTROSPECT + else nullcontext() + ): ctx.node.update_event_status(node_status=RunningStatus.Compiling) fire_event( NodeCompiling( @@ -308,7 +290,7 @@ def compile_and_execute(self, manifest, ctx): with collect_timing_info("compile", ctx.timing.append): # if we fail here, we still have a compiled node to return # this has the benefit of showing a build path for the errant - # model + # model. This calls the 'compile' method in CompileTask ctx.node = self.compile(manifest) # for ephemeral nodes, we only want to compile, not run @@ -325,7 +307,7 @@ def compile_and_execute(self, manifest, ctx): return result - def _handle_catchable_exception(self, e, ctx): + def _handle_catchable_exception(self, e: DbtRuntimeError, ctx: ExecutionContext) -> str: if e.node is None: e.add_node(ctx.node) @@ -336,25 +318,29 @@ def _handle_catchable_exception(self, e, ctx): ) return str(e) - def _handle_internal_exception(self, e, ctx): - fire_event(InternalErrorOnRun(build_path=self.node.build_path, exc=str(e))) + def _handle_internal_exception(self, e: DbtInternalError, ctx: ExecutionContext) -> str: + fire_event( + InternalErrorOnRun( + build_path=self._node_build_path(), exc=str(e), node_info=get_node_info() + ) + ) return str(e) - def _handle_generic_exception(self, e, ctx): + def _handle_generic_exception(self, e: Exception, ctx: ExecutionContext) -> str: fire_event( GenericExceptionOnRun( - build_path=self.node.build_path, + build_path=self._node_build_path(), unique_id=self.node.unique_id, exc=str(e), + node_info=get_node_info(), ) ) fire_event(LogDebugStackTrace(exc_info=traceback.format_exc())) return str(e) - def handle_exception(self, e, ctx): - catchable_errors = (CompilationError, DbtRuntimeError) - if isinstance(e, catchable_errors): + def handle_exception(self, e: Exception, ctx: ExecutionContext) -> str: + if isinstance(e, DbtRuntimeError): error = self._handle_catchable_exception(e, ctx) elif isinstance(e, DbtInternalError): error = self._handle_internal_exception(e, ctx) @@ -362,7 +348,7 @@ def handle_exception(self, e, ctx): error = self._handle_generic_exception(e, ctx) return error - def safe_run(self, manifest): + def safe_run(self, manifest: Manifest): started = time.time() ctx = ExecutionContext(self.node) 
error = None @@ -409,31 +395,32 @@ def _safe_release_connection(self): return None - def before_execute(self): - raise NotImplementedError() + def before_execute(self) -> None: + raise NotImplementedError("before_execute is not implemented") def execute(self, compiled_node, manifest): - raise NotImplementedError() + raise NotImplementedError("execute is not implemented") def run(self, compiled_node, manifest): return self.execute(compiled_node, manifest) - def after_execute(self, result): - raise NotImplementedError() + def after_execute(self, result) -> None: + raise NotImplementedError("after_execute is not implemented") - def _skip_caused_by_ephemeral_failure(self): + def _skip_caused_by_ephemeral_failure(self) -> bool: if self.skip_cause is None or self.skip_cause.node is None: return False return self.skip_cause.node.is_ephemeral_model def on_skip(self): - schema_name = self.node.schema + schema_name = getattr(self.node, "schema", "") node_name = self.node.name error_message = None if not self.node.is_ephemeral_model: # if this model was skipped due to an upstream ephemeral model # failure, print a special 'error skip' message. + # Include skip_cause NodeStatus if self._skip_caused_by_ephemeral_failure(): fire_event( LogSkipBecauseError( @@ -441,8 +428,10 @@ def on_skip(self): relation=node_name, index=self.node_index, total=self.num_nodes, + status=self.skip_cause.status, ) ) + # skip_cause here should be the run_result from the ephemeral model print_run_result_error(result=self.skip_cause, newline=False) if self.skip_cause is None: # mypy appeasement raise DbtInternalError( @@ -473,6 +462,33 @@ def on_skip(self): node_result = RunResult.from_node(self.node, RunStatus.Skipped, error_message) return node_result - def do_skip(self, cause=None): + def do_skip(self, cause=None) -> None: self.skip = True self.skip_cause = cause + + +def resource_types_from_args( + args: Flags, all_resource_values: Set[NodeType], default_resource_values: Set[NodeType] +) -> Set[NodeType]: + + if not args.resource_types: + resource_types = default_resource_values + else: + # This is a list of strings, not NodeTypes + arg_resource_types = set(args.resource_types) + + if "all" in arg_resource_types: + arg_resource_types.remove("all") + arg_resource_types.update(all_resource_values) + if "default" in arg_resource_types: + arg_resource_types.remove("default") + arg_resource_types.update(default_resource_values) + # Convert to a set of NodeTypes now that the non-NodeType strings are gone + resource_types = set([NodeType(rt) for rt in list(arg_resource_types)]) + + if args.exclude_resource_types: + # Convert from a list of strings to a set of NodeTypes + exclude_resource_types = set([NodeType(rt) for rt in args.exclude_resource_types]) + resource_types = resource_types - exclude_resource_types + + return resource_types diff --git a/core/dbt/task/build.py b/core/dbt/task/build.py index 8a5dc39c9b7..625a4498d6a 100644 --- a/core/dbt/task/build.py +++ b/core/dbt/task/build.py @@ -1,14 +1,60 @@ -from .run import RunTask, ModelRunner as run_model_runner -from .snapshot import SnapshotRunner as snapshot_model_runner -from .seed import SeedRunner as seed_runner -from .test import TestRunner as test_runner +import threading +from typing import Dict, List, Optional, Set, Type -from dbt.adapters.factory import get_adapter -from dbt.contracts.results import NodeStatus +from dbt.artifacts.schemas.results import NodeStatus, RunStatus +from dbt.artifacts.schemas.run import RunResult +from dbt.cli.flags import Flags +from 
dbt.config.runtime import RuntimeConfig +from dbt.contracts.graph.manifest import Manifest +from dbt.events.types import LogNodeNoOpResult from dbt.exceptions import DbtInternalError -from dbt.graph import ResourceTypeSelector +from dbt.graph import Graph, GraphQueue, ResourceTypeSelector from dbt.node_types import NodeType -from dbt.task.test import TestSelector +from dbt.task.base import BaseRunner, resource_types_from_args +from dbt_common.events.functions import fire_event + +from .run import ModelRunner as run_model_runner +from .run import RunTask +from .seed import SeedRunner as seed_runner +from .snapshot import SnapshotRunner as snapshot_model_runner +from .test import TestRunner as test_runner + + +class SavedQueryRunner(BaseRunner): + # Stub. No-op Runner for Saved Queries, which require MetricFlow for execution. + @property + def description(self) -> str: + return f"saved query {self.node.name}" + + def before_execute(self) -> None: + pass + + def compile(self, manifest: Manifest): + return self.node + + def after_execute(self, result) -> None: + fire_event( + LogNodeNoOpResult( + description=self.description, + index=self.node_index, + total=self.num_nodes, + node_info=self.node.node_info, + ) + ) + + def execute(self, compiled_node, manifest): + # no-op + return RunResult( + node=compiled_node, + status=RunStatus.Success, + timing=[], + thread_id=threading.current_thread().name, + execution_time=0, + message="NO-OP", + adapter_response={}, + failures=0, + agate_table=None, + ) class BuildTask(RunTask): @@ -19,41 +65,137 @@ class BuildTask(RunTask): I.E. a resource of type Model is handled by the ModelRunner which is imported as run_model_runner.""" - MARK_DEPENDENT_ERRORS_STATUSES = [NodeStatus.Error, NodeStatus.Fail] + MARK_DEPENDENT_ERRORS_STATUSES = [NodeStatus.Error, NodeStatus.Fail, NodeStatus.Skipped] RUNNER_MAP = { NodeType.Model: run_model_runner, NodeType.Snapshot: snapshot_model_runner, NodeType.Seed: seed_runner, NodeType.Test: test_runner, + NodeType.Unit: test_runner, + NodeType.SavedQuery: SavedQueryRunner, } ALL_RESOURCE_VALUES = frozenset({x for x in RUNNER_MAP.keys()}) - @property - def resource_types(self): - if not self.args.resource_types: - return list(self.ALL_RESOURCE_VALUES) + def __init__(self, args: Flags, config: RuntimeConfig, manifest: Manifest) -> None: + super().__init__(args, config, manifest) + self.selected_unit_tests: Set = set() + self.model_to_unit_test_map: Dict[str, List] = {} + + def resource_types(self, no_unit_tests: bool = False) -> List[NodeType]: + resource_types = resource_types_from_args( + self.args, set(self.ALL_RESOURCE_VALUES), set(self.ALL_RESOURCE_VALUES) + ) + + # First we get selected_nodes including unit tests, then without, + # and do a set difference. 
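The comment above describes the selection trick `BuildTask` now relies on: select once with unit tests, once without, and treat the difference as the unit tests to schedule ahead of their models (see `get_graph_queue` and `build_model_to_unit_test_map` further down). A small illustration with made-up unique_ids; the ids and the `depends_on` mapping below are hypothetical, not taken from this diff:

```
from collections import defaultdict
from typing import Dict, Iterable, List

selected_with_unit_tests = {
    "model.proj.a", "model.proj.b", "unit_test.proj.a_rounds", "unit_test.proj.a_nulls",
}
selected_without_unit_tests = {"model.proj.a", "model.proj.b"}

# The set difference is exactly the unit tests that were selected.
selected_unit_tests = selected_with_unit_tests - selected_without_unit_tests

# Each unit test declares the model it exercises; group them so the unit tests
# can run (and potentially skip the model) before the model itself.
unit_test_model = {
    "unit_test.proj.a_rounds": "model.proj.a",
    "unit_test.proj.a_nulls": "model.proj.a",
}


def build_model_to_unit_test_map(unit_tests: Iterable[str]) -> Dict[str, List[str]]:
    grouped: Dict[str, List[str]] = defaultdict(list)
    for ut in unit_tests:
        grouped[unit_test_model[ut]].append(ut)
    return dict(grouped)


print(build_model_to_unit_test_map(selected_unit_tests))
# e.g. {'model.proj.a': ['unit_test.proj.a_rounds', 'unit_test.proj.a_nulls']} (order may vary)
```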
+ if no_unit_tests is True and NodeType.Unit in resource_types: + resource_types.remove(NodeType.Unit) + return list(resource_types) + + # overrides get_graph_queue in runnable.py + def get_graph_queue(self) -> GraphQueue: + # Following uses self.selection_arg and self.exclusion_arg + spec = self.get_selection_spec() + + # selector including unit tests + full_selector = self.get_node_selector(no_unit_tests=False) + # selected node unique_ids with unit_tests + full_selected_nodes = full_selector.get_selected(spec) - values = set(self.args.resource_types) + # This selector removes the unit_tests from the selector + selector_wo_unit_tests = self.get_node_selector(no_unit_tests=True) + # selected node unique_ids without unit_tests + selected_nodes_wo_unit_tests = selector_wo_unit_tests.get_selected(spec) - if "all" in values: - values.remove("all") - values.update(self.ALL_RESOURCE_VALUES) + # Get the difference in the sets of nodes with and without unit tests and + # save it + selected_unit_tests = full_selected_nodes - selected_nodes_wo_unit_tests + self.selected_unit_tests = selected_unit_tests + self.build_model_to_unit_test_map(selected_unit_tests) - return list(values) + # get_graph_queue in the selector will remove NodeTypes not specified + # in the node_selector (filter_selection). + return selector_wo_unit_tests.get_graph_queue(spec) - def get_node_selector(self) -> ResourceTypeSelector: + # overrides handle_job_queue in runnable.py + def handle_job_queue(self, pool, callback): + if self.run_count == 0: + self.num_nodes = self.num_nodes + len(self.selected_unit_tests) + node = self.job_queue.get() + if ( + node.resource_type == NodeType.Model + and self.model_to_unit_test_map + and node.unique_id in self.model_to_unit_test_map + ): + self.handle_model_with_unit_tests_node(node, pool, callback) + + else: + self.handle_job_queue_node(node, pool, callback) + + def handle_model_with_unit_tests_node(self, node, pool, callback): + self._raise_set_error() + args = [node] + if self.config.args.single_threaded: + callback(self.call_model_and_unit_tests_runner(*args)) + else: + pool.apply_async(self.call_model_and_unit_tests_runner, args=args, callback=callback) + + def call_model_and_unit_tests_runner(self, node) -> RunResult: + assert self.manifest + for unit_test_unique_id in self.model_to_unit_test_map[node.unique_id]: + unit_test_node = self.manifest.unit_tests[unit_test_unique_id] + unit_test_runner = self.get_runner(unit_test_node) + # If the model is marked skip, also skip the unit tests + if node.unique_id in self._skipped_children: + # cause is only for ephemeral nodes + unit_test_runner.do_skip(cause=None) + result = self.call_runner(unit_test_runner) + self._handle_result(result) + if result.status in self.MARK_DEPENDENT_ERRORS_STATUSES: + # The _skipped_children dictionary can contain a run_result for ephemeral nodes, + # but that should never be the case here. + self._skipped_children[node.unique_id] = None + runner = self.get_runner(node) + if runner.node.unique_id in self._skipped_children: + cause = self._skipped_children.pop(runner.node.unique_id) + runner.do_skip(cause=cause) + return self.call_runner(runner) + + # handle non-model-plus-unit-tests nodes + def handle_job_queue_node(self, node, pool, callback): + self._raise_set_error() + runner = self.get_runner(node) + # we finally know what we're running! 
Make sure we haven't decided + # to skip it due to upstream failures + if runner.node.unique_id in self._skipped_children: + cause = self._skipped_children.pop(runner.node.unique_id) + runner.do_skip(cause=cause) + args = [runner] + if self.config.args.single_threaded: + callback(self.call_runner(*args)) + else: + pool.apply_async(self.call_runner, args=args, callback=callback) + + # Make a map of model unique_ids to selected unit test unique_ids, + # for processing before the model. + def build_model_to_unit_test_map(self, selected_unit_tests): + dct = {} + for unit_test_unique_id in selected_unit_tests: + unit_test = self.manifest.unit_tests[unit_test_unique_id] + model_unique_id = unit_test.depends_on.nodes[0] + if model_unique_id not in dct: + dct[model_unique_id] = [] + dct[model_unique_id].append(unit_test.unique_id) + self.model_to_unit_test_map = dct + + # We return two different kinds of selectors, one with unit tests and one without + def get_node_selector(self, no_unit_tests=False) -> ResourceTypeSelector: if self.manifest is None or self.graph is None: raise DbtInternalError("manifest and graph must be set to get node selection") - resource_types = self.resource_types + resource_types = self.resource_types(no_unit_tests) - if resource_types == [NodeType.Test]: - return TestSelector( - graph=self.graph, - manifest=self.manifest, - previous_state=self.previous_state, - ) return ResourceTypeSelector( graph=self.graph, manifest=self.manifest, @@ -61,12 +203,11 @@ def get_node_selector(self) -> ResourceTypeSelector: resource_types=resource_types, ) - def get_runner_type(self, node): + def get_runner_type(self, node) -> Optional[Type[BaseRunner]]: return self.RUNNER_MAP.get(node.resource_type) - def compile_manifest(self): + # Special build compile_manifest method to pass add_test_edges to the compiler + def compile_manifest(self) -> None: if self.manifest is None: raise DbtInternalError("compile_manifest called before manifest was loaded") - adapter = get_adapter(self.config) - compiler = adapter.get_compiler() - self.graph = compiler.compile(self.manifest, add_test_edges=True) + self.graph: Graph = self.compiler.compile(self.manifest, add_test_edges=True) diff --git a/core/dbt/task/clean.py b/core/dbt/task/clean.py index 6f31fc81ecd..cb328ebc99e 100644 --- a/core/dbt/task/clean.py +++ b/core/dbt/task/clean.py @@ -1,54 +1,56 @@ -import os.path -import os -import shutil -from typing import List +from pathlib import Path +from shutil import rmtree from dbt import deprecations -from dbt.events.functions import fire_event -from dbt.events.types import ( - CheckCleanPath, - ConfirmCleanPath, - ProtectedCleanPath, - FinishedCleanPaths, -) -from dbt.task.base import ( - BaseTask, - move_to_nearest_project_dir, -) +from dbt.cli.flags import Flags +from dbt.config.project import Project +from dbt.events.types import CheckCleanPath, ConfirmCleanPath, FinishedCleanPaths +from dbt.task.base import BaseTask, move_to_nearest_project_dir +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtRuntimeError class CleanTask(BaseTask): - def run(self): + def __init__(self, args: Flags, config: Project): + super().__init__(args) + self.config = config + self.project = config + + def run(self) -> None: """ This function takes all the paths in the target file and cleans the project paths that are not protected. 
""" - move_to_nearest_project_dir(self.args.project_dir) + project_dir = move_to_nearest_project_dir(self.args.project_dir) + + potential_clean_paths = set(Path(p).resolve() for p in self.project.clean_targets) + source_paths = set( + Path(p).resolve() for p in (*self.project.all_source_paths, *self.project.test_paths) + ) + clean_paths = potential_clean_paths.difference(source_paths) + + if potential_clean_paths != clean_paths: + raise DbtRuntimeError( + f"dbt will not clean the following source paths: {[str(s) for s in source_paths.intersection(potential_clean_paths)]}" + ) + + paths_outside_project = set( + path for path in clean_paths if project_dir not in path.absolute().parents + ) + if paths_outside_project and self.args.clean_project_files_only: + raise DbtRuntimeError( + f"dbt will not clean the following directories outside the project: {[str(p) for p in paths_outside_project]}" + ) + if ( "dbt_modules" in self.project.clean_targets and self.config.packages_install_path not in self.config.clean_targets ): deprecations.warn("install-packages-path") - for path in self.project.clean_targets: - fire_event(CheckCleanPath(path=path)) - if not is_protected_path(path, self.project.model_paths, self.project.test_paths): - shutil.rmtree(path, True) - fire_event(ConfirmCleanPath(path=path)) - else: - fire_event(ProtectedCleanPath(path=path)) - - fire_event(FinishedCleanPaths()) + for path in clean_paths: + fire_event(CheckCleanPath(path=str(path))) + rmtree(path, True) + fire_event(ConfirmCleanPath(path=str(path))) -def is_protected_path(path: str, model_paths: List[str], test_paths: List[str]) -> bool: - """This function identifies protected paths.""" - abs_path = os.path.abspath(path) - protected_paths = model_paths + test_paths + ["."] - protected_abs_paths = [os.path.abspath(p) for p in protected_paths] - return abs_path in set(protected_abs_paths) or is_project_path(abs_path) - - -def is_project_path(path: str) -> bool: - """This function identifies project paths.""" - proj_path = os.path.abspath(".") - return not os.path.commonprefix([proj_path, os.path.abspath(path)]) == proj_path + fire_event(FinishedCleanPaths()) diff --git a/core/dbt/task/clone.py b/core/dbt/task/clone.py index 87fb1a78106..09a7942aa31 100644 --- a/core/dbt/task/clone.py +++ b/core/dbt/task/clone.py @@ -1,25 +1,26 @@ import threading -from typing import AbstractSet, Any, List, Iterable, Set +from typing import AbstractSet, Any, Collection, Iterable, List, Optional, Set, Type from dbt.adapters.base import BaseRelation +from dbt.artifacts.resources.types import NodeType +from dbt.artifacts.schemas.run import RunResult, RunStatus from dbt.clients.jinja import MacroGenerator from dbt.context.providers import generate_runtime_model_context -from dbt.contracts.results import RunStatus, RunResult -from dbt.dataclass_schema import dbtClassMixin -from dbt.exceptions import DbtInternalError, CompilationError +from dbt.contracts.graph.manifest import Manifest from dbt.graph import ResourceTypeSelector -from dbt.node_types import NodeType -from dbt.parser.manifest import write_manifest -from dbt.task.base import BaseRunner +from dbt.node_types import REFABLE_NODE_TYPES +from dbt.task.base import BaseRunner, resource_types_from_args from dbt.task.run import _validate_materialization_relations_dict -from dbt.task.runnable import GraphRunnableTask +from dbt.task.runnable import GraphRunnableMode, GraphRunnableTask +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.exceptions import CompilationError, 
DbtInternalError class CloneRunner(BaseRunner): - def before_execute(self): + def before_execute(self) -> None: pass - def after_execute(self, result): + def after_execute(self, result) -> None: pass def _build_run_model_result(self, model, context): @@ -44,7 +45,7 @@ def _build_run_model_result(self, model, context): failures=None, ) - def compile(self, manifest): + def compile(self, manifest: Manifest): # no-op return self.node @@ -91,9 +92,17 @@ def execute(self, model, manifest): class CloneTask(GraphRunnableTask): - def raise_on_first_error(self): + def raise_on_first_error(self) -> bool: return False + def get_run_mode(self) -> GraphRunnableMode: + return GraphRunnableMode.Independent + + def _get_deferred_manifest(self) -> Optional[Manifest]: + # Unlike other commands, 'clone' always requires a state manifest + # Load previous state, regardless of whether --defer flag has been set + return self._get_previous_state() + def get_model_schemas(self, adapter, selected_uids: Iterable[str]) -> Set[BaseRelation]: if self.manifest is None: raise DbtInternalError("manifest was None in get_model_schemas") @@ -108,7 +117,7 @@ def get_model_schemas(self, adapter, selected_uids: Iterable[str]) -> Set[BaseRe # cache the 'other' schemas too! if node.defer_relation: # type: ignore - other_relation = adapter.Relation.create_from_node( + other_relation = adapter.Relation.create_from( self.config, node.defer_relation # type: ignore ) result.add(other_relation.without_identifier()) @@ -117,28 +126,22 @@ def get_model_schemas(self, adapter, selected_uids: Iterable[str]) -> Set[BaseRe def before_run(self, adapter, selected_uids: AbstractSet[str]): with adapter.connection_named("master"): - # unlike in other tasks, we want to add information from the --state manifest *before* caching! 
- self.defer_to_manifest(adapter, selected_uids) - # only create *our* schemas, but cache *other* schemas in addition + self.defer_to_manifest() + # only create target schemas, but also cache defer_relation schemas schemas_to_create = super().get_model_schemas(adapter, selected_uids) self.create_schemas(adapter, schemas_to_create) schemas_to_cache = self.get_model_schemas(adapter, selected_uids) self.populate_adapter_cache(adapter, schemas_to_cache) @property - def resource_types(self): - if not self.args.resource_types: - return NodeType.refable() - - values = set(self.args.resource_types) - - if "all" in values: - values.remove("all") - values.update(NodeType.refable()) - - values = [NodeType(val) for val in values if val in NodeType.refable()] + def resource_types(self) -> List[NodeType]: + resource_types: Collection[NodeType] = resource_types_from_args( + self.args, set(REFABLE_NODE_TYPES), set(REFABLE_NODE_TYPES) + ) - return list(values) + # filter out any non-refable node types + resource_types = [rt for rt in resource_types if rt in REFABLE_NODE_TYPES] + return list(resource_types) def get_node_selector(self) -> ResourceTypeSelector: resource_types = self.resource_types @@ -152,19 +155,5 @@ def get_node_selector(self) -> ResourceTypeSelector: resource_types=resource_types, ) - def get_runner_type(self, _): + def get_runner_type(self, _) -> Optional[Type[BaseRunner]]: return CloneRunner - - # Note that this is different behavior from --defer with other commands, which *merge* - # selected nodes from this manifest + unselected nodes from the other manifest - def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]): - deferred_manifest = self._get_deferred_manifest() - if deferred_manifest is None: - return - if self.manifest is None: - raise DbtInternalError( - "Expected to defer to manifest, but there is no runtime manifest to defer from!" - ) - self.manifest.add_from_artifact(other=deferred_manifest) - # TODO: is it wrong to write the manifest here? I think it's right... 
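Both `BuildTask` earlier and `CloneTask` here now derive their resource types from the shared `resource_types_from_args` helper added in `core/dbt/task/base.py`, with clone additionally filtering down to refable node types. A toy version of that expansion logic using plain strings; the type names and defaults below are illustrative, not dbt's exact `NodeType` values:

```
from typing import Iterable, List, Set

ALL_TYPES = {"model", "seed", "snapshot", "test", "unit_test", "saved_query"}
DEFAULT_TYPES = {"model", "seed", "snapshot", "test"}


def resolve_resource_types(
    requested: Iterable[str],
    excluded: Iterable[str],
    all_values: Set[str] = ALL_TYPES,
    default_values: Set[str] = DEFAULT_TYPES,
) -> List[str]:
    # Expand the "all"/"default" shorthands, then subtract any excluded types.
    resolved = set(requested) or set(default_values)
    if "all" in resolved:
        resolved.discard("all")
        resolved |= all_values
    if "default" in resolved:
        resolved.discard("default")
        resolved |= default_values
    return sorted(resolved - set(excluded))


print(resolve_resource_types(["all"], ["unit_test"]))
print(resolve_resource_types([], []))  # falls back to the defaults
```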
- write_manifest(self.manifest, self.config.target_path) diff --git a/core/dbt/task/compile.py b/core/dbt/task/compile.py index ba505e1a6ec..fcb4c69d4d4 100644 --- a/core/dbt/task/compile.py +++ b/core/dbt/task/compile.py @@ -1,30 +1,28 @@ import threading -from typing import AbstractSet, Optional - -from dbt.contracts.graph.manifest import WritableManifest -from dbt.contracts.results import RunStatus, RunResult -from dbt.events.base_types import EventLevel -from dbt.events.functions import fire_event -from dbt.events.types import CompiledNode, Note, ParseInlineNodeError -from dbt.exceptions import ( - CompilationError, - DbtInternalError, - Exception as DbtException, -) +from typing import Optional, Type +from dbt.artifacts.schemas.run import RunResult, RunStatus +from dbt.contracts.graph.manifest import Manifest +from dbt.events.types import CompiledNode, ParseInlineNodeError from dbt.graph import ResourceTypeSelector -from dbt.node_types import NodeType -from dbt.parser.manifest import write_manifest, process_node +from dbt.node_types import EXECUTABLE_NODE_TYPES, NodeType +from dbt.parser.manifest import process_node from dbt.parser.sql import SqlBlockParser from dbt.task.base import BaseRunner from dbt.task.runnable import GraphRunnableTask +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Note +from dbt_common.exceptions import CompilationError +from dbt_common.exceptions import DbtBaseException as DbtException +from dbt_common.exceptions import DbtInternalError class CompileRunner(BaseRunner): - def before_execute(self): + def before_execute(self) -> None: pass - def after_execute(self, result): + def after_execute(self, result) -> None: pass def execute(self, compiled_node, manifest): @@ -39,9 +37,8 @@ def execute(self, compiled_node, manifest): failures=None, ) - def compile(self, manifest): - compiler = self.adapter.get_compiler() - return compiler.compile_node(self.node, manifest, {}) + def compile(self, manifest: Manifest): + return self.compiler.compile_node(self.node, manifest, {}) class CompileTask(GraphRunnableTask): @@ -49,14 +46,14 @@ class CompileTask(GraphRunnableTask): # it should be removed before the task is complete _inline_node_id = None - def raise_on_first_error(self): + def raise_on_first_error(self) -> bool: return True def get_node_selector(self) -> ResourceTypeSelector: if getattr(self.args, "inline", None): resource_types = [NodeType.SqlOperation] else: - resource_types = NodeType.executable() + resource_types = EXECUTABLE_NODE_TYPES if self.manifest is None or self.graph is None: raise DbtInternalError("manifest and graph must be set to get perform node selection") @@ -67,10 +64,10 @@ def get_node_selector(self) -> ResourceTypeSelector: resource_types=resource_types, ) - def get_runner_type(self, _): + def get_runner_type(self, _) -> Optional[Type[BaseRunner]]: return CompileRunner - def task_end_messages(self, results): + def task_end_messages(self, results) -> None: is_inline = bool(getattr(self.args, "inline", None)) output_format = getattr(self.args, "output", "text") @@ -101,26 +98,6 @@ def task_end_messages(self, results): ) ) - def _get_deferred_manifest(self) -> Optional[WritableManifest]: - return super()._get_deferred_manifest() if self.args.defer else None - - def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]): - deferred_manifest = self._get_deferred_manifest() - if deferred_manifest is None: - return - if self.manifest is None: - raise 
DbtInternalError( - "Expected to defer to manifest, but there is no runtime manifest to defer from!" - ) - self.manifest.merge_from_artifact( - adapter=adapter, - other=deferred_manifest, - selected=selected_uids, - favor_state=bool(self.args.favor_state), - ) - # TODO: is it wrong to write the manifest here? I think it's right... - write_manifest(self.manifest, self.config.project_target_path) - def _runtime_initialize(self): if getattr(self.args, "inline", None): try: @@ -129,6 +106,12 @@ def _runtime_initialize(self): ) sql_node = block_parser.parse_remote(self.args.inline, "inline_query") process_node(self.config, self.manifest, sql_node) + # Special hack to remove disabled, if it's there. This would only happen + # if all models are disabled in dbt_project + if sql_node.config.enabled is False: + sql_node.config.enabled = True + self.manifest.disabled.pop(sql_node.unique_id) + self.manifest.nodes[sql_node.unique_id] = sql_node # keep track of the node added to the manifest self._inline_node_id = sql_node.unique_id except CompilationError as exc: @@ -146,14 +129,14 @@ def _runtime_initialize(self): raise DbtException("Error parsing inline query") super()._runtime_initialize() - def after_run(self, adapter, results): + def after_run(self, adapter, results) -> None: # remove inline node from manifest if self._inline_node_id: self.manifest.nodes.pop(self._inline_node_id) self._inline_node_id = None super().after_run(adapter, results) - def _handle_result(self, result): + def _handle_result(self, result) -> None: super()._handle_result(result) if ( diff --git a/core/dbt/task/debug.py b/core/dbt/task/debug.py index 769c7fd8cad..a16346c51cb 100644 --- a/core/dbt/task/debug.py +++ b/core/dbt/task/debug.py @@ -3,30 +3,28 @@ import os import platform import sys - from collections import namedtuple from enum import Flag -from typing import Optional, Dict, Any, List, Tuple +from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple -from dbt.events.functions import fire_event -from dbt.events.types import ( - OpenCommand, - DebugCmdOut, - DebugCmdResult, -) -import dbt.clients.system import dbt.exceptions +import dbt_common.clients.system +import dbt_common.exceptions from dbt.adapters.factory import get_adapter, register_adapter -from dbt.config import PartialProject, Project, Profile -from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer -from dbt.contracts.results import RunStatus +from dbt.artifacts.schemas.results import RunStatus +from dbt.cli.flags import Flags from dbt.clients.yaml_helper import load_yaml_text +from dbt.config import PartialProject, Profile, Project +from dbt.config.renderer import DbtProjectYamlRenderer, ProfileRenderer +from dbt.events.types import DebugCmdOut, DebugCmdResult, OpenCommand from dbt.links import ProfileConfigDocs -from dbt.ui import green, red -from dbt.events.format import pluralize -from dbt.version import get_installed_version - +from dbt.mp_context import get_mp_context from dbt.task.base import BaseTask, get_nearest_project_dir +from dbt.version import get_installed_version +from dbt_common.events.format import pluralize +from dbt_common.events.functions import fire_event +from dbt_common.ui import green, red ONLY_PROFILE_MESSAGE = """ A `dbt_project.yml` file was not found in this directory. 
@@ -74,19 +72,19 @@ class DebugRunStatus(Flag): class DebugTask(BaseTask): - def __init__(self, args, config): - super().__init__(args, config) + def __init__(self, args: Flags) -> None: + super().__init__(args) self.profiles_dir = args.PROFILES_DIR self.profile_path = os.path.join(self.profiles_dir, "profiles.yml") try: self.project_dir = get_nearest_project_dir(self.args.project_dir) - except dbt.exceptions.Exception: + except dbt_common.exceptions.DbtBaseException: # we probably couldn't find a project directory. Set project dir # to whatever was given, or default to the current directory. if args.project_dir: self.project_dir = args.project_dir else: - self.project_dir = os.getcwd() + self.project_dir = Path.cwd() self.project_path = os.path.join(self.project_dir, "dbt_project.yml") self.cli_vars: Dict[str, Any] = args.vars @@ -94,20 +92,14 @@ def __init__(self, args, config): self.profile: Optional[Profile] = None self.raw_profile_data: Optional[Dict[str, Any]] = None self.profile_name: Optional[str] = None - self.project: Optional[Project] = None - - @property - def project_profile(self): - if self.project is None: - return None - return self.project.profile_name def run(self) -> bool: # WARN: this is a legacy workflow that is not compatible with other runtime flags if self.args.config_dir: fire_event( OpenCommand( - open_cmd=dbt.clients.system.open_dir_cmd(), profiles_dir=str(self.profiles_dir) + open_cmd=dbt_common.clients.system.open_dir_cmd(), + profiles_dir=str(self.profiles_dir), ) ) return DebugRunStatus.SUCCESS.value @@ -125,7 +117,7 @@ def run(self) -> bool: fire_event(DebugCmdOut(msg="Using dbt_project.yml file at {}".format(self.project_path))) if load_profile_status.run_status == RunStatus.Success: if self.profile is None: - raise dbt.exceptions.DbtInternalError( + raise dbt_common.exceptions.DbtInternalError( "Profile should not be None if loading profile completed" ) else: @@ -149,13 +141,14 @@ def run(self) -> bool: dependencies_statuses = self.test_dependencies() # Test connection - self.test_connection() + connection_status = self.test_connection() # Log messages from any fails all_statuses: List[SubtaskStatus] = [ load_profile_status, load_project_status, *dependencies_statuses, + connection_status, ] all_failing_statuses: List[SubtaskStatus] = list( filter(lambda status: status.run_status == RunStatus.Error, all_statuses) @@ -198,7 +191,9 @@ def _load_profile(self) -> SubtaskStatus: ), ) - raw_profile_data = load_yaml_text(dbt.clients.system.load_file_contents(self.profile_path)) + raw_profile_data = load_yaml_text( + dbt_common.clients.system.load_file_contents(self.profile_path) + ) if isinstance(raw_profile_data, dict): self.raw_profile_data = raw_profile_data @@ -216,7 +211,7 @@ def _load_profile(self) -> SubtaskStatus: # https://github.com/dbt-labs/dbt-core/issues/6259 getattr(self.args, "threads", None), ) - except dbt.exceptions.DbtConfigError as exc: + except dbt_common.exceptions.DbtConfigError as exc: profile_errors.append(str(exc)) else: if len(profile_names) == 1: @@ -261,7 +256,7 @@ def _choose_profile_names(self) -> Tuple[List[str], str]: try: return [Profile.pick_profile_name(args_profile, project_profile)], "" - except dbt.exceptions.DbtConfigError: + except dbt_common.exceptions.DbtConfigError: pass # try to guess @@ -339,11 +334,11 @@ def _load_project(self) -> SubtaskStatus: try: self.project = Project.from_project_root( - self.project_dir, + str(self.project_dir), renderer, verify_version=self.args.VERSION_CHECK, ) - except 
dbt.exceptions.DbtConfigError as exc: + except dbt_common.exceptions.DbtConfigError as exc: return SubtaskStatus( log_msg=red("ERROR invalid"), run_status=RunStatus.Error, @@ -391,8 +386,8 @@ def _target_found(self) -> str: def test_git(self) -> SubtaskStatus: try: - dbt.clients.system.run_cmd(os.getcwd(), ["git", "--help"]) - except dbt.exceptions.ExecutableError as exc: + dbt_common.clients.system.run_cmd(os.getcwd(), ["git", "--help"]) + except dbt_common.exceptions.ExecutableError as exc: return SubtaskStatus( log_msg=red("ERROR"), run_status=RunStatus.Error, @@ -440,7 +435,7 @@ def test_configuration(self, profile_status_msg, project_status_msg): @staticmethod def attempt_connection(profile) -> Optional[str]: """Return a string containing the error message, or None if there was no error.""" - register_adapter(profile) + register_adapter(profile, get_mp_context()) adapter = get_adapter(profile) try: with adapter.connection_named("debug"): @@ -486,7 +481,7 @@ def test_connection(self) -> SubtaskStatus: return status @classmethod - def validate_connection(cls, target_dict): + def validate_connection(cls, target_dict) -> None: """Validate a connection dictionary. On error, raises a DbtConfigError.""" target_name = "test" # make a fake profile that we can parse diff --git a/core/dbt/task/deps.py b/core/dbt/task/deps.py index 849c17b5779..4c3a7134b20 100644 --- a/core/dbt/task/deps.py +++ b/core/dbt/task/deps.py @@ -1,42 +1,101 @@ -from typing import Any, Optional +import json +from hashlib import sha1 from pathlib import Path -import dbt.utils +from typing import Any, Dict, List, Optional + +import yaml + import dbt.deprecations import dbt.exceptions - -from dbt.config.renderer import DbtProjectYamlRenderer +import dbt.utils +from dbt.config import Project +from dbt.config.project import load_yml_dict, package_config_from_data +from dbt.config.renderer import PackageRenderer +from dbt.constants import PACKAGE_LOCK_FILE_NAME, PACKAGE_LOCK_HASH_KEY +from dbt.contracts.project import PackageSpec from dbt.deps.base import downloads_directory -from dbt.deps.resolver import resolve_packages from dbt.deps.registry import RegistryPinnedPackage - -from dbt.events.functions import fire_event +from dbt.deps.resolver import resolve_lock_packages, resolve_packages from dbt.events.types import ( + DepsAddPackage, + DepsFoundDuplicatePackage, + DepsInstallInfo, + DepsListSubdirectory, + DepsLockUpdating, DepsNoPackagesFound, + DepsNotifyUpdatesAvailable, DepsStartPackageInstall, DepsUpdateAvailable, DepsUpToDate, - DepsInstallInfo, - DepsListSubdirectory, - DepsNotifyUpdatesAvailable, - Formatting, ) -from dbt.clients import system - from dbt.task.base import BaseTask, move_to_nearest_project_dir +from dbt_common.clients import system +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Formatting -from dbt.config import Project + +class dbtPackageDumper(yaml.Dumper): + def increase_indent(self, flow=False, indentless=False): + return super(dbtPackageDumper, self).increase_indent(flow, False) + + +def _create_sha1_hash(packages: List[PackageSpec]) -> str: + """Create a SHA1 hash of the packages list, + this is used to determine if the packages for current execution matches + the previous lock. 
+ + Args: + list[Packages]: list of packages specified that are already rendered + + Returns: + str: SHA1 hash of the packages list + """ + package_strs = [json.dumps(package.to_dict(), sort_keys=True) for package in packages] + package_strs = sorted(package_strs) + + return sha1("\n".join(package_strs).encode("utf-8")).hexdigest() + + +def _create_packages_yml_entry(package: str, version: Optional[str], source: str) -> dict: + """Create a formatted entry to add to `packages.yml` or `package-lock.yml` file + + Args: + package (str): Name of package to download + version (str): Version of package to download + source (str): Source of where to download package from + + Returns: + dict: Formatted dict to write to `packages.yml` or `package-lock.yml` file + """ + package_key = source + version_key = "version" + + if source == "hub": + package_key = "package" + + packages_yml_entry = {package_key: package} + + if source == "git": + version_key = "revision" + + if version: + if "," in version: + version = version.split(",") # type: ignore + + packages_yml_entry[version_key] = version + + return packages_yml_entry class DepsTask(BaseTask): - def __init__(self, args: Any, project: Project): + def __init__(self, args: Any, project: Project) -> None: + super().__init__(args=args) # N.B. This is a temporary fix for a bug when using relative paths via # --project-dir with deps. A larger overhaul of our path handling methods # is needed to fix this the "right" way. # See GH-7615 project.project_root = str(Path(project.project_root).resolve()) - - move_to_nearest_project_dir(project.project_root) - super().__init__(args=args, config=None, project=project) + self.project = project self.cli_vars = args.vars def track_package_install( @@ -59,40 +118,159 @@ def track_package_install( {"name": package_name, "source": source_type, "version": version}, ) - def run(self) -> None: - system.make_directory(self.project.packages_install_path) + def check_for_duplicate_packages(self, packages_yml): + """Loop through contents of `packages.yml` to ensure no duplicate package names + versions. + + This duplicate check will take into consideration exact match of a package name, as well as + a check to see if a package name exists within a name (i.e. a package name inside a git URL). 
+ + Args: + packages_yml (dict): In-memory read of `packages.yml` contents + + Returns: + dict: Updated or untouched packages_yml contents + """ + for i, pkg_entry in enumerate(packages_yml["packages"]): + for val in pkg_entry.values(): + if self.args.add_package["name"] in val: + del packages_yml["packages"][i] + + fire_event(DepsFoundDuplicatePackage(removed_package=pkg_entry)) + + return packages_yml + + def add(self): + packages_yml_filepath = ( + f"{self.project.project_root}/{self.project.packages_specified_path}" + ) + if not system.path_exists(packages_yml_filepath): + with open(packages_yml_filepath, "w") as package_yml: + yaml.safe_dump({"packages": []}, package_yml) + fire_event(Formatting("Created packages.yml")) + + new_package_entry = _create_packages_yml_entry( + self.args.add_package["name"], self.args.add_package["version"], self.args.source + ) + + with open(packages_yml_filepath, "r") as user_yml_obj: + packages_yml = yaml.safe_load(user_yml_obj) + packages_yml = self.check_for_duplicate_packages(packages_yml) + packages_yml["packages"].append(new_package_entry) + + self.project.packages.packages = package_config_from_data(packages_yml).packages + + if packages_yml: + with open(packages_yml_filepath, "w") as pkg_obj: + pkg_obj.write( + yaml.dump(packages_yml, Dumper=dbtPackageDumper, default_flow_style=False) + ) + + fire_event( + DepsAddPackage( + package_name=self.args.add_package["name"], + version=self.args.add_package["version"], + packages_filepath=packages_yml_filepath, + ) + ) + + def lock(self) -> None: + lock_filepath = f"{self.project.project_root}/{PACKAGE_LOCK_FILE_NAME}" + packages = self.project.packages.packages + packages_installed: Dict[str, Any] = {"packages": []} + if not packages: fire_event(DepsNoPackagesFound()) return with downloads_directory(): - final_deps = resolve_packages(packages, self.project, self.cli_vars) + resolved_deps = resolve_packages(packages, self.project, self.cli_vars) + + # this loop is to create the package-lock.yml in the same format as original packages.yml + # package-lock.yml includes both the stated packages in packages.yml along with dependent packages + for package in resolved_deps: + packages_installed["packages"].append(package.to_dict()) + packages_installed[PACKAGE_LOCK_HASH_KEY] = _create_sha1_hash( + self.project.packages.packages + ) + + with open(lock_filepath, "w") as lock_obj: + yaml.dump(packages_installed, lock_obj, Dumper=dbtPackageDumper) + + fire_event(DepsLockUpdating(lock_filepath=lock_filepath)) + + def run(self) -> None: + move_to_nearest_project_dir(self.args.project_dir) + if self.args.add_package: + self.add() + + # Check lock file exist and generated by the same pacakges.yml + # or dependencies.yml. + lock_file_path = f"{self.project.project_root}/{PACKAGE_LOCK_FILE_NAME}" + if not system.path_exists(lock_file_path): + self.lock() + elif self.args.upgrade: + self.lock() + else: + # Check dependency definition is modified or not. 
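A minimal standalone sketch of the comparison this check performs: the lock hash is a SHA-1 over the sorted, JSON-serialized package entries, mirroring the _create_sha1_hash helper above. Plain dicts stand in here for the rendered PackageSpec objects, and lock_hash is an illustrative name rather than dbt's API:

import json
from hashlib import sha1
from typing import Any, Dict, List


def lock_hash(packages: List[Dict[str, Any]]) -> str:
    # Serialize each entry deterministically, sort, then hash the joined result,
    # the same idea as _create_sha1_hash above.
    entries = sorted(json.dumps(p, sort_keys=True) for p in packages)
    return sha1("\n".join(entries).encode("utf-8")).hexdigest()


# The lock is regenerated whenever the hash stored in package-lock.yml no longer
# matches the hash of what packages.yml currently specifies.
current = lock_hash([{"package": "dbt-labs/dbt_utils", "version": "1.1.1"}])  # example entry
previous_from_lock_file = "0" * 40  # stand-in for the value read from package-lock.yml
print("needs re-lock:", previous_from_lock_file != current)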
+ current_hash = _create_sha1_hash(self.project.packages.packages) + previous_hash = load_yml_dict(lock_file_path).get(PACKAGE_LOCK_HASH_KEY, None) + if previous_hash != current_hash: + self.lock() + + # Early return when 'dbt deps --lock' + # Just resolve packages and write lock file, don't actually install packages + if self.args.lock: + return + + if system.path_exists(self.project.packages_install_path): + system.rmtree(self.project.packages_install_path) - renderer = DbtProjectYamlRenderer(None, self.cli_vars) + system.make_directory(self.project.packages_install_path) + + packages_lock_dict = load_yml_dict(f"{self.project.project_root}/{PACKAGE_LOCK_FILE_NAME}") + + renderer = PackageRenderer(self.cli_vars) + packages_lock_config = package_config_from_data( + renderer.render_data(packages_lock_dict), packages_lock_dict + ).packages + + if not packages_lock_config: + fire_event(DepsNoPackagesFound()) + return + + with downloads_directory(): + lock_defined_deps = resolve_lock_packages(packages_lock_config) + renderer = PackageRenderer(self.cli_vars) packages_to_upgrade = [] - for package in final_deps: + + for package in lock_defined_deps: package_name = package.name source_type = package.source_type() version = package.get_version() fire_event(DepsStartPackageInstall(package_name=package_name)) package.install(self.project, renderer) + fire_event(DepsInstallInfo(version_name=package.nice_version_name())) + if isinstance(package, RegistryPinnedPackage): version_latest = package.get_version_latest() + if version_latest != version: packages_to_upgrade.append(package_name) fire_event(DepsUpdateAvailable(version_latest=version_latest)) else: fire_event(DepsUpToDate()) + if package.get_subdirectory(): fire_event(DepsListSubdirectory(subdirectory=package.get_subdirectory())) self.track_package_install( package_name=package_name, source_type=source_type, version=version ) + if packages_to_upgrade: fire_event(Formatting("")) fire_event(DepsNotifyUpdatesAvailable(packages=packages_to_upgrade)) diff --git a/core/dbt/task/docs/__init__.py b/core/dbt/task/docs/__init__.py new file mode 100644 index 00000000000..3cd9e13e846 --- /dev/null +++ b/core/dbt/task/docs/__init__.py @@ -0,0 +1,3 @@ +import os + +DOCS_INDEX_FILE_PATH = os.path.normpath(os.path.join(os.path.dirname(__file__), "index.html")) diff --git a/core/dbt/task/generate.py b/core/dbt/task/docs/generate.py similarity index 58% rename from core/dbt/task/generate.py rename to core/dbt/task/docs/generate.py index 5e21213e8fb..7f238cf4e44 100644 --- a/core/dbt/task/generate.py +++ b/core/dbt/task/docs/generate.py @@ -1,42 +1,49 @@ import os import shutil +from dataclasses import replace from datetime import datetime -from typing import Dict, List, Any, Optional, Tuple, Set -import agate - -from dbt.dataclass_schema import ValidationError +from itertools import chain +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple -from .compile import CompileTask +import agate +import dbt.compilation +import dbt.exceptions +import dbt.utils +import dbt_common.utils.formatting +from dbt.adapters.events.types import ( + BuildingCatalog, + CannotGenerateDocs, + CatalogWritten, + WriteCatalogFailure, +) from dbt.adapters.factory import get_adapter -from dbt.contracts.graph.nodes import ResultNode -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.results import ( - NodeStatus, - TableMetadata, - CatalogTable, +from dbt.artifacts.schemas.catalog import ( + CatalogArtifact, + CatalogKey, CatalogResults, + CatalogTable, + 
ColumnMetadata, PrimitiveDict, - CatalogKey, - StatsItem, StatsDict, - ColumnMetadata, - CatalogArtifact, -) -from dbt.exceptions import DbtInternalError, AmbiguousCatalogMatchError -from dbt.include.global_project import DOCS_INDEX_FILE_PATH -from dbt.events.functions import fire_event -from dbt.events.types import ( - WriteCatalogFailure, - CatalogWritten, - CannotGenerateDocs, - BuildingCatalog, + StatsItem, + TableMetadata, ) +from dbt.artifacts.schemas.results import NodeStatus +from dbt.constants import MANIFEST_FILE_NAME +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import ResultNode +from dbt.exceptions import AmbiguousCatalogMatchError +from dbt.graph import ResourceTypeSelector +from dbt.graph.graph import UniqueId +from dbt.node_types import EXECUTABLE_NODE_TYPES, NodeType from dbt.parser.manifest import write_manifest -import dbt.utils -import dbt.compilation -import dbt.exceptions - +from dbt.task.compile import CompileTask +from dbt.task.docs import DOCS_INDEX_FILE_PATH +from dbt_common.clients.system import load_file_contents +from dbt_common.dataclass_schema import ValidationError +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtInternalError CATALOG_FILENAME = "catalog.json" @@ -63,7 +70,7 @@ def build_catalog_table(data) -> CatalogTable: # keys are database name, schema name, table name class Catalog(Dict[CatalogKey, CatalogTable]): - def __init__(self, columns: List[PrimitiveDict]): + def __init__(self, columns: List[PrimitiveDict]) -> None: super().__init__() for col in columns: self.add_column(col) @@ -82,7 +89,7 @@ def get_table(self, data: PrimitiveDict) -> CatalogTable: str(data["table_name"]), ) except KeyError as exc: - raise dbt.exceptions.CompilationError( + raise dbt_common.exceptions.CompilationError( "Catalog information missing required key {} (got {})".format(exc, data) ) table: CatalogTable @@ -104,8 +111,13 @@ def add_column(self, data: PrimitiveDict): table.columns[column.name] = column def make_unique_id_map( - self, manifest: Manifest + self, manifest: Manifest, selected_node_ids: Optional[Set[UniqueId]] = None ) -> Tuple[Dict[str, CatalogTable], Dict[str, CatalogTable]]: + """ + Create mappings between CatalogKeys and CatalogTables for nodes and sources, filtered by selected_node_ids. + + By default, selected_node_ids is None and all nodes and sources defined in the manifest are included in the mappings. 
+ """ nodes: Dict[str, CatalogTable] = {} sources: Dict[str, CatalogTable] = {} @@ -115,7 +127,8 @@ def make_unique_id_map( key = table.key() if key in node_map: unique_id = node_map[key] - nodes[unique_id] = table.replace(unique_id=unique_id) + if selected_node_ids is None or unique_id in selected_node_ids: + nodes[unique_id] = replace(table, unique_id=unique_id) unique_ids = source_map.get(table.key(), set()) for unique_id in unique_ids: @@ -125,8 +138,8 @@ def make_unique_id_map( sources[unique_id].to_dict(omit_none=True), table.to_dict(omit_none=True), ) - else: - sources[unique_id] = table.replace(unique_id=unique_id) + elif selected_node_ids is None or unique_id in selected_node_ids: + sources[unique_id] = replace(table, unique_id=unique_id) return nodes, sources @@ -176,7 +189,7 @@ def format_stats(stats: PrimitiveDict) -> StatsDict: def mapping_key(node: ResultNode) -> CatalogKey: - dkey = dbt.utils.lowercase(node.database) + dkey = dbt_common.utils.formatting.lowercase(node.database) return CatalogKey(dkey, node.schema.lower(), node.identifier.lower()) @@ -230,14 +243,49 @@ def run(self) -> CatalogArtifact: if self.manifest is None: raise DbtInternalError("self.manifest was None in run!") + selected_node_ids: Optional[Set[UniqueId]] = None if self.args.empty_catalog: catalog_table: agate.Table = agate.Table([]) exceptions: List[Exception] = [] + selected_node_ids = set() else: adapter = get_adapter(self.config) with adapter.connection_named("generate_catalog"): fire_event(BuildingCatalog()) - catalog_table, exceptions = adapter.get_catalog(self.manifest) + # Get a list of relations we need from the catalog + relations = None + if self.job_queue is not None: + selected_node_ids = self.job_queue.get_selected_nodes() + selected_nodes = self._get_nodes_from_ids(self.manifest, selected_node_ids) + + # Source selection is handled separately from main job_queue selection because + # SourceDefinition nodes cannot be safely compiled / run by the CompileRunner / CompileTask, + # but should still be included in the catalog based on the selection spec + selected_source_ids = self._get_selected_source_ids() + selected_source_nodes = self._get_nodes_from_ids( + self.manifest, selected_source_ids + ) + selected_node_ids.update(selected_source_ids) + selected_nodes.extend(selected_source_nodes) + + relations = { + adapter.Relation.create_from(adapter.config, node) + for node in selected_nodes + } + + # This generates the catalog as an agate.Table + catalogable_nodes = chain( + [ + node + for node in self.manifest.nodes.values() + if (node.is_relational and not node.is_ephemeral_model) + ], + self.manifest.sources.values(), + ) + used_schemas = self.manifest.get_used_schemas() + catalog_table, exceptions = adapter.get_filtered_catalog( + catalogable_nodes, used_schemas, relations + ) catalog_data: List[PrimitiveDict] = [ dict(zip(catalog_table.column_names, map(dbt.utils._coerce_decimal, row))) @@ -250,7 +298,7 @@ def run(self) -> CatalogArtifact: if exceptions: errors = [str(e) for e in exceptions] - nodes, sources = catalog.make_unique_id_map(self.manifest) + nodes, sources = catalog.make_unique_id_map(self.manifest, selected_node_ids) results = self.get_catalog_results( nodes=nodes, sources=sources, @@ -259,16 +307,45 @@ def run(self) -> CatalogArtifact: errors=errors, ) - path = os.path.join(self.config.project_target_path, CATALOG_FILENAME) - results.write(path) + catalog_path = os.path.join(self.config.project_target_path, CATALOG_FILENAME) + results.write(catalog_path) if self.args.compile: 
write_manifest(self.manifest, self.config.project_target_path) + if self.args.static: + + # Read manifest.json and catalog.json + read_manifest_data = load_file_contents( + os.path.join(self.config.project_target_path, MANIFEST_FILE_NAME) + ) + read_catalog_data = load_file_contents(catalog_path) + + # Create new static index file contents + index_data = load_file_contents(DOCS_INDEX_FILE_PATH) + index_data = index_data.replace('"MANIFEST.JSON INLINE DATA"', read_manifest_data) + index_data = index_data.replace('"CATALOG.JSON INLINE DATA"', read_catalog_data) + + # Write out the new index file + static_index_path = os.path.join(self.config.project_target_path, "static_index.html") + with open(static_index_path, "wb") as static_index_file: + static_index_file.write(bytes(index_data, "utf8")) + if exceptions: fire_event(WriteCatalogFailure(num_exceptions=len(exceptions))) - fire_event(CatalogWritten(path=os.path.abspath(path))) + fire_event(CatalogWritten(path=os.path.abspath(catalog_path))) return results + def get_node_selector(self) -> ResourceTypeSelector: + if self.manifest is None or self.graph is None: + raise DbtInternalError("manifest and graph must be set to perform node selection") + return ResourceTypeSelector( + graph=self.graph, + manifest=self.manifest, + previous_state=self.previous_state, + resource_types=EXECUTABLE_NODE_TYPES, + include_empty_nodes=True, + ) + def get_catalog_results( self, nodes: Dict[str, CatalogTable], @@ -296,3 +373,29 @@ def interpret_results(self, results: Optional[CatalogResults]) -> bool: return True return super().interpret_results(compile_results) + + @staticmethod + def _get_nodes_from_ids(manifest: Manifest, node_ids: Iterable[str]) -> List[ResultNode]: + selected: List[ResultNode] = [] + for unique_id in node_ids: + if unique_id in manifest.nodes: + node = manifest.nodes[unique_id] + if node.is_relational and not node.is_ephemeral_model: + selected.append(node) + elif unique_id in manifest.sources: + source = manifest.sources[unique_id] + selected.append(source) + return selected + + def _get_selected_source_ids(self) -> Set[UniqueId]: + if self.manifest is None or self.graph is None: + raise DbtInternalError("manifest and graph must be set to perform node selection") + + source_selector = ResourceTypeSelector( + graph=self.graph, + manifest=self.manifest, + previous_state=self.previous_state, + resource_types=[NodeType.Source], + ) + + return source_selector.get_graph_queue(self.get_selection_spec()).get_selected_nodes() diff --git a/core/dbt/task/docs/index.html b/core/dbt/task/docs/index.html new file mode 100644 index 00000000000..9be6f0f2b7f --- /dev/null +++ b/core/dbt/task/docs/index.html @@ -0,0 +1,250 @@ + + + + + + + dbt Docs + + + + + + + + + + + + + + + +
+ [~250 added lines of the pre-built dbt Docs index.html page omitted; the HTML markup is not reproduced in this view]
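The --static branch added to generate.py above produces a fully self-contained copy of this page: it loads the two JSON artifacts and substitutes them for the quoted placeholder strings the shipped index.html contains. A minimal sketch of that inlining step, assuming the artifacts already exist under a default target/ directory (the paths and file handling here are illustrative, not the exact task code):

import os

from dbt.task.docs import DOCS_INDEX_FILE_PATH  # constant added in this change

target_path = "target"  # assumed default project target path

with open(os.path.join(target_path, "manifest.json")) as f:
    manifest_data = f.read()
with open(os.path.join(target_path, "catalog.json")) as f:
    catalog_data = f.read()

with open(DOCS_INDEX_FILE_PATH) as f:
    index_data = f.read()

# The shipped index.html contains these quoted placeholder strings; replacing
# them with the artifact contents removes the need to serve the JSON files.
index_data = index_data.replace('"MANIFEST.JSON INLINE DATA"', manifest_data)
index_data = index_data.replace('"CATALOG.JSON INLINE DATA"', catalog_data)

with open(os.path.join(target_path, "static_index.html"), "w") as f:
    f.write(index_data)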
+ + + + diff --git a/core/dbt/task/serve.py b/core/dbt/task/docs/serve.py similarity index 81% rename from core/dbt/task/serve.py rename to core/dbt/task/docs/serve.py index 060c4c93d17..ce362eaeb4e 100644 --- a/core/dbt/task/serve.py +++ b/core/dbt/task/docs/serve.py @@ -6,8 +6,8 @@ import click -from dbt.include.global_project import DOCS_INDEX_FILE_PATH from dbt.task.base import ConfiguredTask +from dbt.task.docs import DOCS_INDEX_FILE_PATH class ServeTask(ConfiguredTask): @@ -16,11 +16,12 @@ def run(self): shutil.copyfile(DOCS_INDEX_FILE_PATH, "index.html") port = self.args.port + host = self.args.host if self.args.browser: webbrowser.open_new_tab(f"http://localhost:{port}") - with socketserver.TCPServer(("", port), SimpleHTTPRequestHandler) as httpd: + with socketserver.TCPServer((host, port), SimpleHTTPRequestHandler) as httpd: click.echo(f"Serving docs at {port}") click.echo(f"To access from your browser, navigate to: http://localhost:{port}") click.echo("\n\n") diff --git a/core/dbt/task/freshness.py b/core/dbt/task/freshness.py index 32f09dd7470..eb1508acb36 100644 --- a/core/dbt/task/freshness.py +++ b/core/dbt/task/freshness.py @@ -1,40 +1,51 @@ import os import threading import time +from typing import AbstractSet, Dict, List, Optional, Type -from .base import BaseRunner -from .printer import ( - print_run_result_error, -) -from .runnable import GraphRunnableTask - -from dbt.contracts.results import ( +from dbt import deprecations +from dbt.adapters.base.impl import FreshnessResponse +from dbt.adapters.base.relation import BaseRelation +from dbt.adapters.capability import Capability +from dbt.adapters.contracts.connection import AdapterResponse +from dbt.artifacts.schemas.freshness import ( FreshnessResult, + FreshnessStatus, PartialSourceFreshnessResult, SourceFreshnessResult, - FreshnessStatus, -) -from dbt.exceptions import DbtRuntimeError, DbtInternalError -from dbt.events.functions import fire_event -from dbt.events.types import ( - FreshnessCheckComplete, - LogStartLine, - LogFreshnessResult, ) -from dbt.node_types import NodeType - +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import HookNode, SourceDefinition +from dbt.contracts.results import RunStatus +from dbt.events.types import FreshnessCheckComplete, LogFreshnessResult, LogStartLine from dbt.graph import ResourceTypeSelector -from dbt.contracts.graph.nodes import SourceDefinition +from dbt.node_types import NodeType, RunHookType +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Note +from dbt_common.exceptions import DbtInternalError, DbtRuntimeError +from .base import BaseRunner +from .printer import print_run_result_error +from .run import RunTask RESULT_FILE_NAME = "sources.json" class FreshnessRunner(BaseRunner): + def __init__(self, config, adapter, node, node_index, num_nodes) -> None: + super().__init__(config, adapter, node, node_index, num_nodes) + self._metadata_freshness_cache: Dict[BaseRelation, FreshnessResult] = {} + + def set_metadata_freshness_cache( + self, metadata_freshness_cache: Dict[BaseRelation, FreshnessResult] + ) -> None: + self._metadata_freshness_cache = metadata_freshness_cache + def on_skip(self): raise DbtRuntimeError("Freshness: nodes cannot be skipped!") - def before_execute(self): + def before_execute(self) -> None: description = "freshness of {0.source_name}.{0.name}".format(self.node) fire_event( LogStartLine( @@ -45,7 +56,7 @@ def 
before_execute(self): ) ) - def after_execute(self, result): + def after_execute(self, result) -> None: if hasattr(result, "node"): source_name = result.node.source_name table_name = result.node.name @@ -95,26 +106,45 @@ def from_run_result(self, result, start_time, timing_info): return result def execute(self, compiled_node, manifest): - # we should only be here if we compiled_node.has_freshness, and - # therefore loaded_at_field should be a str. If this invariant is - # broken, raise! - if compiled_node.loaded_at_field is None: - raise DbtInternalError( - "Got to execute for source freshness of a source that has no loaded_at_field!" - ) - - relation = self.adapter.Relation.create_from_source(compiled_node) + relation = self.adapter.Relation.create_from(self.config, compiled_node) # given a Source, calculate its freshness. - with self.adapter.connection_for(compiled_node): + with self.adapter.connection_named(compiled_node.unique_id, compiled_node): self.adapter.clear_transaction() - adapter_response, freshness = self.adapter.calculate_freshness( - relation, - compiled_node.loaded_at_field, - compiled_node.freshness.filter, - manifest=manifest, - ) + adapter_response: Optional[AdapterResponse] = None + freshness: Optional[FreshnessResponse] = None + + if compiled_node.loaded_at_field is not None: + adapter_response, freshness = self.adapter.calculate_freshness( + relation, + compiled_node.loaded_at_field, + compiled_node.freshness.filter, + macro_resolver=manifest, + ) + + status = compiled_node.freshness.status(freshness["age"]) + elif self.adapter.supports(Capability.TableLastModifiedMetadata): + if compiled_node.freshness.filter is not None: + fire_event( + Note( + msg=f"A filter cannot be applied to a metadata freshness check on source '{compiled_node.name}'." + ), + EventLevel.WARN, + ) - status = compiled_node.freshness.status(freshness["age"]) + metadata_source = self.adapter.Relation.create_from(self.config, compiled_node) + if metadata_source in self._metadata_freshness_cache: + freshness = self._metadata_freshness_cache[metadata_source] + else: + adapter_response, freshness = self.adapter.calculate_freshness_from_metadata( + relation, + macro_resolver=manifest, + ) + + status = compiled_node.freshness.status(freshness["age"]) + else: + raise DbtRuntimeError( + f"Could not compute freshness for source {compiled_node.name}: no 'loaded_at_field' provided and {self.adapter.type()} adapter does not support metadata-based freshness checks." + ) # adapter_response was not returned in previous versions, so this will be None # we cannot call to_dict() on NoneType @@ -133,10 +163,10 @@ def execute(self, compiled_node, manifest): **freshness, ) - def compile(self, manifest): + def compile(self, manifest: Manifest): if self.node.resource_type != NodeType.Source: # should be unreachable... 
- raise DbtRuntimeError("fresnhess runner: got a non-Source") + raise DbtRuntimeError("freshness runner: got a non-Source") # we don't do anything interesting when we compile a source node return self.node @@ -150,18 +180,18 @@ def node_is_match(self, node): return node.has_freshness -class FreshnessTask(GraphRunnableTask): - def defer_to_manifest(self, adapter, selected_uids): - # freshness don't defer - return +class FreshnessTask(RunTask): + def __init__(self, args, config, manifest) -> None: + super().__init__(args, config, manifest) + self._metadata_freshness_cache: Dict[BaseRelation, FreshnessResult] = {} - def result_path(self): + def result_path(self) -> str: if self.args.output: return os.path.realpath(self.args.output) else: return os.path.join(self.config.project_target_path, RESULT_FILE_NAME) - def raise_on_first_error(self): + def raise_on_first_error(self) -> bool: return False def get_node_selector(self): @@ -174,7 +204,18 @@ def get_node_selector(self): resource_types=[NodeType.Source], ) - def get_runner_type(self, _): + def before_run(self, adapter, selected_uids: AbstractSet[str]) -> None: + super().before_run(adapter, selected_uids) + if adapter.supports(Capability.TableLastModifiedMetadataBatch): + self.populate_metadata_freshness_cache(adapter, selected_uids) + + def get_runner(self, node) -> BaseRunner: + freshness_runner = super().get_runner(node) + assert isinstance(freshness_runner, FreshnessRunner) + freshness_runner.set_metadata_freshness_cache(self._metadata_freshness_cache) + return freshness_runner + + def get_runner_type(self, _) -> Optional[Type[BaseRunner]]: return FreshnessRunner def get_result(self, results, elapsed_time, generated_at): @@ -182,9 +223,59 @@ def get_result(self, results, elapsed_time, generated_at): elapsed_time=elapsed_time, generated_at=generated_at, results=results ) - def task_end_messages(self, results): + def task_end_messages(self, results) -> None: for result in results: - if result.status in (FreshnessStatus.Error, FreshnessStatus.RuntimeErr): + if result.status in ( + FreshnessStatus.Error, + FreshnessStatus.RuntimeErr, + RunStatus.Error, + ): print_run_result_error(result) fire_event(FreshnessCheckComplete()) + + def get_hooks_by_type(self, hook_type: RunHookType) -> List[HookNode]: + hooks = super().get_hooks_by_type(hook_type) + if self.args.source_freshness_run_project_hooks: + return hooks + else: + if hooks: + deprecations.warn("source-freshness-project-hooks") + return [] + + def populate_metadata_freshness_cache(self, adapter, selected_uids: AbstractSet[str]) -> None: + if self.manifest is None: + raise DbtInternalError("Manifest must be set to populate metadata freshness cache") + + batch_metadata_sources: List[BaseRelation] = [] + for selected_source_uid in list(selected_uids): + source = self.manifest.sources.get(selected_source_uid) + if source and source.loaded_at_field is None: + metadata_source = adapter.Relation.create_from(self.config, source) + batch_metadata_sources.append(metadata_source) + + fire_event( + Note( + msg=f"Pulling freshness from warehouse metadata tables for {len(batch_metadata_sources)} sources" + ), + EventLevel.INFO, + ) + + try: + _, metadata_freshness_results = adapter.calculate_freshness_from_metadata_batch( + batch_metadata_sources + ) + self._metadata_freshness_cache.update(metadata_freshness_results) + except Exception as e: + # This error handling is intentionally very coarse. 
+ # If anything goes wrong during batch metadata calculation, we can safely + # leave _metadata_freshness_cache unpopulated. + # Downstream, this will be gracefully handled as a cache miss and non-batch + # metadata-based freshness will still be performed on a source-by-source basis. + fire_event( + Note(msg=f"Metadata freshness could not be computed in batch: {e}"), + EventLevel.WARN, + ) + + def get_freshness_metadata_cache(self) -> Dict[BaseRelation, FreshnessResult]: + return self._metadata_freshness_cache diff --git a/core/dbt/task/init.py b/core/dbt/task/init.py index 4f7509bc708..61d8f30e039 100644 --- a/core/dbt/task/init.py +++ b/core/dbt/task/init.py @@ -1,40 +1,35 @@ import copy import os -from pathlib import Path import re import shutil +from pathlib import Path from typing import Optional -import yaml import click +import yaml import dbt.config -import dbt.clients.system -from dbt.flags import get_flags -from dbt.version import _get_adapter_plugin_names -from dbt.adapters.factory import load_plugin, get_include_paths - +import dbt_common.clients.system +from dbt.adapters.factory import get_include_paths, load_plugin +from dbt.config.profile import read_profile from dbt.contracts.util import Identifier as ProjectName - -from dbt.events.functions import fire_event from dbt.events.types import ( - StarterProjectPath, ConfigFolderDirectory, + InvalidProfileTemplateYAML, NoSampleProfileFound, + ProfileWrittenWithProjectTemplateYAML, ProfileWrittenWithSample, ProfileWrittenWithTargetTemplateYAML, - ProfileWrittenWithProjectTemplateYAML, - SettingUpProfile, - InvalidProfileTemplateYAML, - ProjectNameAlreadyExists, ProjectCreated, + ProjectNameAlreadyExists, + SettingUpProfile, + StarterProjectPath, ) - -from dbt.include.starter_project import PACKAGE_PATH as starter_project_directory - -from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME - +from dbt.flags import get_flags from dbt.task.base import BaseTask, move_to_nearest_project_dir +from dbt.version import _get_adapter_plugin_names +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtRuntimeError DOCS_URL = "https://docs.getdbt.com/docs/configure-your-profile" SLACK_URL = "https://community.getdbt.com/" @@ -55,7 +50,12 @@ class InitTask(BaseTask): - def copy_starter_repo(self, project_name): + def copy_starter_repo(self, project_name: str) -> None: + # Lazy import to avoid ModuleNotFoundError + from dbt.include.starter_project import ( + PACKAGE_PATH as starter_project_directory, + ) + fire_event(StarterProjectPath(dir=starter_project_directory)) shutil.copytree( starter_project_directory, project_name, ignore=shutil.ignore_patterns(*IGNORE_FILES) @@ -66,7 +66,7 @@ def create_profiles_dir(self, profiles_dir: str) -> bool: profiles_path = Path(profiles_dir) if not profiles_path.exists(): fire_event(ConfigFolderDirectory(dir=profiles_dir)) - dbt.clients.system.make_directory(profiles_dir) + dbt_common.clients.system.make_directory(profiles_dir) return True return False @@ -188,6 +188,15 @@ def create_profile_from_target(self, adapter: str, profile_name: str): # sample_profiles.yml self.create_profile_from_sample(adapter, profile_name) + def check_if_profile_exists(self, profile_name: str) -> bool: + """ + Validate that the specified profile exists. 
Can't use the regular profile validation + routine because it assumes the project file exists + """ + profiles_dir = get_flags().PROFILES_DIR + raw_profiles = read_profile(profiles_dir) + return profile_name in raw_profiles + def check_if_can_write_profile(self, profile_name: Optional[str] = None) -> bool: """Using either a provided profile name or that specified in dbt_project.yml, check if the profile already exists in profiles.yml, and if so ask the @@ -233,8 +242,31 @@ def ask_for_adapter_choice(self) -> str: numeric_choice = click.prompt(prompt_msg, type=click.INT) return available_adapters[numeric_choice - 1] + def setup_profile(self, profile_name: str) -> None: + """Set up a new profile for a project""" + fire_event(SettingUpProfile()) + if not self.check_if_can_write_profile(profile_name=profile_name): + return + # If a profile_template.yml exists in the project root, that effectively + # overrides the profile_template.yml for the given target. + profile_template_path = Path("profile_template.yml") + if profile_template_path.exists(): + try: + # This relies on a valid profile_template.yml from the user, + # so use a try: except to fall back to the default on failure + self.create_profile_using_project_profile_template(profile_name) + return + except Exception: + fire_event(InvalidProfileTemplateYAML()) + adapter = self.ask_for_adapter_choice() + self.create_profile_from_target(adapter, profile_name=profile_name) + def get_valid_project_name(self) -> str: """Returns a valid project name, either from CLI arg or user prompt.""" + + # Lazy import to avoid ModuleNotFoundError + from dbt.include.global_project import PROJECT_NAME as GLOBAL_PROJECT_NAME + name = self.args.project_name internal_package_names = {GLOBAL_PROJECT_NAME} available_adapters = list(_get_adapter_plugin_names()) @@ -247,11 +279,11 @@ def get_valid_project_name(self) -> str: return name - def create_new_project(self, project_name: str): + def create_new_project(self, project_name: str, profile_name: str): self.copy_starter_repo(project_name) os.chdir(project_name) with open("dbt_project.yml", "r") as f: - content = f"{f.read()}".format(project_name=project_name, profile_name=project_name) + content = f"{f.read()}".format(project_name=project_name, profile_name=profile_name) with open("dbt_project.yml", "w") as f: f.write(content) fire_event( @@ -270,13 +302,22 @@ def run(self): try: move_to_nearest_project_dir(self.args.project_dir) in_project = True - except dbt.exceptions.DbtRuntimeError: + except dbt_common.exceptions.DbtRuntimeError: in_project = False if in_project: + # If --profile was specified, it means use an existing profile, which is not + # applicable to this case + if self.args.profile: + raise DbtRuntimeError( + msg="Can not init existing project with specified profile, edit dbt_project.yml instead" + ) + # When dbt init is run inside an existing project, # just setup the user's profile. - profile_name = self.get_profile_name_from_current_project() + if not self.args.skip_profile_setup: + profile_name = self.get_profile_name_from_current_project() + self.setup_profile(profile_name) else: # When dbt init is run outside of an existing project, # create a new project and set up the user's profile. @@ -285,24 +326,21 @@ def run(self): if project_path.exists(): fire_event(ProjectNameAlreadyExists(name=project_name)) return - self.create_new_project(project_name) - profile_name = project_name - # Ask for adapter only if skip_profile_setup flag is not provided. 
- if not self.args.skip_profile_setup: - fire_event(SettingUpProfile()) - if not self.check_if_can_write_profile(profile_name=profile_name): - return - # If a profile_template.yml exists in the project root, that effectively - # overrides the profile_template.yml for the given target. - profile_template_path = Path("profile_template.yml") - if profile_template_path.exists(): - try: - # This relies on a valid profile_template.yml from the user, - # so use a try: except to fall back to the default on failure - self.create_profile_using_project_profile_template(profile_name) - return - except Exception: - fire_event(InvalidProfileTemplateYAML()) - adapter = self.ask_for_adapter_choice() - self.create_profile_from_target(adapter, profile_name=profile_name) + # If the user specified an existing profile to use, use it instead of generating a new one + user_profile_name = self.args.profile + if user_profile_name: + if not self.check_if_profile_exists(user_profile_name): + raise DbtRuntimeError( + msg="Could not find profile named '{}'".format(user_profile_name) + ) + self.create_new_project(project_name, user_profile_name) + else: + profile_name = project_name + # Create the profile after creating the project to avoid leaving a random profile + # if the former fails. + self.create_new_project(project_name, profile_name) + + # Ask for adapter only if skip_profile_setup flag is not provided + if not self.args.skip_profile_setup: + self.setup_profile(profile_name) diff --git a/core/dbt/task/list.py b/core/dbt/task/list.py index eb8c4784845..2638920976a 100644 --- a/core/dbt/task/list.py +++ b/core/dbt/task/list.py @@ -1,20 +1,26 @@ import json - -from dbt.contracts.graph.nodes import Exposure, SourceDefinition, Metric -from dbt.flags import get_flags +from typing import Iterator, List + +from dbt.cli.flags import Flags +from dbt.config.runtime import RuntimeConfig +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import ( + Exposure, + Metric, + SavedQuery, + SemanticModel, + SourceDefinition, + UnitTestDefinition, +) +from dbt.events.types import NoNodesSelected from dbt.graph import ResourceTypeSelector -from dbt.task.runnable import GraphRunnableTask -from dbt.task.test import TestSelector from dbt.node_types import NodeType -from dbt.events.functions import ( - fire_event, - warn_or_error, -) -from dbt.events.types import ( - NoNodesSelected, - ListCmdOut, -) -from dbt.exceptions import DbtRuntimeError, DbtInternalError +from dbt.task.base import resource_types_from_args +from dbt.task.runnable import GraphRunnableTask +from dbt_common.events.contextvars import task_contextvars +from dbt_common.events.functions import fire_event, warn_or_error +from dbt_common.events.types import PrintEvent +from dbt_common.exceptions import DbtInternalError, DbtRuntimeError class ListTask(GraphRunnableTask): @@ -27,6 +33,9 @@ class ListTask(GraphRunnableTask): NodeType.Source, NodeType.Exposure, NodeType.Metric, + NodeType.SavedQuery, + NodeType.SemanticModel, + NodeType.Unit, ) ) ALL_RESOURCE_VALUES = DEFAULT_RESOURCE_VALUES | frozenset((NodeType.Analysis,)) @@ -45,7 +54,7 @@ class ListTask(GraphRunnableTask): ) ) - def __init__(self, args, config, manifest): + def __init__(self, args: Flags, config: RuntimeConfig, manifest: Manifest) -> None: super().__init__(args, config, manifest) if self.args.models: if self.args.select: @@ -58,25 +67,31 @@ def __init__(self, args, config, manifest): def _iterate_selected_nodes(self): selector = self.get_node_selector() spec = 
self.get_selection_spec() - nodes = sorted(selector.get_selected(spec)) - if not nodes: + unique_ids = sorted(selector.get_selected(spec)) + if not unique_ids: warn_or_error(NoNodesSelected()) return if self.manifest is None: raise DbtInternalError("manifest is None in _iterate_selected_nodes") - for node in nodes: - if node in self.manifest.nodes: - yield self.manifest.nodes[node] - elif node in self.manifest.sources: - yield self.manifest.sources[node] - elif node in self.manifest.exposures: - yield self.manifest.exposures[node] - elif node in self.manifest.metrics: - yield self.manifest.metrics[node] + for unique_id in unique_ids: + if unique_id in self.manifest.nodes: + yield self.manifest.nodes[unique_id] + elif unique_id in self.manifest.sources: + yield self.manifest.sources[unique_id] + elif unique_id in self.manifest.exposures: + yield self.manifest.exposures[unique_id] + elif unique_id in self.manifest.metrics: + yield self.manifest.metrics[unique_id] + elif unique_id in self.manifest.semantic_models: + yield self.manifest.semantic_models[unique_id] + elif unique_id in self.manifest.unit_tests: + yield self.manifest.unit_tests[unique_id] + elif unique_id in self.manifest.saved_queries: + yield self.manifest.saved_queries[unique_id] else: raise DbtRuntimeError( - f'Got an unexpected result from node selection: "{node}"' - f"Expected a source or a node!" + f'Got an unexpected result from node selection: "{unique_id}"' + f"Listing this node type is not yet supported!" ) def generate_selectors(self): @@ -96,6 +111,18 @@ def generate_selectors(self): # metrics are searched for by pkg.metric_name metric_selector = ".".join([node.package_name, node.name]) yield f"metric:{metric_selector}" + elif node.resource_type == NodeType.SavedQuery: + assert isinstance(node, SavedQuery) + saved_query_selector = ".".join([node.package_name, node.name]) + yield f"saved_query:{saved_query_selector}" + elif node.resource_type == NodeType.SemanticModel: + assert isinstance(node, SemanticModel) + semantic_model_selector = ".".join([node.package_name, node.name]) + yield f"semantic_model:{semantic_model_selector}" + elif node.resource_type == NodeType.Unit: + assert isinstance(node, UnitTestDefinition) + unit_test_selector = ".".join([node.package_name, node.versioned_name]) + yield f"unit_test:{unit_test_selector}" else: # everything else is from `fqn` yield ".".join(node.fqn) @@ -118,53 +145,47 @@ def generate_json(self): } ) - def generate_paths(self): + def generate_paths(self) -> Iterator[str]: for node in self._iterate_selected_nodes(): yield node.original_file_path def run(self): - self.compile_manifest() - output = self.args.output - if output == "selector": - generator = self.generate_selectors - elif output == "name": - generator = self.generate_names - elif output == "json": - generator = self.generate_json - elif output == "path": - generator = self.generate_paths - else: - raise DbtInternalError("Invalid output {}".format(output)) + # We set up a context manager here with "task_contextvars" because we + # we need the project_root in compile_manifest. 
+ with task_contextvars(project_root=self.config.project_root): + self.compile_manifest() + output = self.args.output + if output == "selector": + generator = self.generate_selectors + elif output == "name": + generator = self.generate_names + elif output == "json": + generator = self.generate_json + elif output == "path": + generator = self.generate_paths + else: + raise DbtInternalError("Invalid output {}".format(output)) - return self.output_results(generator()) + return self.output_results(generator()) def output_results(self, results): """Log, or output a plain, newline-delimited, and ready-to-pipe list of nodes found.""" for result in results: self.node_results.append(result) - if get_flags().LOG_FORMAT == "json": - fire_event(ListCmdOut(msg=result)) - else: - # Cleaner to leave as print than to mutate the logger not to print timestamps. - print(result) + # No formatting, still get to stdout when --quiet is used + fire_event(PrintEvent(msg=result)) return self.node_results @property - def resource_types(self): + def resource_types(self) -> List[NodeType]: if self.args.models: return [NodeType.Model] - if not self.args.resource_types: - return list(self.DEFAULT_RESOURCE_VALUES) + resource_types = resource_types_from_args( + self.args, set(self.ALL_RESOURCE_VALUES), set(self.DEFAULT_RESOURCE_VALUES) + ) - values = set(self.args.resource_types) - if "default" in values: - values.remove("default") - values.update(self.DEFAULT_RESOURCE_VALUES) - if "all" in values: - values.remove("all") - values.update(self.ALL_RESOURCE_VALUES) - return list(values) + return list(resource_types) @property def selection_arg(self): @@ -175,27 +196,16 @@ def selection_arg(self): else: return self.args.select - def defer_to_manifest(self, adapter, selected_uids): - # list don't defer - return - - def get_node_selector(self): + def get_node_selector(self) -> ResourceTypeSelector: if self.manifest is None or self.graph is None: raise DbtInternalError("manifest and graph must be set to get perform node selection") - if self.resource_types == [NodeType.Test]: - return TestSelector( - graph=self.graph, - manifest=self.manifest, - previous_state=self.previous_state, - ) - else: - return ResourceTypeSelector( - graph=self.graph, - manifest=self.manifest, - previous_state=self.previous_state, - resource_types=self.resource_types, - include_empty_nodes=True, - ) + return ResourceTypeSelector( + graph=self.graph, + manifest=self.manifest, + previous_state=self.previous_state, + resource_types=self.resource_types, + include_empty_nodes=True, + ) def interpret_results(self, results): # list command should always return 0 as exit code diff --git a/core/dbt/task/printer.py b/core/dbt/task/printer.py index 9fae854bdb4..a5995d50b40 100644 --- a/core/dbt/task/printer.py +++ b/core/dbt/task/printer.py @@ -1,29 +1,23 @@ -from typing import Dict -from dbt.logger import ( - DbtStatusMessage, - TextOnly, -) -from dbt.events.functions import fire_event +from typing import Dict, Optional + +from dbt.artifacts.schemas.results import NodeStatus +from dbt.contracts.graph.nodes import Group from dbt.events.types import ( - Formatting, - RunResultWarning, - RunResultWarningMessage, - RunResultFailure, - StatsLine, + CheckNodeTestFailure, + EndOfRunSummary, RunResultError, RunResultErrorNoMessage, + RunResultFailure, + RunResultWarning, + RunResultWarningMessage, SQLCompiledPath, - CheckNodeTestFailure, - FirstRunResultError, - AfterFirstRunResultError, - EndOfRunSummary, + StatsLine, ) - -from dbt.tracking import InvocationProcessor 
-from dbt.events.format import pluralize - -from dbt.contracts.results import NodeStatus from dbt.node_types import NodeType +from dbt_common.events.base_types import EventLevel +from dbt_common.events.format import pluralize +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Formatting def get_counts(flat_nodes) -> str: @@ -35,11 +29,11 @@ def get_counts(flat_nodes) -> str: if node.resource_type == NodeType.Model: t = "{} {}".format(node.get_materialization(), t) elif node.resource_type == NodeType.Operation: - t = "hook" + t = "project hook" counts[t] = counts.get(t, 0) + 1 - stat_line = ", ".join([pluralize(v, k) for k, v in counts.items()]) + stat_line = ", ".join([pluralize(v, k).replace("_", " ") for k, v in counts.items()]) return stat_line @@ -71,65 +65,77 @@ def print_run_status_line(results) -> None: stats[result_type] += 1 stats["total"] += 1 - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) fire_event(StatsLine(stats=stats)) -def print_run_result_error(result, newline: bool = True, is_warning: bool = False) -> None: - if newline: - with TextOnly(): - fire_event(Formatting("")) - +def print_run_result_error( + result, newline: bool = True, is_warning: bool = False, group: Optional[Group] = None +) -> None: + # set node_info for logging events + node_info = None + if hasattr(result, "node") and result.node: + node_info = result.node.node_info if result.status == NodeStatus.Fail or (is_warning and result.status == NodeStatus.Warn): + if newline: + fire_event(Formatting("")) if is_warning: + group_dict = group.to_logging_dict() if group else None fire_event( RunResultWarning( resource_type=result.node.resource_type, node_name=result.node.name, path=result.node.original_file_path, + node_info=node_info, + group=group_dict, ) ) else: + group_dict = group.to_logging_dict() if group else None fire_event( RunResultFailure( resource_type=result.node.resource_type, node_name=result.node.name, path=result.node.original_file_path, + node_info=node_info, + group=group_dict, ) ) if result.message: if is_warning: - fire_event(RunResultWarningMessage(msg=result.message)) + fire_event(RunResultWarningMessage(msg=result.message, node_info=node_info)) else: - fire_event(RunResultError(msg=result.message)) + group_dict = group.to_logging_dict() if group else None + fire_event( + RunResultError(msg=result.message, node_info=node_info, group=group_dict) + ) else: - fire_event(RunResultErrorNoMessage(status=result.status)) + fire_event(RunResultErrorNoMessage(status=result.status, node_info=node_info)) - if result.node.build_path is not None: - with TextOnly(): - fire_event(Formatting("")) - fire_event(SQLCompiledPath(path=result.node.compiled_path)) + if result.node.compiled_path is not None: + fire_event(Formatting("")) + fire_event(SQLCompiledPath(path=result.node.compiled_path, node_info=node_info)) if result.node.should_store_failures: - with TextOnly(): - fire_event(Formatting("")) - fire_event(CheckNodeTestFailure(relation_name=result.node.relation_name)) - + fire_event(Formatting("")) + fire_event( + CheckNodeTestFailure(relation_name=result.node.relation_name, node_info=node_info) + ) + elif result.status == NodeStatus.Skipped and result.message is not None: + if newline: + fire_event(Formatting(""), level=EventLevel.DEBUG) + fire_event(RunResultError(msg=result.message), level=EventLevel.DEBUG) elif result.message is not None: - first = True - for line in result.message.split("\n"): - # TODO: why do we format like this? 
Is there a reason this needs to - # be split instead of sending it as a single log line? - if first: - fire_event(FirstRunResultError(msg=line)) - first = False - else: - fire_event(AfterFirstRunResultError(msg=line)) + if newline: + fire_event(Formatting("")) + group_dict = group.to_logging_dict() if group else None + fire_event(RunResultError(msg=result.message, node_info=node_info, group=group_dict)) -def print_run_end_messages(results, keyboard_interrupt: bool = False) -> None: +def print_run_end_messages( + results, keyboard_interrupt: bool = False, groups: Optional[Dict[str, Group]] = None +) -> None: errors, warnings = [], [] for r in results: if r.status in (NodeStatus.RuntimeErr, NodeStatus.Error, NodeStatus.Fail): @@ -141,21 +147,21 @@ def print_run_end_messages(results, keyboard_interrupt: bool = False) -> None: elif r.status == NodeStatus.Warn: warnings.append(r) - with DbtStatusMessage(), InvocationProcessor(): - with TextOnly(): - fire_event(Formatting("")) - fire_event( - EndOfRunSummary( - num_errors=len(errors), - num_warnings=len(warnings), - keyboard_interrupt=keyboard_interrupt, - ) + fire_event(Formatting("")) + fire_event( + EndOfRunSummary( + num_errors=len(errors), + num_warnings=len(warnings), + keyboard_interrupt=keyboard_interrupt, ) + ) - for error in errors: - print_run_result_error(error, is_warning=False) + for error in errors: + group = groups.get(error.node.unique_id) if groups and hasattr(error, "node") else None + print_run_result_error(error, is_warning=False, group=group) - for warning in warnings: - print_run_result_error(warning, is_warning=True) + for warning in warnings: + group = groups.get(warning.node.unique_id) if groups and hasattr(warning, "node") else None + print_run_result_error(warning, is_warning=True, group=group) - print_run_status_line(results) + print_run_status_line(results) diff --git a/core/dbt/task/retry.py b/core/dbt/task/retry.py index 3a14932aea8..fd943b1151f 100644 --- a/core/dbt/task/retry.py +++ b/core/dbt/task/retry.py @@ -1,24 +1,42 @@ from pathlib import Path +from click import get_current_context +from click.core import ParameterSource + +from dbt.artifacts.schemas.results import NodeStatus from dbt.cli.flags import Flags from dbt.cli.types import Command as CliCommand from dbt.config import RuntimeConfig -from dbt.contracts.results import NodeStatus -from dbt.contracts.state import PreviousState -from dbt.exceptions import DbtRuntimeError +from dbt.contracts.state import load_result_state +from dbt.flags import get_flags, set_flags from dbt.graph import GraphQueue +from dbt.parser.manifest import parse_manifest from dbt.task.base import ConfiguredTask from dbt.task.build import BuildTask from dbt.task.clone import CloneTask from dbt.task.compile import CompileTask -from dbt.task.generate import GenerateTask +from dbt.task.docs.generate import GenerateTask from dbt.task.run import RunTask from dbt.task.run_operation import RunOperationTask from dbt.task.seed import SeedTask from dbt.task.snapshot import SnapshotTask from dbt.task.test import TestTask +from dbt_common.exceptions import DbtRuntimeError RETRYABLE_STATUSES = {NodeStatus.Error, NodeStatus.Fail, NodeStatus.Skipped, NodeStatus.RuntimeErr} +IGNORE_PARENT_FLAGS = { + "log_path", + "output_path", + "profiles_dir", + "profiles_dir_exists_false", + "project_dir", + "defer_state", + "deprecated_state", + "target_path", + "warn_error", +} + +ALLOW_CLI_OVERRIDE_FLAGS = {"vars"} TASK_DICT = { "build": BuildTask, @@ -46,54 +64,65 @@ class RetryTask(ConfiguredTask): - def 
__init__(self, args, config, manifest): - super().__init__(args, config, manifest) - - state_path = self.args.state or self.config.target_path - - if self.args.warn_error: - RETRYABLE_STATUSES.add(NodeStatus.Warn) - - self.previous_state = PreviousState( - state_path=Path(state_path), - target_path=Path(self.config.target_path), - project_root=Path(self.config.project_root), + def __init__(self, args: Flags, config: RuntimeConfig) -> None: + # load previous run results + state_path = args.state or config.target_path + self.previous_results = load_result_state( + Path(config.project_root) / Path(state_path) / "run_results.json" ) - - if not self.previous_state.results: + if not self.previous_results: raise DbtRuntimeError( f"Could not find previous run in '{state_path}' target directory" ) - - self.previous_args = self.previous_state.results.args + self.previous_args = self.previous_results.args self.previous_command_name = self.previous_args.get("which") - self.task_class = TASK_DICT.get(self.previous_command_name) - - def run(self): - unique_ids = set( - [ - result.unique_id - for result in self.previous_state.results.results - if result.status in RETRYABLE_STATUSES - ] - ) - cli_command = CMD_DICT.get(self.previous_command_name) + # Reslove flags and config + if args.warn_error: + RETRYABLE_STATUSES.add(NodeStatus.Warn) + cli_command = CMD_DICT.get(self.previous_command_name) # type: ignore # Remove these args when their default values are present, otherwise they'll raise an exception args_to_remove = { "show": lambda x: True, "resource_types": lambda x: x == [], "warn_error_options": lambda x: x == {"exclude": [], "include": []}, } - for k, v in args_to_remove.items(): if k in self.previous_args and v(self.previous_args[k]): del self.previous_args[k] - - retry_flags = Flags.from_dict(cli_command, self.previous_args) + previous_args = { + k: v for k, v in self.previous_args.items() if k not in IGNORE_PARENT_FLAGS + } + click_context = get_current_context() + current_args = { + k: v + for k, v in args.__dict__.items() + if k in IGNORE_PARENT_FLAGS + or ( + click_context.get_parameter_source(k) == ParameterSource.COMMANDLINE + and k in ALLOW_CLI_OVERRIDE_FLAGS + ) + } + combined_args = {**previous_args, **current_args} + retry_flags = Flags.from_dict(cli_command, combined_args) # type: ignore + set_flags(retry_flags) retry_config = RuntimeConfig.from_args(args=retry_flags) + # Parse manifest using resolved config/flags + manifest = parse_manifest(retry_config, False, True, retry_flags.write_json) # type: ignore + super().__init__(args, retry_config, manifest) + self.task_class = TASK_DICT.get(self.previous_command_name) # type: ignore + + def run(self): + unique_ids = set( + [ + result.unique_id + for result in self.previous_results.results + if result.status in RETRYABLE_STATUSES + ] + ) + class TaskWrapper(self.task_class): def get_graph_queue(self): new_graph = self.graph.get_subset_graph(unique_ids) @@ -104,8 +133,8 @@ def get_graph_queue(self): ) task = TaskWrapper( - retry_flags, - retry_config, + get_flags(), + self.config, self.manifest, ) diff --git a/core/dbt/task/run.py b/core/dbt/task/run.py index 4b1cea04727..e6e380b4063 100644 --- a/core/dbt/task/run.py +++ b/core/dbt/task/run.py @@ -1,58 +1,60 @@ import functools +import os import threading import time -from typing import List, Dict, Any, Iterable, Set, Tuple, Optional, AbstractSet - -from dbt.dataclass_schema import dbtClassMixin - -from .compile import CompileRunner, CompileTask - -from .printer import ( - 
print_run_end_messages, - get_counts, -) from datetime import datetime -from dbt import tracking -from dbt import utils +from typing import AbstractSet, Any, Dict, Iterable, List, Optional, Set, Tuple, Type + +from dbt import tracking, utils from dbt.adapters.base import BaseRelation +from dbt.adapters.events.types import ( + DatabaseErrorRunningHook, + FinishedRunningStats, + HooksRunning, +) +from dbt.adapters.exceptions import MissingMaterializationError +from dbt.artifacts.resources import Hook +from dbt.artifacts.resources.types import BatchSize +from dbt.artifacts.schemas.results import ( + BaseResult, + NodeStatus, + RunningStatus, + RunStatus, +) +from dbt.artifacts.schemas.run import RunResult +from dbt.cli.flags import Flags from dbt.clients.jinja import MacroGenerator +from dbt.config.runtime import RuntimeConfig from dbt.context.providers import generate_runtime_model_context -from dbt.contracts.graph.model_config import Hook -from dbt.contracts.graph.nodes import HookNode, ResultNode -from dbt.contracts.results import NodeStatus, RunResult, RunStatus, RunningStatus, BaseResult -from dbt.exceptions import ( - CompilationError, - DbtInternalError, - MissingMaterializationError, - DbtRuntimeError, - DbtValidationError, -) -from dbt.events.functions import fire_event, get_invocation_id +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import HookNode, ModelNode, ResultNode from dbt.events.types import ( - DatabaseErrorRunningHook, - Formatting, - HooksRunning, - FinishedRunningStats, - LogModelResult, - LogStartLine, LogHookEndLine, LogHookStartLine, + LogModelResult, + LogStartLine, + RunningOperationCaughtError, ) -from dbt.events.base_types import EventLevel -from dbt.logger import ( - TextOnly, - HookMetadata, - UniqueID, - TimestampNamed, - DbtModelState, -) +from dbt.exceptions import CompilationError, DbtInternalError, DbtRuntimeError from dbt.graph import ResourceTypeSelector from dbt.hooks import get_hook_dict +from dbt.materializations.incremental.microbatch import MicrobatchBuilder from dbt.node_types import NodeType, RunHookType +from dbt.task.base import BaseRunner +from dbt_common.clients.jinja import MacroProtocol +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.events.base_types import EventLevel +from dbt_common.events.contextvars import log_contextvars +from dbt_common.events.functions import fire_event, get_invocation_id +from dbt_common.events.types import Formatting +from dbt_common.exceptions import DbtValidationError + +from .compile import CompileRunner, CompileTask +from .printer import get_counts, print_run_end_messages class Timer: - def __init__(self): + def __init__(self) -> None: self.start = None self.end = None @@ -183,9 +185,24 @@ def get_node_representation(self): relation = relation.include(database=False) return str(relation) - def describe_node(self): + def describe_node(self) -> str: # TODO CL 'language' will be moved to node level when we change representation - return f"{self.node.language} {self.node.get_materialization()} model {self.get_node_representation()}" + materialization_strategy = self.node.config.get("incremental_strategy") + materialization = ( + "microbatch" + if materialization_strategy == "microbatch" + else self.node.get_materialization() + ) + return f"{self.node.language} {materialization} model {self.get_node_representation()}" + + def describe_batch(self, batch_start: Optional[datetime]) -> str: + # Only visualize date if batch_start year/month/day + 
formatted_batch_start = ( + batch_start.date() + if (batch_start and self.node.config.batch_size != BatchSize.hour) + else batch_start + ) + return f"batch {formatted_batch_start} of {self.get_node_representation()}" def print_start_line(self): fire_event( @@ -217,10 +234,55 @@ def print_result_line(self, result): level=level, ) - def before_execute(self): + def print_batch_result_line( + self, + result: RunResult, + batch_start: Optional[datetime], + batch_idx: int, + batch_total: int, + exception: Optional[Exception], + ): + description = self.describe_batch(batch_start) + if result.status == NodeStatus.Error: + status = result.status + level = EventLevel.ERROR + else: + status = result.message + level = EventLevel.INFO + fire_event( + LogModelResult( + description=description, + status=status, + index=batch_idx, + total=batch_total, + execution_time=result.execution_time, + node_info=self.node.node_info, + ), + level=level, + ) + if exception: + fire_event(RunningOperationCaughtError(exc=str(exception))) + + def print_batch_start_line( + self, batch_start: Optional[datetime], batch_idx: int, batch_total: int + ) -> None: + if batch_start is None: + return + + batch_description = self.describe_batch(batch_start) + fire_event( + LogStartLine( + description=batch_description, + index=batch_idx, + total=batch_total, + node_info=self.node.node_info, + ) + ) + + def before_execute(self) -> None: self.print_start_line() - def after_execute(self, result): + def after_execute(self, result) -> None: track_model_run(self.node_index, self.num_nodes, result) self.print_result_line(result) @@ -242,6 +304,35 @@ def _build_run_model_result(self, model, context): failures=result.get("failures"), ) + def _build_run_microbatch_model_result( + self, model: ModelNode, batch_run_results: List[RunResult] + ) -> RunResult: + failures = sum([result.failures for result in batch_run_results if result.failures]) + return RunResult( + node=model, + # TODO We should do something like RunStatus.PartialSuccess if there is a mixture of success and failures + status=RunStatus.Success if failures != len(batch_run_results) else RunStatus.Error, + timing=[], + thread_id=threading.current_thread().name, + # TODO -- why isn't this getting propagated to logs? 
+ execution_time=0, + message="SUCCESS" if failures != len(batch_run_results) else "ERROR", + adapter_response={}, + failures=failures, + ) + + def _build_failed_run_batch_result(self, model: ModelNode) -> RunResult: + return RunResult( + node=model, + status=RunStatus.Error, + timing=[], + thread_id=threading.current_thread().name, + execution_time=0, + message="ERROR", + adapter_response={}, + failures=1, + ) + def _materialization_relations(self, result: Any, model) -> List[BaseRelation]: if isinstance(result, str): msg = ( @@ -259,6 +350,48 @@ def _materialization_relations(self, result: Any, model) -> List[BaseRelation]: ) raise CompilationError(msg, node=model) + def _execute_model( + self, + hook_ctx: Any, + context_config: Any, + model: ModelNode, + context: Dict[str, Any], + materialization_macro: MacroProtocol, + ) -> RunResult: + try: + result = MacroGenerator( + materialization_macro, context, stack=context["context_macro_stack"] + )() + finally: + self.adapter.post_model_hook(context_config, hook_ctx) + + for relation in self._materialization_relations(result, model): + self.adapter.cache_added(relation.incorporate(dbt_created=True)) + + return self._build_run_model_result(model, context) + + def _execute_microbatch_model( + self, + hook_ctx: Any, + context_config: Any, + model: ModelNode, + manifest: Manifest, + context: Dict[str, Any], + materialization_macro: MacroProtocol, + ) -> RunResult: + batch_results = None + try: + batch_results = self._execute_microbatch_materialization( + model, manifest, context, materialization_macro + ) + finally: + self.adapter.post_model_hook(context_config, hook_ctx) + + if batch_results is not None: + return self._build_run_microbatch_model_result(model, batch_results) + else: + return self._build_run_model_result(model, context) + def execute(self, model, manifest): context = generate_runtime_model_context(model, self.config, manifest) @@ -287,34 +420,109 @@ def execute(self, model, manifest): ) hook_ctx = self.adapter.pre_model_hook(context_config) - try: - result = MacroGenerator( - materialization_macro, context, stack=context["context_macro_stack"] - )() - finally: - self.adapter.post_model_hook(context_config, hook_ctx) - for relation in self._materialization_relations(result, model): - self.adapter.cache_added(relation.incorporate(dbt_created=True)) + if ( + os.environ.get("DBT_EXPERIMENTAL_MICROBATCH") + and model.config.materialized == "incremental" + and model.config.incremental_strategy == "microbatch" + ): + return self._execute_microbatch_model( + hook_ctx, context_config, model, manifest, context, materialization_macro + ) + else: + return self._execute_model( + hook_ctx, context_config, model, context, materialization_macro + ) - return self._build_run_model_result(model, context) + def _execute_microbatch_materialization( + self, + model: ModelNode, + manifest: Manifest, + context: Dict[str, Any], + materialization_macro: MacroProtocol, + ) -> List[RunResult]: + batch_results: List[RunResult] = [] + microbatch_builder = MicrobatchBuilder( + model=model, + is_incremental=self._is_incremental(model), + event_time_start=getattr(self.config.args, "EVENT_TIME_START", None), + event_time_end=getattr(self.config.args, "EVENT_TIME_END", None), + ) + end = microbatch_builder.build_end_time() + start = microbatch_builder.build_start_time(end) + batches = microbatch_builder.build_batches(start, end) + + # iterate over each batch, calling materialization_macro to get a batch-level run result + for batch_idx, batch in enumerate(batches): 
+ self.print_batch_start_line(batch[0], batch_idx + 1, len(batches)) + + exception = None + try: + # Set start/end in context prior to re-compiling + model.config["__dbt_internal_microbatch_event_time_start"] = batch[0] + model.config["__dbt_internal_microbatch_event_time_end"] = batch[1] + + # Recompile node to re-resolve refs with event time filters rendered, update context + self.compiler.compile_node(model, manifest, {}) + context["model"] = model + context["sql"] = model.compiled_code + context["compiled_code"] = model.compiled_code + + # Materialize batch and cache any materialized relations + result = MacroGenerator( + materialization_macro, context, stack=context["context_macro_stack"] + )() + for relation in self._materialization_relations(result, model): + self.adapter.cache_added(relation.incorporate(dbt_created=True)) + + # Build result of executed batch + batch_run_result = self._build_run_model_result(model, context) + # Update context vars for future batches + context["is_incremental"] = lambda: True + context["should_full_refresh"] = lambda: False + except Exception as e: + exception = e + batch_run_result = self._build_failed_run_batch_result(model) + + self.print_batch_result_line( + batch_run_result, batch[0], batch_idx + 1, len(batches), exception + ) + batch_results.append(batch_run_result) + + return batch_results + + def _is_incremental(self, model) -> bool: + # TODO: Remove. This is a temporary method. We're working with adapters on + # a strategy to ensure we can access the `is_incremental` logic without drift + relation_info = self.adapter.Relation.create_from(self.config, model) + relation = self.adapter.get_relation( + relation_info.database, relation_info.schema, relation_info.name + ) + return ( + relation is not None + and relation.type == "table" + and model.config.materialized == "incremental" + and not (getattr(self.config.args, "FULL_REFRESH", False) or model.config.full_refresh) + ) class RunTask(CompileTask): - def __init__(self, args, config, manifest): + def __init__(self, args: Flags, config: RuntimeConfig, manifest: Manifest) -> None: super().__init__(args, config, manifest) - self.ran_hooks = [] + self.ran_hooks: List[HookNode] = [] self._total_executed = 0 def index_offset(self, value: int) -> int: return self._total_executed + value - def raise_on_first_error(self): + def raise_on_first_error(self) -> bool: return False - def get_hook_sql(self, adapter, hook, idx, num_hooks, extra_context): - compiler = adapter.get_compiler() - compiled = compiler.compile_node(hook, self.manifest, extra_context) + def get_hook_sql(self, adapter, hook, idx, num_hooks, extra_context) -> str: + if self.manifest is None: + raise DbtInternalError("compile_node called before manifest was loaded") + + compiled = self.compiler.compile_node(hook, self.manifest, extra_context) statement = compiled.compiled_code hook_index = hook.index or num_hooks hook_obj = get_hook(statement, index=hook_index) @@ -337,7 +545,7 @@ def get_hooks_by_type(self, hook_type: RunHookType) -> List[HookNode]: hooks.sort(key=self._hook_keyfunc) return hooks - def run_hooks(self, adapter, hook_type: RunHookType, extra_context): + def run_hooks(self, adapter, hook_type: RunHookType, extra_context) -> None: ordered_hooks = self.get_hooks_by_type(hook_type) # on-run-* hooks should run outside of a transaction.
This happens @@ -348,31 +556,27 @@ def run_hooks(self, adapter, hook_type: RunHookType, extra_context): return num_hooks = len(ordered_hooks) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) fire_event(HooksRunning(num_hooks=num_hooks, hook_type=hook_type)) - startctx = TimestampNamed("node_started_at") - finishctx = TimestampNamed("node_finished_at") - for idx, hook in enumerate(ordered_hooks, start=1): - hook.update_event_status( - started_at=datetime.utcnow().isoformat(), node_status=RunningStatus.Started - ) - sql = self.get_hook_sql(adapter, hook, idx, num_hooks, extra_context) - - hook_text = "{}.{}.{}".format(hook.package_name, hook_type, hook.index) - hook_meta_ctx = HookMetadata(hook, self.index_offset(idx)) - with UniqueID(hook.unique_id): - with hook_meta_ctx, startctx: - fire_event( - LogHookStartLine( - statement=hook_text, - index=idx, - total=num_hooks, - node_info=hook.node_info, - ) + # We want to include node_info in the appropriate log files, so use + # log_contextvars + with log_contextvars(node_info=hook.node_info): + hook.update_event_status( + started_at=datetime.utcnow().isoformat(), node_status=RunningStatus.Started + ) + sql = self.get_hook_sql(adapter, hook, idx, num_hooks, extra_context) + + hook_text = "{}.{}.{}".format(hook.package_name, hook_type, hook.index) + fire_event( + LogHookStartLine( + statement=hook_text, + index=idx, + total=num_hooks, + node_info=hook.node_info, ) + ) with Timer() as timer: if len(sql.strip()) > 0: @@ -383,26 +587,24 @@ def run_hooks(self, adapter, hook_type: RunHookType, extra_context): self.ran_hooks.append(hook) hook.update_event_status(finished_at=datetime.utcnow().isoformat()) - with finishctx, DbtModelState({"node_status": "passed"}): - hook.update_event_status(node_status=RunStatus.Success) - fire_event( - LogHookEndLine( - statement=hook_text, - status=status, - index=idx, - total=num_hooks, - execution_time=timer.elapsed, - node_info=hook.node_info, - ) + hook.update_event_status(node_status=RunStatus.Success) + fire_event( + LogHookEndLine( + statement=hook_text, + status=status, + index=idx, + total=num_hooks, + execution_time=timer.elapsed, + node_info=hook.node_info, ) - # `_event_status` dict is only used for logging. Make sure - # it gets deleted when we're done with it - hook.clear_event_status() + ) + # `_event_status` dict is only used for logging. 
Make sure + # it gets deleted when we're done with it + hook.clear_event_status() self._total_executed += len(ordered_hooks) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) def safe_run_hooks( self, adapter, hook_type: RunHookType, extra_context: Dict[str, Any] @@ -423,7 +625,7 @@ def safe_run_hooks( ) ) - def print_results_line(self, results, execution_time): + def print_results_line(self, results, execution_time) -> None: nodes = [r.node for r in results if hasattr(r, "node")] + self.ran_hooks stat_line = get_counts(nodes) @@ -432,23 +634,22 @@ def print_results_line(self, results, execution_time): if execution_time is not None: execution = utils.humanize_execution_time(execution_time=execution_time) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) fire_event( FinishedRunningStats( stat_line=stat_line, execution=execution, execution_time=execution_time ) ) - def before_run(self, adapter, selected_uids: AbstractSet[str]): + def before_run(self, adapter, selected_uids: AbstractSet[str]) -> None: with adapter.connection_named("master"): + self.defer_to_manifest() required_schemas = self.get_model_schemas(adapter, selected_uids) self.create_schemas(adapter, required_schemas) self.populate_adapter_cache(adapter, required_schemas) - self.defer_to_manifest(adapter, selected_uids) self.safe_run_hooks(adapter, RunHookType.Start, {}) - def after_run(self, adapter, results): + def after_run(self, adapter, results) -> None: # in on-run-end hooks, provide the value 'database_schemas', which is a # list of unique (database, schema) pairs that successfully executed # models were in. For backwards compatibility, include the old @@ -481,9 +682,20 @@ def get_node_selector(self) -> ResourceTypeSelector: resource_types=[NodeType.Model], ) - def get_runner_type(self, _): + def get_runner_type(self, _) -> Optional[Type[BaseRunner]]: return ModelRunner - def task_end_messages(self, results): + def get_groups_for_nodes(self, nodes): + node_to_group_name_map = {i: k for k, v in self.manifest.group_map.items() for i in v} + group_name_to_group_map = {v.name: v for v in self.manifest.groups.values()} + + return { + node.unique_id: group_name_to_group_map.get(node_to_group_name_map.get(node.unique_id)) + for node in nodes + } + + def task_end_messages(self, results) -> None: + groups = self.get_groups_for_nodes([r.node for r in results if hasattr(r, "node")]) + if results: - print_run_end_messages(results) + print_run_end_messages(results, groups=groups) diff --git a/core/dbt/task/run_operation.py b/core/dbt/task/run_operation.py index 15f0e9c65cf..6f7cd7b64c0 100644 --- a/core/dbt/task/run_operation.py +++ b/core/dbt/task/run_operation.py @@ -2,27 +2,31 @@ import threading import traceback from datetime import datetime +from typing import TYPE_CHECKING -import agate - -import dbt.exceptions +import dbt_common.exceptions from dbt.adapters.factory import get_adapter +from dbt.artifacts.schemas.results import RunStatus, TimingInfo +from dbt.artifacts.schemas.run import RunResult, RunResultsArtifact from dbt.contracts.files import FileHash from dbt.contracts.graph.nodes import HookNode -from dbt.contracts.results import RunResultsArtifact, RunResult, RunStatus, TimingInfo -from dbt.events.functions import fire_event from dbt.events.types import ( + LogDebugStackTrace, RunningOperationCaughtError, RunningOperationUncaughtError, - LogDebugStackTrace, ) -from dbt.exceptions import DbtInternalError from dbt.node_types import NodeType from dbt.task.base import 
ConfiguredTask +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtInternalError RESULT_FILE_NAME = "run_results.json" +if TYPE_CHECKING: + import agate + + class RunOperationTask(ConfiguredTask): def _get_macro_parts(self): macro_name = self.args.macro @@ -33,7 +37,7 @@ def _get_macro_parts(self): return package_name, macro_name - def _run_unsafe(self, package_name, macro_name) -> agate.Table: + def _run_unsafe(self, package_name, macro_name) -> "agate.Table": adapter = get_adapter(self.config) macro_kwargs = self.args.args @@ -41,7 +45,7 @@ def _run_unsafe(self, package_name, macro_name) -> agate.Table: with adapter.connection_named("macro_{}".format(macro_name)): adapter.clear_transaction() res = adapter.execute_macro( - macro_name, project=package_name, kwargs=macro_kwargs, manifest=self.manifest + macro_name, project=package_name, kwargs=macro_kwargs, macro_resolver=self.manifest ) return res @@ -56,7 +60,7 @@ def run(self) -> RunResultsArtifact: try: self._run_unsafe(package_name, macro_name) - except dbt.exceptions.Exception as exc: + except dbt_common.exceptions.DbtBaseException as exc: fire_event(RunningOperationCaughtError(exc=str(exc))) fire_event(LogDebugStackTrace(exc_info=traceback.format_exc())) success = False diff --git a/core/dbt/task/runnable.py b/core/dbt/task/runnable.py index f872372f577..19a2a968df8 100644 --- a/core/dbt/task/runnable.py +++ b/core/dbt/task/runnable.py @@ -5,74 +5,77 @@ from datetime import datetime from multiprocessing.dummy import Pool as ThreadPool from pathlib import Path -from typing import Optional, Dict, List, Set, Tuple, Iterable, AbstractSet +from typing import AbstractSet, Dict, Iterable, List, Optional, Set, Tuple, Type, Union import dbt.exceptions import dbt.tracking import dbt.utils +import dbt_common.utils.formatting from dbt.adapters.base import BaseRelation from dbt.adapters.factory import get_adapter -from dbt.contracts.graph.manifest import WritableManifest -from dbt.contracts.graph.nodes import ResultNode -from dbt.contracts.results import ( +from dbt.artifacts.schemas.results import ( + BaseResult, NodeStatus, - RunExecutionResult, RunningStatus, - RunResult, RunStatus, ) +from dbt.artifacts.schemas.run import RunExecutionResult, RunResult +from dbt.cli.flags import Flags +from dbt.config.runtime import RuntimeConfig +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import ResultNode from dbt.contracts.state import PreviousState -from dbt.events.contextvars import log_contextvars, task_contextvars -from dbt.events.functions import fire_event, warn_or_error from dbt.events.types import ( - Formatting, - LogCancelLine, - DefaultSelector, - NodeStart, - NodeFinished, - QueryCancelationUnsupported, ConcurrencyLine, + DefaultSelector, EndRunResult, + GenericExceptionOnRun, + LogCancelLine, + MarkSkippedChildren, + NodeFinished, + NodeStart, NothingToDo, + QueryCancelationUnsupported, ) -from dbt.exceptions import ( - DbtInternalError, - NotImplementedError, - DbtRuntimeError, - FailFastError, -) +from dbt.exceptions import DbtInternalError, DbtRuntimeError, FailFastError from dbt.flags import get_flags -from dbt.graph import GraphQueue, NodeSelector, SelectionSpec, parse_difference -from dbt.logger import ( - DbtProcessState, - TextOnly, - UniqueID, - TimestampNamed, - DbtModelState, - ModelMetadata, - NodeCount, +from dbt.graph import ( + GraphQueue, + NodeSelector, + SelectionSpec, + UniqueId, + parse_difference, ) from dbt.parser.manifest import write_manifest 
-from dbt.task.base import ConfiguredTask -from .printer import ( - print_run_result_error, - print_run_end_messages, -) +from dbt.task.base import BaseRunner, ConfiguredTask +from dbt_common.context import _INVOCATION_CONTEXT_VAR, get_invocation_context +from dbt_common.dataclass_schema import StrEnum +from dbt_common.events.contextvars import log_contextvars, task_contextvars +from dbt_common.events.functions import fire_event, warn_or_error +from dbt_common.events.types import Formatting +from dbt_common.exceptions import NotImplementedError + +from .printer import print_run_end_messages, print_run_result_error RESULT_FILE_NAME = "run_results.json" -RUNNING_STATE = DbtProcessState("running") + + +class GraphRunnableMode(StrEnum): + Topological = "topological" + Independent = "independent" class GraphRunnableTask(ConfiguredTask): MARK_DEPENDENT_ERRORS_STATUSES = [NodeStatus.Error] - def __init__(self, args, config, manifest): + def __init__(self, args: Flags, config: RuntimeConfig, manifest: Manifest) -> None: super().__init__(args, config, manifest) + self.config = config self._flattened_nodes: Optional[List[ResultNode]] = None - self._raise_next_tick = None - self._skipped_children = {} + self._raise_next_tick: Optional[DbtRuntimeError] = None + self._skipped_children: Dict[str, Optional[RunResult]] = {} self.job_queue: Optional[GraphQueue] = None - self.node_results = [] + self.node_results: List[BaseResult] = [] self.num_nodes: int = 0 self.previous_state: Optional[PreviousState] = None self.previous_defer_state: Optional[PreviousState] = None @@ -106,14 +109,11 @@ def exclusion_arg(self): def get_selection_spec(self) -> SelectionSpec: default_selector_name = self.config.get_default_selector_name() - # TODO: The "eager" string below needs to be replaced with programatic access - # to the default value for the indirect selection parameter in - # dbt.cli.params.indirect_selection - # - # Doing that is actually a little tricky, so I'm punting it to a new ticket GH #6397 - indirect_selection = getattr(self.args, "INDIRECT_SELECTION", "eager") - - if self.args.selector: + spec: Union[SelectionSpec, bool] + if hasattr(self.args, "inline") and self.args.inline: + # We want an empty selection spec. 
+ spec = parse_difference(None, None) + elif self.args.selector: # use pre-defined selector (--selector) spec = self.config.get_selector(self.args.selector) elif not (self.selection_arg or self.exclusion_arg) and default_selector_name: @@ -121,22 +121,40 @@ def get_selection_spec(self) -> SelectionSpec: fire_event(DefaultSelector(name=default_selector_name)) spec = self.config.get_selector(default_selector_name) else: + # This is what's used with no default selector and no selection # use --select and --exclude args - spec = parse_difference(self.selection_arg, self.exclusion_arg, indirect_selection) - return spec + spec = parse_difference(self.selection_arg, self.exclusion_arg) + # mypy complains because the return values of get_selector and parse_difference + # are different + return spec # type: ignore @abstractmethod def get_node_selector(self) -> NodeSelector: raise NotImplementedError(f"get_node_selector not implemented for task {type(self)}") - @abstractmethod - def defer_to_manifest(self, adapter, selected_uids: AbstractSet[str]): - raise NotImplementedError(f"defer_to_manifest not implemented for task {type(self)}") + def defer_to_manifest(self): + deferred_manifest = self._get_deferred_manifest() + if deferred_manifest is None: + return + if self.manifest is None: + raise DbtInternalError( + "Expected to defer to manifest, but there is no runtime manifest to defer from!" + ) + self.manifest.merge_from_artifact(other=deferred_manifest) def get_graph_queue(self) -> GraphQueue: selector = self.get_node_selector() + # Following uses self.selection_arg and self.exclusion_arg spec = self.get_selection_spec() - return selector.get_graph_queue(spec) + + preserve_edges = True + if self.get_run_mode() == GraphRunnableMode.Independent: + preserve_edges = False + + return selector.get_graph_queue(spec, preserve_edges) + + def get_run_mode(self) -> GraphRunnableMode: + return GraphRunnableMode.Topological def _runtime_initialize(self): self.compile_manifest() @@ -152,23 +170,27 @@ def _runtime_initialize(self): self._flattened_nodes.append(self.manifest.nodes[uid]) elif uid in self.manifest.sources: self._flattened_nodes.append(self.manifest.sources[uid]) + elif uid in self.manifest.saved_queries: + self._flattened_nodes.append(self.manifest.saved_queries[uid]) + elif uid in self.manifest.unit_tests: + self._flattened_nodes.append(self.manifest.unit_tests[uid]) else: raise DbtInternalError( - f"Node selection returned {uid}, expected a node or a source" + f"Node selection returned {uid}, expected a node, a source, or a unit test" ) self.num_nodes = len([n for n in self._flattened_nodes if not n.is_ephemeral_model]) - def raise_on_first_error(self): + def raise_on_first_error(self) -> bool: return False - def get_runner_type(self, node): + def get_runner_type(self, node) -> Optional[Type[BaseRunner]]: raise NotImplementedError("Not Implemented") - def result_path(self): + def result_path(self) -> str: return os.path.join(self.config.project_target_path, RESULT_FILE_NAME) - def get_runner(self, node): + def get_runner(self, node) -> BaseRunner: adapter = get_adapter(self.config) run_count: int = 0 num_nodes: int = 0 @@ -182,36 +204,56 @@ def get_runner(self, node): num_nodes = self.num_nodes cls = self.get_runner_type(node) + + if cls is None: + raise DbtInternalError("Could not find runner type for node.") + return cls(self.config, adapter, node, run_count, num_nodes) - def call_runner(self, runner): - uid_context = UniqueID(runner.node.unique_id) - with RUNNING_STATE, uid_context, 
log_contextvars(node_info=runner.node.node_info): - startctx = TimestampNamed("node_started_at") - index = self.index_offset(runner.node_index) + def call_runner(self, runner: BaseRunner) -> RunResult: + with log_contextvars(node_info=runner.node.node_info): runner.node.update_event_status( started_at=datetime.utcnow().isoformat(), node_status=RunningStatus.Started ) - extended_metadata = ModelMetadata(runner.node, index) - - with startctx, extended_metadata: - fire_event( - NodeStart( - node_info=runner.node.node_info, - ) + fire_event( + NodeStart( + node_info=runner.node.node_info, ) - status: Dict[str, str] = {} + ) try: result = runner.run_with_hooks(self.manifest) + except Exception as e: + thread_exception = e finally: - finishctx = TimestampNamed("finished_at") - with finishctx, DbtModelState(status): + if result is not None: fire_event( NodeFinished( node_info=runner.node.node_info, run_result=result.to_msg_dict(), ) ) + else: + msg = f"Exception on worker thread. {thread_exception}" + + fire_event( + GenericExceptionOnRun( + unique_id=runner.node.unique_id, + exc=str(thread_exception), + node_info=runner.node.node_info, + ) + ) + + result = RunResult( + status=RunStatus.Error, # type: ignore + timing=[], + thread_id="", + execution_time=0.0, + adapter_response={}, + message=msg, + failures=None, + node=runner.node, + ) + # `_event_status` dict is only used for logging. Make sure # it gets deleted when we're done with it runner.node.clear_event_status() @@ -266,16 +308,7 @@ def callback(result): self.job_queue.mark_done(result.node.unique_id) while not self.job_queue.empty(): - node = self.job_queue.get() - self._raise_set_error() - runner = self.get_runner(node) - # we finally know what we're running! Make sure we haven't decided - # to skip it due to upstream failures - if runner.node.unique_id in self._skipped_children: - cause = self._skipped_children.pop(runner.node.unique_id) - runner.do_skip(cause=cause) - args = (runner,) - self._submit(pool, args, callback) + self.handle_job_queue(pool, callback) # block on completion if get_flags().FAIL_FAST: @@ -292,7 +325,20 @@ def callback(result): return - def _handle_result(self, result): + # The build command overrides this + def handle_job_queue(self, pool, callback): + node = self.job_queue.get() + self._raise_set_error() + runner = self.get_runner(node) + # we finally know what we're running! Make sure we haven't decided + # to skip it due to upstream failures + if runner.node.unique_id in self._skipped_children: + cause = self._skipped_children.pop(runner.node.unique_id) + runner.do_skip(cause=cause) + args = [runner] + self._submit(pool, args, callback) + + def _handle_result(self, result: RunResult) -> None: """Mark the result as completed, insert the `CompileResultNode` into the manifest, and mark any descendants (potentially with a 'cause' if the result was an ephemeral model) as skipped. 
@@ -306,6 +352,7 @@ def _handle_result(self, result): if self.manifest is None: raise DbtInternalError("manifest was None in _handle_result") + # If result.status == NodeStatus.Error, plus Fail for build command if result.status in self.MARK_DEPENDENT_ERRORS_STATUSES: if is_ephemeral: cause = result @@ -313,15 +360,6 @@ def _handle_result(self, result): cause = None self._mark_dependent_errors(node.unique_id, result, cause) - interim_run_result = self.get_result( - results=self.node_results, - elapsed_time=time.time() - self.started_at, - generated_at=datetime.utcnow(), - ) - - if self.args.write_json and hasattr(interim_run_result, "write"): - interim_run_result.write(self.result_path()) - def _cancel_connections(self, pool): """Given a pool, cancel all adapter connections and wait until all runners gentle terminates. @@ -350,17 +388,14 @@ def execute_nodes(self): num_threads = self.config.threads target_name = self.config.target_name - # following line can be removed when legacy logger is removed - with NodeCount(self.num_nodes): - fire_event( - ConcurrencyLine( - num_threads=num_threads, target_name=target_name, node_count=self.num_nodes - ) + fire_event( + ConcurrencyLine( + num_threads=num_threads, target_name=target_name, node_count=self.num_nodes ) - with TextOnly(): - fire_event(Formatting("")) + ) + fire_event(Formatting("")) - pool = ThreadPool(num_threads) + pool = ThreadPool(num_threads, self._pool_thread_initializer, [get_invocation_context()]) try: self.run_queue(pool) except FailFastError as failure: @@ -377,9 +412,19 @@ def execute_nodes(self): print_run_result_error(failure.result) # ensure information about all nodes is propagated to run results when failing fast return self.node_results - except KeyboardInterrupt: + except (KeyboardInterrupt, SystemExit): + run_result = self.get_result( + results=self.node_results, + elapsed_time=time.time() - self.started_at, + generated_at=datetime.utcnow(), + ) + + if self.args.write_json and hasattr(run_result, "write"): + run_result.write(self.result_path()) + self._cancel_connections(pool) print_run_end_messages(self.node_results, keyboard_interrupt=True) + raise pool.close() @@ -387,10 +432,23 @@ def execute_nodes(self): return self.node_results - def _mark_dependent_errors(self, node_id, result, cause): + @staticmethod + def _pool_thread_initializer(invocation_context): + _INVOCATION_CONTEXT_VAR.set(invocation_context) + + def _mark_dependent_errors( + self, node_id: str, result: RunResult, cause: Optional[RunResult] + ) -> None: if self.graph is None: raise DbtInternalError("graph is None in _mark_dependent_errors") - for dep_node_id in self.graph.get_dependent_nodes(node_id): + fire_event( + MarkSkippedChildren( + unique_id=node_id, + status=result.status, + run_result=result.to_msg_dict(), + ) + ) + for dep_node_id in self.graph.get_dependent_nodes(UniqueId(node_id)): self._skipped_children[dep_node_id] = cause def populate_adapter_cache( @@ -399,11 +457,21 @@ def populate_adapter_cache( if not self.args.populate_cache: return + if self.manifest is None: + raise DbtInternalError("manifest was None in populate_adapter_cache") + start_populate_cache = time.perf_counter() + # the cache only cares about executable nodes + cachable_nodes = [ + node + for node in self.manifest.nodes.values() + if (node.is_relational and not node.is_ephemeral_model and not node.is_external_node) + ] + if get_flags().CACHE_SELECTED_ONLY is True: - adapter.set_relations_cache(self.manifest, required_schemas=required_schemas) + 
adapter.set_relations_cache(cachable_nodes, required_schemas=required_schemas) else: - adapter.set_relations_cache(self.manifest) + adapter.set_relations_cache(cachable_nodes) cache_populate_time = time.perf_counter() - start_populate_cache if dbt.tracking.active_user is not None: dbt.tracking.track_runnable_timing( @@ -412,10 +480,10 @@ def populate_adapter_cache( def before_run(self, adapter, selected_uids: AbstractSet[str]): with adapter.connection_named("master"): + self.defer_to_manifest() self.populate_adapter_cache(adapter) - self.defer_to_manifest(adapter, selected_uids) - def after_run(self, adapter, results): + def after_run(self, adapter, results) -> None: pass def print_results_line(self, node_results, elapsed): @@ -443,7 +511,7 @@ def run(self): Run dbt for the query, based on the graph. """ # We set up a context manager here with "task_contextvars" because we - # we need the project_root in runtime_initialize. + # need the project_root in runtime_initialize. with task_contextvars(project_root=self.config.project_root): self._runtime_initialize() @@ -453,8 +521,7 @@ def run(self): ) if len(self._flattened_nodes) == 0: - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) warn_or_error(NothingToDo()) result = self.get_result( results=[], @@ -462,8 +529,7 @@ def run(self): elapsed_time=0.0, ) else: - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) selected_uids = frozenset(n.unique_id for n in self._flattened_nodes) result = self.execute_with_hooks(selected_uids) @@ -532,7 +598,7 @@ def create_schemas(self, adapter, required_schemas: Set[BaseRelation]): def list_schemas(db_only: BaseRelation) -> List[Tuple[Optional[str], str]]: # the database can be None on some warehouses that don't support it database_quoted: Optional[str] - db_lowercase = dbt.utils.lowercase(db_only.database) + db_lowercase = dbt_common.utils.formatting.lowercase(db_only.database) if db_only.database is None: database_quoted = None else: @@ -554,7 +620,9 @@ def create_schema(relation: BaseRelation) -> None: list_futures = [] create_futures = [] - with dbt.utils.executor(self.config) as tpe: + # TODO: following has a mypy issue because profile and project config + # defines threads as int and HasThreadingConfig defines it as Optional[int] + with dbt_common.utils.executor(self.config) as tpe: # type: ignore for req in required_databases: if req.database is None: name = "list_schemas" @@ -572,7 +640,7 @@ def create_schema(relation: BaseRelation) -> None: # skip this continue db: Optional[str] = info.database - db_lower: Optional[str] = dbt.utils.lowercase(db) + db_lower: Optional[str] = dbt_common.utils.formatting.lowercase(db) schema: str = info.schema db_schema = (db_lower, schema.lower()) @@ -584,7 +652,7 @@ def create_schema(relation: BaseRelation) -> None: create_futures.append(fut) for create_future in as_completed(create_futures): - # trigger/re-raise any excceptions while creating schemas + # trigger/re-raise any exceptions while creating schemas create_future.result() def get_result(self, results, elapsed_time, generated_at): @@ -595,10 +663,10 @@ def get_result(self, results, elapsed_time, generated_at): args=dbt.utils.args_to_dict(self.args), ) - def task_end_messages(self, results): + def task_end_messages(self, results) -> None: print_run_end_messages(results) - def _get_deferred_manifest(self) -> Optional[WritableManifest]: + def _get_previous_state(self) -> Optional[Manifest]: state = self.previous_defer_state or self.previous_state if not state: 
raise DbtRuntimeError( @@ -606,5 +674,8 @@ def _get_deferred_manifest(self) -> Optional[WritableManifest]: ) if not state.manifest: - raise DbtRuntimeError(f'Could not find manifest in --state path: "{state}"') + raise DbtRuntimeError(f'Could not find manifest in --state path: "{state.state_path}"') return state.manifest + + def _get_deferred_manifest(self) -> Optional[Manifest]: + return self._get_previous_state() if self.args.defer else None diff --git a/core/dbt/task/seed.py b/core/dbt/task/seed.py index 9ec1df3b81f..961a55c131b 100644 --- a/core/dbt/task/seed.py +++ b/core/dbt/task/seed.py @@ -1,31 +1,26 @@ import random +from typing import Optional, Type -from .run import ModelRunner, RunTask -from .printer import ( - print_run_end_messages, -) - -from dbt.contracts.results import RunStatus -from dbt.exceptions import DbtInternalError +from dbt.artifacts.schemas.results import NodeStatus, RunStatus +from dbt.contracts.graph.manifest import Manifest +from dbt.events.types import LogSeedResult, LogStartLine, SeedHeader from dbt.graph import ResourceTypeSelector -from dbt.logger import TextOnly -from dbt.events.functions import fire_event -from dbt.events.types import ( - SeedHeader, - Formatting, - LogSeedResult, - LogStartLine, -) -from dbt.events.base_types import EventLevel from dbt.node_types import NodeType -from dbt.contracts.results import NodeStatus +from dbt.task.base import BaseRunner +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Formatting +from dbt_common.exceptions import DbtInternalError + +from .printer import print_run_end_messages +from .run import ModelRunner, RunTask class SeedRunner(ModelRunner): - def describe_node(self): + def describe_node(self) -> str: return "seed file {}".format(self.get_node_representation()) - def before_execute(self): + def before_execute(self) -> None: fire_event( LogStartLine( description=self.describe_node(), @@ -41,7 +36,7 @@ def _build_run_model_result(self, model, context): result.agate_table = agate_result.table return result - def compile(self, manifest): + def compile(self, manifest: Manifest): return self.node def print_result_line(self, result): @@ -63,11 +58,7 @@ def print_result_line(self, result): class SeedTask(RunTask): - def defer_to_manifest(self, adapter, selected_uids): - # seeds don't defer - return - - def raise_on_first_error(self): + def raise_on_first_error(self) -> bool: return False def get_node_selector(self): @@ -80,10 +71,10 @@ def get_node_selector(self): resource_types=[NodeType.Seed], ) - def get_runner_type(self, _): + def get_runner_type(self, _) -> Optional[Type[BaseRunner]]: return SeedRunner - def task_end_messages(self, results): + def task_end_messages(self, results) -> None: if self.args.show: self.show_tables(results) @@ -97,14 +88,12 @@ def show_table(self, result): alias = result.node.alias header = "Random sample of table: {}.{}".format(schema, alias) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) fire_event(SeedHeader(header=header)) fire_event(Formatting("-" * len(header))) rand_table.print_table(max_rows=10, max_columns=None) - with TextOnly(): - fire_event(Formatting("")) + fire_event(Formatting("")) def show_tables(self, results): for result in results: diff --git a/core/dbt/task/show.py b/core/dbt/task/show.py index f9af847e874..0fb6551bf94 100644 --- a/core/dbt/task/show.py +++ b/core/dbt/task/show.py @@ -2,18 +2,20 @@ import threading import time +from 
dbt.artifacts.schemas.run import RunResult, RunStatus +from dbt.context.providers import generate_runtime_model_context from dbt.contracts.graph.nodes import SeedNode -from dbt.contracts.results import RunResult, RunStatus -from dbt.events.base_types import EventLevel -from dbt.events.functions import fire_event -from dbt.events.types import ShowNode, Note -from dbt.exceptions import DbtRuntimeError -from dbt.task.compile import CompileTask, CompileRunner +from dbt.events.types import ShowNode +from dbt.task.compile import CompileRunner, CompileTask from dbt.task.seed import SeedRunner +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event +from dbt_common.events.types import Note +from dbt_common.exceptions import DbtRuntimeError class ShowRunner(CompileRunner): - def __init__(self, config, adapter, node, node_index, num_nodes): + def __init__(self, config, adapter, node, node_index, num_nodes) -> None: super().__init__(config, adapter, node, node_index, num_nodes) self.run_ephemeral_models = True @@ -23,14 +25,21 @@ def execute(self, compiled_node, manifest): # Allow passing in -1 (or any negative number) to get all rows limit = None if self.config.args.limit < 0 else self.config.args.limit - if "sql_header" in compiled_node.unrendered_config: - compiled_node.compiled_code = ( - compiled_node.unrendered_config["sql_header"] + compiled_node.compiled_code - ) - + model_context = generate_runtime_model_context(compiled_node, self.config, manifest) + compiled_node.compiled_code = self.adapter.execute_macro( + macro_name="get_show_sql", + macro_resolver=manifest, + context_override=model_context, + kwargs={ + "compiled_code": model_context["compiled_code"], + "sql_header": model_context["config"].get("sql_header"), + "limit": limit, + }, + ) adapter_response, execute_result = self.adapter.execute( - compiled_node.compiled_code, fetch=True, limit=limit + compiled_node.compiled_code, fetch=True ) + end_time = time.time() return RunResult( @@ -58,7 +67,7 @@ def get_runner_type(self, node): else: return ShowRunner - def task_end_messages(self, results): + def task_end_messages(self, results) -> None: is_inline = bool(getattr(self.args, "inline", None)) if is_inline: @@ -99,7 +108,7 @@ def task_end_messages(self, results): ) ) - def _handle_result(self, result): + def _handle_result(self, result) -> None: super()._handle_result(result) if ( diff --git a/core/dbt/task/snapshot.py b/core/dbt/task/snapshot.py index 3b66cb21475..3d8873f21fc 100644 --- a/core/dbt/task/snapshot.py +++ b/core/dbt/task/snapshot.py @@ -1,17 +1,20 @@ -from .run import ModelRunner, RunTask +from typing import Optional, Type -from dbt.exceptions import DbtInternalError -from dbt.events.functions import fire_event -from dbt.events.base_types import EventLevel +from dbt.artifacts.schemas.results import NodeStatus from dbt.events.types import LogSnapshotResult from dbt.graph import ResourceTypeSelector from dbt.node_types import NodeType -from dbt.contracts.results import NodeStatus -from dbt.utils import cast_dict_to_dict_of_strings +from dbt.task.base import BaseRunner +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtInternalError +from dbt_common.utils import cast_dict_to_dict_of_strings + +from .run import ModelRunner, RunTask class SnapshotRunner(ModelRunner): - def describe_node(self): + def describe_node(self) -> str: return "snapshot {}".format(self.get_node_representation()) def 
print_result_line(self, result): @@ -27,13 +30,14 @@ def print_result_line(self, result): total=self.num_nodes, execution_time=result.execution_time, node_info=model.node_info, + result_message=result.message, ), level=level, ) class SnapshotTask(RunTask): - def raise_on_first_error(self): + def raise_on_first_error(self) -> bool: return False def get_node_selector(self): @@ -46,5 +50,5 @@ def get_node_selector(self): resource_types=[NodeType.Snapshot], ) - def get_runner_type(self, _): + def get_runner_type(self, _) -> Optional[Type[BaseRunner]]: return SnapshotRunner diff --git a/core/dbt/task/sql.py b/core/dbt/task/sql.py index 4f662383d74..ab8c89d6b98 100644 --- a/core/dbt/task/sql.py +++ b/core/dbt/task/sql.py @@ -1,43 +1,48 @@ +import traceback from abc import abstractmethod from datetime import datetime from typing import Generic, TypeVar -import traceback import dbt.exceptions +import dbt_common.exceptions.base +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.sql import ( RemoteCompileResult, RemoteCompileResultMixin, RemoteRunResult, ResultTable, ) -from dbt.events.functions import fire_event from dbt.events.types import SQLRunnerException from dbt.task.compile import CompileRunner - +from dbt_common.events.functions import fire_event SQLResult = TypeVar("SQLResult", bound=RemoteCompileResultMixin) class GenericSqlRunner(CompileRunner, Generic[SQLResult]): - def __init__(self, config, adapter, node, node_index, num_nodes): + def __init__(self, config, adapter, node, node_index, num_nodes) -> None: CompileRunner.__init__(self, config, adapter, node, node_index, num_nodes) def handle_exception(self, e, ctx): - fire_event(SQLRunnerException(exc=str(e), exc_info=traceback.format_exc())) + fire_event( + SQLRunnerException( + exc=str(e), exc_info=traceback.format_exc(), node_info=self.node.node_info + ) + ) + # REVIEW: This code is invalid and will always throw. if isinstance(e, dbt.exceptions.Exception): - if isinstance(e, dbt.exceptions.DbtRuntimeError): + if isinstance(e, dbt_common.exceptions.DbtRuntimeError): e.add_node(ctx.node) return e - def before_execute(self): + def before_execute(self) -> None: pass - def after_execute(self, result): + def after_execute(self, result) -> None: pass - def compile(self, manifest): - compiler = self.adapter.get_compiler() - return compiler.compile_node(self.node, manifest, {}, write=False) + def compile(self, manifest: Manifest): + return self.compiler.compile_node(self.node, manifest, {}, write=False) @abstractmethod def execute(self, compiled_node, manifest) -> SQLResult: @@ -51,7 +56,9 @@ def error_result(self, node, error, start_time, timing_info): raise error def ephemeral_result(self, node, start_time, timing_info): - raise dbt.exceptions.NotImplementedError("cannot execute ephemeral nodes remotely!") + raise dbt_common.exceptions.base.NotImplementedError( + "cannot execute ephemeral nodes remotely!" 
+ ) class SqlCompileRunner(GenericSqlRunner[RemoteCompileResult]): @@ -61,7 +68,6 @@ def execute(self, compiled_node, manifest) -> RemoteCompileResult: compiled_code=compiled_node.compiled_code, node=compiled_node, timing=[], # this will get added later - logs=[], generated_at=datetime.utcnow(), ) @@ -71,7 +77,6 @@ def from_run_result(self, result, start_time, timing_info) -> RemoteCompileResul compiled_code=result.compiled_code, node=result.node, timing=timing_info, - logs=[], generated_at=datetime.utcnow(), ) @@ -91,7 +96,6 @@ def execute(self, compiled_node, manifest) -> RemoteRunResult: node=compiled_node, table=table, timing=[], - logs=[], generated_at=datetime.utcnow(), ) @@ -102,6 +106,5 @@ def from_run_result(self, result, start_time, timing_info) -> RemoteRunResult: node=result.node, table=result.table, timing=timing_info, - logs=[], generated_at=datetime.utcnow(), ) diff --git a/core/dbt/task/test.py b/core/dbt/task/test.py index 0f5e4ca99d0..356328a4263 100644 --- a/core/dbt/task/test.py +++ b/core/dbt/task/test.py @@ -1,37 +1,62 @@ -from distutils.util import strtobool - -from dataclasses import dataclass -from dbt.utils import _coerce_decimal -from dbt.events.format import pluralize -from dbt.dataclass_schema import dbtClassMixin +import io +import json +import re import threading -from typing import Dict, Any +from dataclasses import dataclass +from typing import ( + TYPE_CHECKING, + Any, + Collection, + Dict, + List, + Optional, + Tuple, + Type, + Union, +) -from .compile import CompileRunner -from .run import RunTask +import daff +from dbt.adapters.exceptions import MissingMaterializationError +from dbt.artifacts.schemas.catalog import PrimitiveDict +from dbt.artifacts.schemas.results import TestStatus +from dbt.artifacts.schemas.run import RunResult +from dbt.clients.jinja import MacroGenerator +from dbt.context.providers import generate_runtime_model_context +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( + GenericTestNode, + SingularTestNode, TestNode, + UnitTestDefinition, + UnitTestNode, ) -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.results import TestStatus, PrimitiveDict, RunResult -from dbt.context.providers import generate_runtime_model_context -from dbt.clients.jinja import MacroGenerator -from dbt.events.functions import fire_event -from dbt.events.types import ( - LogTestResult, - LogStartLine, -) -from dbt.exceptions import ( - DbtInternalError, - BooleanError, - MissingMaterializationError, -) -from dbt.graph import ( - ResourceTypeSelector, -) -from dbt.node_types import NodeType +from dbt.events.types import LogStartLine, LogTestResult +from dbt.exceptions import BooleanError, DbtInternalError from dbt.flags import get_flags +from dbt.graph import ResourceTypeSelector +from dbt.node_types import TEST_NODE_TYPES, NodeType +from dbt.parser.unit_tests import UnitTestManifestLoader +from dbt.task.base import BaseRunner, resource_types_from_args +from dbt.utils import _coerce_decimal, strtobool +from dbt_common.dataclass_schema import dbtClassMixin +from dbt_common.events.format import pluralize +from dbt_common.events.functions import fire_event +from dbt_common.exceptions import DbtBaseException, DbtRuntimeError +from dbt_common.ui import green, red + +from .compile import CompileRunner +from .run import RunTask + +if TYPE_CHECKING: + import agate + + +@dataclass +class UnitTestDiff(dbtClassMixin): + actual: List[Dict[str, Any]] + expected: List[Dict[str, Any]] + rendered: str @dataclass @@ 
-59,17 +84,33 @@ def convert_bool_type(field) -> bool: return bool(field) +@dataclass +class UnitTestResultData(dbtClassMixin): + should_error: bool + adapter_response: Dict[str, Any] + diff: Optional[UnitTestDiff] = None + + class TestRunner(CompileRunner): - def describe_node(self): - node_name = self.node.name - return "test {}".format(node_name) + _ANSI_ESCAPE = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") + _LOG_TEST_RESULT_EVENTS = LogTestResult + + def describe_node_name(self) -> str: + if self.node.resource_type == NodeType.Unit: + name = f"{self.node.model}::{self.node.versioned_name}" + return name + else: + return self.node.name + + def describe_node(self) -> str: + return f"{self.node.resource_type} {self.describe_node_name()}" def print_result_line(self, result): model = result.node fire_event( - LogTestResult( - name=model.name, + self._LOG_TEST_RESULT_EVENTS( + name=self.describe_node_name(), status=str(result.status), index=self.node_index, num_models=self.num_nodes, @@ -90,19 +131,21 @@ def print_start_line(self): ) ) - def before_execute(self): + def before_execute(self) -> None: self.print_start_line() - def execute_test(self, test: TestNode, manifest: Manifest) -> TestResultData: - context = generate_runtime_model_context(test, self.config, manifest) + def execute_data_test(self, data_test: TestNode, manifest: Manifest) -> TestResultData: + context = generate_runtime_model_context(data_test, self.config, manifest) + + hook_ctx = self.adapter.pre_model_hook(context["config"]) materialization_macro = manifest.find_materialization_macro_by_name( - self.config.project_name, test.get_materialization(), self.adapter.type() + self.config.project_name, data_test.get_materialization(), self.adapter.type() ) if materialization_macro is None: raise MissingMaterializationError( - materialization=test.get_materialization(), adapter_type=self.adapter.type() + materialization=data_test.get_materialization(), adapter_type=self.adapter.type() ) if "config" not in context: @@ -112,8 +155,12 @@ def execute_test(self, test: TestNode, manifest: Manifest) -> TestResultData: # generate materialization macro macro_func = MacroGenerator(materialization_macro, context) - # execute materialization macro - macro_func() + try: + # execute materialization macro + macro_func() + finally: + self.adapter.post_model_hook(context, hook_ctx) + # load results from context # could eventually be returned directly by materialization result = context["load_result"]("main") @@ -121,14 +168,14 @@ def execute_test(self, test: TestNode, manifest: Manifest) -> TestResultData: num_rows = len(table.rows) if num_rows != 1: raise DbtInternalError( - f"dbt internally failed to execute {test.unique_id}: " + f"dbt internally failed to execute {data_test.unique_id}: " f"Returned {num_rows} rows, but expected " f"1 row" ) num_cols = len(table.columns) if num_cols != 3: raise DbtInternalError( - f"dbt internally failed to execute {test.unique_id}: " + f"dbt internally failed to execute {data_test.unique_id}: " f"Returned {num_cols} columns, but expected " f"3 columns" ) @@ -143,9 +190,102 @@ def execute_test(self, test: TestNode, manifest: Manifest) -> TestResultData: TestResultData.validate(test_result_dct) return TestResultData.from_dict(test_result_dct) - def execute(self, test: TestNode, manifest: Manifest): - result = self.execute_test(test, manifest) + def build_unit_test_manifest_from_test( + self, unit_test_def: UnitTestDefinition, manifest: Manifest + ) -> Manifest: + # build a unit test manifest with only 
the test from this UnitTestDefinition + loader = UnitTestManifestLoader(manifest, self.config, {unit_test_def.unique_id}) + return loader.load() + + def execute_unit_test( + self, unit_test_def: UnitTestDefinition, manifest: Manifest + ) -> Tuple[UnitTestNode, UnitTestResultData]: + + unit_test_manifest = self.build_unit_test_manifest_from_test(unit_test_def, manifest) + + # The unit test node and definition have the same unique_id + unit_test_node = unit_test_manifest.nodes[unit_test_def.unique_id] + assert isinstance(unit_test_node, UnitTestNode) + + # Compile the node + unit_test_node = self.compiler.compile_node(unit_test_node, unit_test_manifest, {}) + assert isinstance(unit_test_node, UnitTestNode) + + # generate_runtime_unit_test_context not strictly needed - this is to run the 'unit' + # materialization, not compile the node.compiled_code + context = generate_runtime_model_context(unit_test_node, self.config, unit_test_manifest) + + hook_ctx = self.adapter.pre_model_hook(context["config"]) + + materialization_macro = unit_test_manifest.find_materialization_macro_by_name( + self.config.project_name, unit_test_node.get_materialization(), self.adapter.type() + ) + + if materialization_macro is None: + raise MissingMaterializationError( + materialization=unit_test_node.get_materialization(), + adapter_type=self.adapter.type(), + ) + + if "config" not in context: + raise DbtInternalError( + "Invalid materialization context generated, missing config: {}".format(context) + ) + + # generate materialization macro + macro_func = MacroGenerator(materialization_macro, context) + try: + # execute materialization macro + macro_func() + except DbtBaseException as e: + raise DbtRuntimeError( + f"An error occurred during execution of unit test '{unit_test_def.name}'. 
" + f"There may be an error in the unit test definition: check the data types.\n {e}" + ) + finally: + self.adapter.post_model_hook(context, hook_ctx) + + # load results from context + # could eventually be returned directly by materialization + result = context["load_result"]("main") + adapter_response = result["response"].to_dict(omit_none=True) + table = result["table"] + actual = self._get_unit_test_agate_table(table, "actual") + expected = self._get_unit_test_agate_table(table, "expected") + + # generate diff, if exists + should_error, diff = False, None + daff_diff = self._get_daff_diff(expected, actual) + if daff_diff.hasDifference(): + should_error = True + rendered = self._render_daff_diff(daff_diff) + rendered = f"\n\n{green('actual')} differs from {red('expected')}:\n\n{rendered}\n" + + diff = UnitTestDiff( + actual=json_rows_from_table(actual), + expected=json_rows_from_table(expected), + rendered=rendered, + ) + + unit_test_result_data = UnitTestResultData( + diff=diff, + should_error=should_error, + adapter_response=adapter_response, + ) + + return unit_test_node, unit_test_result_data + + def execute(self, test: Union[TestNode, UnitTestNode], manifest: Manifest): + if isinstance(test, UnitTestDefinition): + unit_test_node, unit_test_result = self.execute_unit_test(test, manifest) + return self.build_unit_test_run_result(unit_test_node, unit_test_result) + else: + # Note: manifest here is a normal manifest + assert isinstance(test, (SingularTestNode, GenericTestNode)) + test_result = self.execute_data_test(test, manifest) + return self.build_test_run_result(test, test_result) + def build_test_run_result(self, test: TestNode, result: TestResultData) -> RunResult: severity = test.config.severity.upper() thread_id = threading.current_thread().name num_errors = pluralize(result.failures, "result") @@ -157,7 +297,9 @@ def execute(self, test: TestNode, manifest: Manifest): message = f"Got {num_errors}, configured to fail if {test.config.error_if}" failures = result.failures elif result.should_warn: - if get_flags().WARN_ERROR: + if get_flags().WARN_ERROR or get_flags().WARN_ERROR_OPTIONS.includes( + self._LOG_TEST_RESULT_EVENTS.__name__ + ): status = TestStatus.Fail message = f"Got {num_errors}, configured to fail if {test.config.warn_if}" else: @@ -167,6 +309,31 @@ def execute(self, test: TestNode, manifest: Manifest): else: status = TestStatus.Pass + run_result = RunResult( + node=test, + status=status, + timing=[], + thread_id=thread_id, + execution_time=0, + message=message, + adapter_response=result.adapter_response, + failures=failures, + ) + return run_result + + def build_unit_test_run_result( + self, test: UnitTestNode, result: UnitTestResultData + ) -> RunResult: + thread_id = threading.current_thread().name + + status = TestStatus.Pass + message = None + failures = 0 + if result.should_error: + status = TestStatus.Fail + message = result.diff.rendered if result.diff else None + failures = 1 + return RunResult( node=test, status=status, @@ -178,18 +345,44 @@ def execute(self, test: TestNode, manifest: Manifest): failures=failures, ) - def after_execute(self, result): + def after_execute(self, result) -> None: self.print_result_line(result) - -class TestSelector(ResourceTypeSelector): - def __init__(self, graph, manifest, previous_state): - super().__init__( - graph=graph, - manifest=manifest, - previous_state=previous_state, - resource_types=[NodeType.Test], + def _get_unit_test_agate_table(self, result_table, actual_or_expected: str): + unit_test_table = 
result_table.where( + lambda row: row["actual_or_expected"] == actual_or_expected ) + columns = list(unit_test_table.columns.keys()) + columns.remove("actual_or_expected") + return unit_test_table.select(columns) + + def _get_daff_diff( + self, expected: "agate.Table", actual: "agate.Table", ordered: bool = False + ) -> daff.TableDiff: + # Sort expected and actual inputs prior to creating daff diff to ensure order insensitivity + # https://github.com/paulfitz/daff/issues/200 + expected_daff_table = daff.PythonTableView(list_rows_from_table(expected, sort=True)) + actual_daff_table = daff.PythonTableView(list_rows_from_table(actual, sort=True)) + + flags = daff.CompareFlags() + flags.ordered = ordered + + alignment = daff.Coopy.compareTables(expected_daff_table, actual_daff_table, flags).align() + result = daff.PythonTableView([]) + + diff = daff.TableDiff(alignment, flags) + diff.hilite(result) + return diff + + def _render_daff_diff(self, daff_diff: daff.TableDiff) -> str: + result = daff.PythonTableView([]) + daff_diff.hilite(result) + rendered = daff.TerminalDiffRender().render(result) + # strip colors if necessary + if not self.config.args.use_colors: + rendered = self._ANSI_ESCAPE.sub("", rendered) + + return rendered class TestTask(RunTask): @@ -201,17 +394,62 @@ class TestTask(RunTask): __test__ = False - def raise_on_first_error(self): + def raise_on_first_error(self) -> bool: return False - def get_node_selector(self) -> TestSelector: + @property + def resource_types(self) -> List[NodeType]: + resource_types: Collection[NodeType] = resource_types_from_args( + self.args, set(TEST_NODE_TYPES), set(TEST_NODE_TYPES) + ) + + # filter out any non-test node types + resource_types = [rt for rt in resource_types if rt in TEST_NODE_TYPES] + return list(resource_types) + + def get_node_selector(self) -> ResourceTypeSelector: if self.manifest is None or self.graph is None: raise DbtInternalError("manifest and graph must be set to get perform node selection") - return TestSelector( + return ResourceTypeSelector( graph=self.graph, manifest=self.manifest, previous_state=self.previous_state, + resource_types=self.resource_types, ) - def get_runner_type(self, _): + def get_runner_type(self, _) -> Optional[Type[BaseRunner]]: return TestRunner + + +# This was originally in agate_helper, but that was moved out into dbt_common +def json_rows_from_table(table: "agate.Table") -> List[Dict[str, Any]]: + "Convert a table to a list of row dict objects" + output = io.StringIO() + table.to_json(path=output) # type: ignore + + return json.loads(output.getvalue()) + + +# This was originally in agate_helper, but that was moved out into dbt_common +def list_rows_from_table(table: "agate.Table", sort: bool = False) -> List[Any]: + """ + Convert given table to a list of lists, where the first element represents the header + + By default, sort is False and no sort order is applied to the non-header rows of the given table. + + If sort is True, sort the non-header rows hierarchically, treating None values as lower in order. 
+ Examples: + * [['a','b','c'],[4,5,6],[1,2,3]] -> [['a','b','c'],[1,2,3],[4,5,6]] + * [['a','b','c'],[4,5,6],[1,null,3]] -> [['a','b','c'],[1,null,3],[4,5,6]] + * [['a','b','c'],[4,5,6],[null,2,3]] -> [['a','b','c'],[4,5,6],[null,2,3]] + """ + header = [col.name for col in table.columns] + + rows = [] + for row in table.rows: + rows.append(list(row.values())) + + if sort: + rows = sorted(rows, key=lambda x: [(elem is None, elem) for elem in x]) + + return [header] + rows diff --git a/core/dbt/tests/fixtures/project.py b/core/dbt/tests/fixtures/project.py index 1b7ef899bd0..a12638b16a2 100644 --- a/core/dbt/tests/fixtures/project.py +++ b/core/dbt/tests/fixtures/project.py @@ -1,23 +1,35 @@ import os -import pytest # type: ignore import random from argparse import Namespace from datetime import datetime -import warnings +from pathlib import Path +from typing import Mapping + +import pytest # type: ignore import yaml -from dbt.exceptions import CompilationError, DbtDatabaseError import dbt.flags as flags +from dbt.adapters.factory import ( + get_adapter, + get_adapter_by_type, + register_adapter, + reset_adapters, +) from dbt.config.runtime import RuntimeConfig -from dbt.adapters.factory import get_adapter, register_adapter, reset_adapters, get_adapter_by_type -from dbt.events.functions import setup_event_logger, cleanup_event_logger +from dbt.context.providers import generate_runtime_macro_context +from dbt.events.logging import setup_event_logger +from dbt.mp_context import get_mp_context +from dbt.parser.manifest import ManifestLoader from dbt.tests.util import ( - write_file, - run_sql_with_adapter, TestProcessingException, get_connection, + run_sql_with_adapter, + write_file, ) - +from dbt_common.context import set_invocation_context +from dbt_common.events.event_manager_client import cleanup_event_logger +from dbt_common.exceptions import CompilationError, DbtDatabaseError +from dbt_common.tests import enable_test_caching # These are the fixtures that are used in dbt core functional tests # @@ -142,7 +154,6 @@ def profiles_config_update(): @pytest.fixture(scope="class") def dbt_profile_data(unique_schema, dbt_profile_target, profiles_config_update): profile = { - "config": {"send_anonymous_usage_stats": False}, "test": { "outputs": { "default": {}, @@ -181,6 +192,7 @@ def dbt_project_yml(project_root, project_config_update): project_config = { "name": "test", "profile": "test", + "flags": {"send_anonymous_usage_stats": False}, } if project_config_update: if isinstance(project_config_update, dict): @@ -265,7 +277,13 @@ def clean_up_logging(): # into the project in the tests instead of putting them in the fixtures. @pytest.fixture(scope="class") def adapter( - unique_schema, project_root, profiles_root, profiles_yml, dbt_project_yml, clean_up_logging + logs_dir, + unique_schema, + project_root, + profiles_root, + profiles_yml, + clean_up_logging, + dbt_project_yml, ): # The profiles.yml and dbt_project.yml should already be written out args = Namespace( @@ -277,11 +295,18 @@ def adapter( ) flags.set_from_args(args, {}) runtime_config = RuntimeConfig.from_args(args) - register_adapter(runtime_config) + register_adapter(runtime_config, get_mp_context()) adapter = get_adapter(runtime_config) # We only need the base macros, not macros from dependencies, and don't want # to run 'dbt deps' here. 
- adapter.load_macro_manifest(base_macros_only=True) + manifest = ManifestLoader.load_macros( + runtime_config, + adapter.connections.set_query_header, + base_macros_only=True, + ) + + adapter.set_macro_resolver(manifest) + adapter.set_macro_context_generator(generate_runtime_macro_context) yield adapter adapter.cleanup_connections() reset_adapters() @@ -316,6 +341,7 @@ def write_project_files_recursively(path, file_dict): # Provide a dictionary of file names to contents. Nested directories # are handle by nested dictionaries. + # models directory @pytest.fixture(scope="class") def models(): @@ -360,7 +386,20 @@ def analyses(): # Write out the files provided by models, macros, properties, snapshots, seeds, tests, analyses @pytest.fixture(scope="class") -def project_files(project_root, models, macros, snapshots, properties, seeds, tests, analyses): +def project_files( + project_root, + models, + macros, + snapshots, + properties, + seeds, + tests, + analyses, + selectors_yml, + dependencies_yml, + packages_yml, + dbt_project_yml, +): write_project_files(project_root, "models", {**models, **properties}) write_project_files(project_root, "macros", macros) write_project_files(project_root, "snapshots", snapshots) @@ -374,12 +413,12 @@ def project_files(project_root, models, macros, snapshots, properties, seeds, te def logs_dir(request, prefix): dbt_log_dir = os.path.join(request.config.rootdir, "logs", prefix) os.environ["DBT_LOG_PATH"] = str(dbt_log_dir) - yield dbt_log_dir + yield str(Path(dbt_log_dir)) del os.environ["DBT_LOG_PATH"] # This fixture is for customizing tests that need overrides in adapter -# repos. Example in dbt.tests.adapter.basic.test_base. +# repos. Example in tests.functional.adapter.basic.test_base. @pytest.fixture(scope="class") def test_config(): return {} @@ -442,6 +481,14 @@ def create_test_schema(self, schema_name=None): # Drop the unique test schema, usually called in test cleanup def drop_test_schema(self): + if self.adapter.get_macro_resolver() is None: + manifest = ManifestLoader.load_macros( + self.adapter.config, + self.adapter.connections.set_query_header, + base_macros_only=True, + ) + self.adapter.set_macro_resolver(manifest) + with get_connection(self.adapter): for schema_name in self.created_schemas: relation = self.adapter.Relation.create(database=self.database, schema=schema_name) @@ -465,32 +512,38 @@ def get_tables_in_schema(self): return {model_name: materialization for (model_name, materialization) in result} -# This is the main fixture that is used in all functional tests. It pulls in the other -# fixtures that are necessary to set up a dbt project, and saves some of the information -# in a TestProjInfo class, which it returns, so that individual test cases do not have -# to pull in the other fixtures individually to access their information. @pytest.fixture(scope="class") -def project( +def environment() -> Mapping[str, str]: + # By default, fixture initialization is done with the following environment + # from the os, but this fixture provides a way to customize the environment. + return os.environ + + +# Housekeeping that needs to be done before we start setting up any test fixtures. +@pytest.fixture(scope="class") +def initialization(environment) -> None: + # Create an "invocation context," which dbt application code relies on. + set_invocation_context(environment) + + # Enable caches used between test runs, for better testing performance. 
+ enable_test_caching() + + +@pytest.fixture(scope="class") +def project_setup( + initialization, clean_up_logging, project_root, profiles_root, request, unique_schema, profiles_yml, - dbt_project_yml, - packages_yml, - dependencies_yml, - selectors_yml, adapter, - project_files, shared_data_dir, test_data_dir, logs_dir, test_config, ): - # Logbook warnings are ignored so we don't have to fork logbook to support python 3.10. - # This _only_ works for tests in `tests/` that use the project fixture. - warnings.filterwarnings("ignore", category=DeprecationWarning, module="logbook") log_flags = Namespace( LOG_PATH=logs_dir, LOG_FORMAT="json", @@ -539,3 +592,16 @@ def project( pass os.chdir(orig_cwd) cleanup_event_logger() + + +# This is the main fixture that is used in all functional tests. It pulls in the other +# fixtures that are necessary to set up a dbt project, and saves some of the information +# in a TestProjInfo class, which it returns, so that individual test cases do not have +# to pull in the other fixtures individually to access their information. +# The order of arguments here determine which steps runs first. +@pytest.fixture(scope="class") +def project( + project_setup: TestProjInfo, + project_files, +): + return project_setup diff --git a/core/dbt/tests/util.py b/core/dbt/tests/util.py index 1bd41d4bfb9..19eeecd470c 100644 --- a/core/dbt/tests/util.py +++ b/core/dbt/tests/util.py @@ -1,26 +1,30 @@ -from io import StringIO +import json import os import shutil -import yaml -import json -import warnings -from datetime import datetime -from typing import Dict, List, Optional from contextlib import contextmanager -from dbt.adapters.factory import Adapter +from contextvars import ContextVar, copy_context +from datetime import datetime +from io import StringIO +from typing import Any, Dict, List, Optional +from unittest import mock +import pytz +import yaml + +from dbt.adapters.base.relation import BaseRelation +from dbt.adapters.factory import Adapter from dbt.cli.main import dbtRunner -from dbt.logger import log_manager from dbt.contracts.graph.manifest import Manifest -from dbt.events.functions import ( - fire_event, +from dbt.materializations.incremental.microbatch import MicrobatchBuilder +from dbt_common.context import _INVOCATION_CONTEXT_VAR, InvocationContext +from dbt_common.events.base_types import EventLevel +from dbt_common.events.functions import ( capture_stdout_logs, - stop_capture_stdout_logs, + fire_event, reset_metadata_vars, + stop_capture_stdout_logs, ) -from dbt.events.base_types import EventLevel -from dbt.events.types import Note -from dbt.adapters.base.relation import BaseRelation +from dbt_common.events.types import Note # ============================================================================= # Test utilities @@ -68,20 +72,14 @@ # The first parameter is a list of dbt command line arguments, such as # run_dbt(["run", "--vars", "seed_name: base"]) # If the command is expected to fail, pass in "expect_pass=False"): -# run_dbt("test"], expect_pass=False) +# run_dbt(["test"], expect_pass=False) def run_dbt( args: Optional[List[str]] = None, expect_pass: bool = True, ): - # Ignore logbook warnings - warnings.filterwarnings("ignore", category=DeprecationWarning, module="logbook") - # reset global vars reset_metadata_vars() - # The logger will complain about already being initialized if - # we don't do this. 
- log_manager.reset_handlers() if args is None: args = ["run"] @@ -95,8 +93,8 @@ def run_dbt( args.extend(["--project-dir", project_dir]) if profiles_dir and "--profiles-dir" not in args: args.extend(["--profiles-dir", profiles_dir]) - dbt = dbtRunner() + res = dbt.invoke(args) # the exception is immediately raised to be caught in tests @@ -149,17 +147,27 @@ def get_logging_events(log_output, event_name): # Used in test cases to get the manifest from the partial parsing file # Note: this uses an internal version of the manifest, and in the future # parts of it will not be supported for external use. -def get_manifest(project_root): +def get_manifest(project_root) -> Optional[Manifest]: path = os.path.join(project_root, "target", "partial_parse.msgpack") if os.path.exists(path): with open(path, "rb") as fp: manifest_mp = fp.read() - manifest: Manifest = Manifest.from_msgpack(manifest_mp) + manifest: Manifest = Manifest.from_msgpack(manifest_mp) # type: ignore[attr-defined] return manifest else: return None +# Used in test cases to get the run_results.json file. +def get_run_results(project_root) -> Any: + path = os.path.join(project_root, "target", "run_results.json") + if os.path.exists(path): + with open(path) as run_result_text: + return json.load(run_result_text) + else: + return None + + # Used in tests to copy a file, usually from a data directory to the project directory def copy_file(src_path, src, dest_path, dest) -> None: # dest is a list, so that we can provide nested directories, like 'models' etc. @@ -214,6 +222,10 @@ def rm_dir(directory_path): raise FileNotFoundError(f"{directory_path} does not exist.") +def rename_dir(src_directory_path, dest_directory_path): + os.rename(src_directory_path, dest_directory_path) + + # Get an artifact (usually from the target directory) such as # manifest.json or catalog.json to use in a test def get_artifact(*paths): @@ -283,6 +295,7 @@ class TestProcessingException(Exception): # Testing utilities that use adapter code + # Uses: # adapter.config.credentials # adapter.quote @@ -617,3 +630,21 @@ def get_model_file(project, relation: BaseRelation) -> str: def set_model_file(project, relation: BaseRelation, model_sql: str): write_file(model_sql, project.project_root, "models", f"{relation.name}.sql") + + +def safe_set_invocation_context(): + """In order to deal with a problem with the way the pytest runner interacts + with ContextVars, this function provides a mechanism for setting the + invocation context reliably, using its name rather than the reference + variable, which may have been loaded in a separate context.""" + invocation_var: Optional[ContextVar] = next( + iter([cv for cv in copy_context() if cv.name == _INVOCATION_CONTEXT_VAR.name]), None + ) + if invocation_var is None: + invocation_var = _INVOCATION_CONTEXT_VAR + invocation_var.set(InvocationContext(os.environ)) + + +def patch_microbatch_end_time(dt_str: str): + dt = datetime.strptime(dt_str, "%Y-%m-%d %H:%M:%S").replace(tzinfo=pytz.UTC) + return mock.patch.object(MicrobatchBuilder, "build_end_time", return_value=dt) diff --git a/core/dbt/tracking.py b/core/dbt/tracking.py index f306d9f0b00..7d648d86e03 100644 --- a/core/dbt/tracking.py +++ b/core/dbt/tracking.py @@ -6,15 +6,17 @@ from datetime import datetime from typing import Optional -import logbook import pytz import requests +from packaging.version import Version from snowplow_tracker import Emitter, SelfDescribingJson, Subject, Tracker +from snowplow_tracker import __version__ as snowplow_version # type: ignore from 
snowplow_tracker import logger as sp_logger +from snowplow_tracker.events import StructuredEvent from dbt import version as dbt_version +from dbt.adapters.exceptions import FailedToConnectError from dbt.clients.yaml_helper import safe_load, yaml # noqa:F401 -from dbt.events.functions import fire_event, get_invocation_id from dbt.events.types import ( DisableTracking, FlushEvents, @@ -24,7 +26,9 @@ SendingEvent, TrackingInitializeFailure, ) -from dbt.exceptions import FailedToConnectError, NotImplementedError +from dbt_common.events.base_types import EventMsg +from dbt_common.events.functions import fire_event, get_invocation_id, msg_to_dict +from dbt_common.exceptions import NotImplementedError sp_logger.setLevel(100) @@ -34,6 +38,7 @@ ADAPTER_INFO_SPEC = "iglu:com.dbt/adapter_info/jsonschema/1-0-1" DEPRECATION_WARN_SPEC = "iglu:com.dbt/deprecation_warn/jsonschema/1-0-0" +BEHAVIOR_CHANGE_WARN_SPEC = "iglu:com.dbt/behavior_change_warn/jsonschema/1-0-0" EXPERIMENTAL_PARSER = "iglu:com.dbt/experimental_parser/jsonschema/1-0-0" INVOCATION_ENV_SPEC = "iglu:com.dbt/invocation_env/jsonschema/1-0-0" INVOCATION_SPEC = "iglu:com.dbt/invocation/jsonschema/1-0-2" @@ -46,18 +51,27 @@ RPC_REQUEST_SPEC = "iglu:com.dbt/rpc_request/jsonschema/1-0-1" RUNNABLE_TIMING = "iglu:com.dbt/runnable/jsonschema/1-0-0" RUN_MODEL_SPEC = "iglu:com.dbt/run_model/jsonschema/1-0-3" +PLUGIN_GET_NODES = "iglu:com.dbt/plugin_get_nodes/jsonschema/1-0-0" + +SNOWPLOW_TRACKER_VERSION = Version(snowplow_version) + +# workaround in case real snowplow tracker is in the env +# the argument was renamed in https://github.com/snowplow/snowplow-python-tracker/commit/39fd50a3aff98a5efdd5c5c7fb5518fe4761305b +INIT_KW_ARGS = ( + {"buffer_size": 30} if SNOWPLOW_TRACKER_VERSION < Version("0.13.0") else {"batch_size": 30} +) class TimeoutEmitter(Emitter): - def __init__(self): + def __init__(self) -> None: super().__init__( COLLECTOR_URL, protocol=COLLECTOR_PROTOCOL, - buffer_size=30, on_failure=self.handle_failure, method="post", # don't set this. 
byte_limit=None, + **INIT_KW_ARGS, ) @staticmethod @@ -102,14 +116,14 @@ def http_get(self, payload): emitter = TimeoutEmitter() tracker = Tracker( - emitter, + emitters=emitter, namespace="cf", app_id="dbt", ) class User: - def __init__(self, cookie_dir): + def __init__(self, cookie_dir) -> None: self.do_not_track = True self.cookie_dir = cookie_dir @@ -204,12 +218,12 @@ def get_dbt_env_context(): def track(user, *args, **kwargs): if user.do_not_track: return - else: - fire_event(SendingEvent(kwargs=str(kwargs))) - try: - tracker.track_struct_event(*args, **kwargs) - except Exception: - fire_event(SendEventFailure()) + + fire_event(SendingEvent(kwargs=str(kwargs))) + try: + tracker.track(StructuredEvent(*args, **kwargs)) + except Exception: + fire_event(SendEventFailure()) def track_project_id(options): @@ -353,6 +367,20 @@ def track_deprecation_warn(options): ) +def track_behavior_change_warn(msg: EventMsg) -> None: + if msg.info.name != "BehaviorChangeEvent" or active_user is None: + return + + context = [SelfDescribingJson(BEHAVIOR_CHANGE_WARN_SPEC, msg_to_dict(msg))] + track( + active_user, + category="dbt", + action=msg.info.name, + label=get_invocation_id(), + context=context, + ) + + def track_invocation_end(invocation_context, result_type=None): data = {"progress": "end", "result_type": result_type, "result": None} data.update(invocation_context) @@ -409,6 +437,19 @@ def track_partial_parser(options): ) +def track_plugin_get_nodes(options): + context = [SelfDescribingJson(PLUGIN_GET_NODES, options)] + assert active_user is not None, "Cannot track plugin node info when active user is None" + + track( + active_user, + category="dbt", + action="plugin_get_nodes", + label=get_invocation_id(), + context=context, + ) + + def track_runnable_timing(options): context = [SelfDescribingJson(RUNNABLE_TIMING, options)] assert active_user is not None, "Cannot track runnable info when active user is None" @@ -442,22 +483,7 @@ def do_not_track(): active_user = User(None) -class InvocationProcessor(logbook.Processor): - def __init__(self): - super().__init__() - - def process(self, record): - if active_user is not None: - record.extra.update( - { - "run_started_at": active_user.run_started_at.isoformat(), - "invocation_id": get_invocation_id(), - } - ) - - def initialize_from_flags(send_anonymous_usage_stats, profiles_dir): - # Setting these used to be in UserConfig, but had to be moved here global active_user if send_anonymous_usage_stats: active_user = User(profiles_dir) diff --git a/core/dbt/ui.py b/core/dbt/ui.py deleted file mode 100644 index ef9089c857a..00000000000 --- a/core/dbt/ui.py +++ /dev/null @@ -1,69 +0,0 @@ -import textwrap -from typing import Dict - -import colorama - -from dbt.flags import get_flags - -COLORS: Dict[str, str] = { - "red": colorama.Fore.RED, - "green": colorama.Fore.GREEN, - "yellow": colorama.Fore.YELLOW, - "reset_all": colorama.Style.RESET_ALL, -} - - -COLOR_FG_RED = COLORS["red"] -COLOR_FG_GREEN = COLORS["green"] -COLOR_FG_YELLOW = COLORS["yellow"] -COLOR_RESET_ALL = COLORS["reset_all"] - - -def color(text: str, color_code: str) -> str: - if get_flags().USE_COLORS: - return "{}{}{}".format(color_code, text, COLOR_RESET_ALL) - else: - return text - - -def printer_width() -> int: - flags = get_flags() - if flags.PRINTER_WIDTH: - return flags.PRINTER_WIDTH - return 80 - - -def green(text: str) -> str: - return color(text, COLOR_FG_GREEN) - - -def yellow(text: str) -> str: - return color(text, COLOR_FG_YELLOW) - - -def red(text: str) -> str: - return color(text, 
COLOR_FG_RED) - - -def line_wrap_message(msg: str, subtract: int = 0, dedent: bool = True, prefix: str = "") -> str: - """ - Line wrap the given message to PRINTER_WIDTH - {subtract}. Convert double - newlines to newlines and avoid calling textwrap.fill() on them (like - markdown) - """ - width = printer_width() - subtract - if dedent: - msg = textwrap.dedent(msg) - - if prefix: - msg = f"{prefix}{msg}" - - # If the input had an explicit double newline, we want to preserve that - # (we'll turn it into a single line soon). Support windows, too. - splitter = "\r\n\r\n" if "\r\n\r\n" in msg else "\n\n" - chunks = msg.split(splitter) - return "\n".join(textwrap.fill(chunk, width=width, break_on_hyphens=False) for chunk in chunks) - - -def warning_tag(msg: str) -> str: - return f'[{yellow("WARNING")}]: {msg}' diff --git a/core/dbt/utils.py b/core/dbt/utils.py index 72fc4fcfdc6..05416d43344 100644 --- a/core/dbt/utils.py +++ b/core/dbt/utils.py @@ -1,46 +1,35 @@ import collections -import concurrent.futures -import copy import datetime import decimal import functools -import hashlib import itertools -import jinja2 import json import os -import requests import sys -from tarfile import ReadError -import time -from pathlib import PosixPath, WindowsPath - -from contextlib import contextmanager -from dbt.events.types import RetryExternalCall, RecordRetryException -from dbt.helper_types import WarnErrorOptions -from dbt import flags from enum import Enum -from typing_extensions import Protocol +from pathlib import PosixPath, WindowsPath from typing import ( - Tuple, - Type, + AbstractSet, Any, - Optional, - TypeVar, Dict, - Union, - Callable, - List, + Iterable, Iterator, + List, Mapping, - Iterable, - AbstractSet, - Set, + Optional, Sequence, + Set, + Tuple, + Type, ) -import dbt.events.functions -import dbt.exceptions +import jinja2 + +from dbt import flags +from dbt.exceptions import DuplicateAliasError +from dbt_common.exceptions import RecursionError +from dbt_common.helper_types import WarnErrorOptions +from dbt_common.utils import md5 DECIMALS: Tuple[Type[Any], ...] 
try: @@ -87,158 +76,10 @@ def get_model_name_or_none(model): return name -MACRO_PREFIX = "dbt_macro__" -DOCS_PREFIX = "dbt_docs__" - - -def get_dbt_macro_name(name): - if name is None: - raise dbt.exceptions.DbtInternalError("Got None for a macro name!") - return f"{MACRO_PREFIX}{name}" - - -def get_dbt_docs_name(name): - if name is None: - raise dbt.exceptions.DbtInternalError("Got None for a doc name!") - return f"{DOCS_PREFIX}{name}" - - -def get_materialization_macro_name(materialization_name, adapter_type=None, with_prefix=True): - if adapter_type is None: - adapter_type = "default" - name = f"materialization_{materialization_name}_{adapter_type}" - return get_dbt_macro_name(name) if with_prefix else name - - -def get_docs_macro_name(docs_name, with_prefix=True): - return get_dbt_docs_name(docs_name) if with_prefix else docs_name - - -def get_test_macro_name(test_name, with_prefix=True): - name = f"test_{test_name}" - return get_dbt_macro_name(name) if with_prefix else name - - def split_path(path): return path.split(os.sep) -def merge(*args): - if len(args) == 0: - return None - - if len(args) == 1: - return args[0] - - lst = list(args) - last = lst.pop(len(lst) - 1) - - return _merge(merge(*lst), last) - - -def _merge(a, b): - to_return = a.copy() - to_return.update(b) - return to_return - - -# http://stackoverflow.com/questions/20656135/python-deep-merge-dictionary-data -def deep_merge(*args): - """ - >>> dbt.utils.deep_merge({'a': 1, 'b': 2, 'c': 3}, {'a': 2}, {'a': 3, 'b': 1}) # noqa - {'a': 3, 'b': 1, 'c': 3} - """ - if len(args) == 0: - return None - - if len(args) == 1: - return copy.deepcopy(args[0]) - - lst = list(args) - last = copy.deepcopy(lst.pop(len(lst) - 1)) - - return _deep_merge(deep_merge(*lst), last) - - -def _deep_merge(destination, source): - if isinstance(source, dict): - for key, value in source.items(): - deep_merge_item(destination, key, value) - return destination - - -def deep_merge_item(destination, key, value): - if isinstance(value, dict): - node = destination.setdefault(key, {}) - destination[key] = deep_merge(node, value) - elif isinstance(value, tuple) or isinstance(value, list): - if key in destination: - destination[key] = list(value) + list(destination[key]) - else: - destination[key] = value - else: - destination[key] = value - - -def _deep_map_render( - func: Callable[[Any, Tuple[Union[str, int], ...]], Any], - value: Any, - keypath: Tuple[Union[str, int], ...], -) -> Any: - atomic_types: Tuple[Type[Any], ...] = (int, float, str, type(None), bool, datetime.date) - - ret: Any - - if isinstance(value, list): - ret = [_deep_map_render(func, v, (keypath + (idx,))) for idx, v in enumerate(value)] - elif isinstance(value, dict): - ret = {k: _deep_map_render(func, v, (keypath + (str(k),))) for k, v in value.items()} - elif isinstance(value, atomic_types): - ret = func(value, keypath) - else: - container_types: Tuple[Type[Any], ...] = (list, dict) - ok_types = container_types + atomic_types - raise dbt.exceptions.DbtConfigError( - "in _deep_map_render, expected one of {!r}, got {!r}".format(ok_types, type(value)) - ) - - return ret - - -def deep_map_render(func: Callable[[Any, Tuple[Union[str, int], ...]], Any], value: Any) -> Any: - """This function renders a nested dictionary derived from a yaml - file. It is used to render dbt_project.yml, profiles.yml, and - schema files. - - It maps the function func() onto each non-container value in 'value' - recursively, returning a new value. 
As long as func does not manipulate - the value, then deep_map_render will also not manipulate it. - - value should be a value returned by `yaml.safe_load` or `json.load` - the - only expected types are list, dict, native python number, str, NoneType, - and bool. - - func() will be called on numbers, strings, Nones, and booleans. Its first - parameter will be the value, and the second will be its keypath, an - iterable over the __getitem__ keys needed to get to it. - - :raises: If there are cycles in the value, raises a - dbt.exceptions.RecursionException - """ - try: - return _deep_map_render(func, value, ()) - except RuntimeError as exc: - if "maximum recursion depth exceeded" in str(exc): - raise dbt.exceptions.RecursionError("Cycle detected in deep_map_render") - raise - - -class AttrDict(dict): - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self.__dict__ = self - - def get_pseudo_test_path(node_name, source_path): "schema tests all come from schema.yml files. fake a source sql file" source_path_parts = split_path(source_path) @@ -253,13 +94,6 @@ def get_pseudo_hook_path(hook_name): return os.path.join(*path_parts) -def md5(string, charset="utf-8"): - if sys.version_info >= (3, 9): - return hashlib.md5(string.encode(charset), usedforsecurity=False).hexdigest() - else: - return hashlib.md5(string.encode(charset)).hexdigest() - - def get_hash(model): return md5(model.unique_id) @@ -279,9 +113,9 @@ class memoized: Taken from https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize""" - def __init__(self, func): + def __init__(self, func) -> None: self.func = func - self.cache = {} + self.cache: Dict[Any, Any] = {} def __call__(self, *args): if not isinstance(args, collections.abc.Hashable): @@ -303,14 +137,6 @@ def __get__(self, obj, objtype): return functools.partial(self.__call__, obj) -K_T = TypeVar("K_T") -V_T = TypeVar("V_T") - - -def filter_null_values(input: Dict[K_T, Optional[V_T]]) -> Dict[K_T, V_T]: - return {k: v for k, v in input.items() if v is not None} - - def add_ephemeral_model_prefix(s: str) -> str: return "__dbt__cte__{}".format(s) @@ -351,18 +177,8 @@ def default(self, obj): return super().default(obj) -class ForgivingJSONEncoder(JSONEncoder): - def default(self, obj): - # let dbt's default JSON encoder handle it if possible, fallback to - # str() - try: - return super().default(obj) - except TypeError: - return str(obj) - - class Translator: - def __init__(self, aliases: Mapping[str, str], recursive: bool = False): + def __init__(self, aliases: Mapping[str, str], recursive: bool = False) -> None: self.aliases = aliases self.recursive = recursive @@ -372,7 +188,7 @@ def translate_mapping(self, kwargs: Mapping[str, Any]) -> Dict[str, Any]: for key, value in kwargs.items(): canonical_key = self.aliases.get(key, key) if canonical_key in result: - raise dbt.exceptions.DuplicateAliasError(kwargs, self.aliases, canonical_key) + raise DuplicateAliasError(kwargs, self.aliases, canonical_key) result[canonical_key] = self.translate_value(value) return result @@ -416,6 +232,7 @@ def translate_aliases( # Note that this only affects hologram json validation. # It has no effect on mashumaro serialization. +# Q: Can this be removed? 
def restrict_to(*restrictions): """Create the metadata for a restricted dataclass field""" return {"restrict": list(restrictions)} @@ -438,90 +255,6 @@ def _coerce_decimal(value): return value -def lowercase(value: Optional[str]) -> Optional[str]: - if value is None: - return None - else: - return value.lower() - - -# some types need to make constants available to the jinja context as -# attributes, and regular properties only work with objects. maybe this should -# be handled by the RelationProxy? - - -class classproperty(object): - def __init__(self, func): - self.func = func - - def __get__(self, obj, objtype): - return self.func(objtype) - - -class ConnectingExecutor(concurrent.futures.Executor): - def submit_connected(self, adapter, conn_name, func, *args, **kwargs): - def connected(conn_name, func, *args, **kwargs): - with self.connection_named(adapter, conn_name): - return func(*args, **kwargs) - - return self.submit(connected, conn_name, func, *args, **kwargs) - - -# a little concurrent.futures.Executor for single-threaded mode -class SingleThreadedExecutor(ConnectingExecutor): - def submit(*args, **kwargs): - # this basic pattern comes from concurrent.futures.Executor itself, - # but without handling the `fn=` form. - if len(args) >= 2: - self, fn, *args = args - elif not args: - raise TypeError( - "descriptor 'submit' of 'SingleThreadedExecutor' object needs an argument" - ) - else: - raise TypeError( - "submit expected at least 1 positional argument, got %d" % (len(args) - 1) - ) - fut = concurrent.futures.Future() - try: - result = fn(*args, **kwargs) - except Exception as exc: - fut.set_exception(exc) - else: - fut.set_result(result) - return fut - - @contextmanager - def connection_named(self, adapter, name): - yield - - -class MultiThreadedExecutor( - ConnectingExecutor, - concurrent.futures.ThreadPoolExecutor, -): - @contextmanager - def connection_named(self, adapter, name): - with adapter.connection_named(name): - yield - - -class ThreadedArgs(Protocol): - single_threaded: bool - - -class HasThreadingConfig(Protocol): - args: ThreadedArgs - threads: Optional[int] - - -def executor(config: HasThreadingConfig) -> ConnectingExecutor: - if config.args.single_threaded: - return SingleThreadedExecutor() - else: - return MultiThreadedExecutor(max_workers=config.threads) - - def fqn_search(root: Dict[str, Any], fqn: List[str]) -> Iterator[Dict[str, Any]]: """Iterate into a nested dictionary, looking for keys in the fqn as levels. Yield the level config. @@ -590,31 +323,6 @@ def __contains__(self, name) -> bool: return any((name in entry for entry in self._itersource())) -def _connection_exception_retry(fn, max_attempts: int, attempt: int = 0): - """Attempts to run a function that makes an external call, if the call fails - on a Requests exception or decompression issue (ReadError), it will be tried - up to 5 more times. All exceptions that Requests explicitly raises inherit from - requests.exceptions.RequestException. See https://github.com/dbt-labs/dbt-core/issues/4579 - for context on this decompression issues specifically. 
- """ - try: - return fn() - except ( - requests.exceptions.RequestException, - ReadError, - EOFError, - ) as exc: - if attempt <= max_attempts - 1: - dbt.events.functions.fire_event(RecordRetryException(exc=str(exc))) - dbt.events.functions.fire_event(RetryExternalCall(attempt=attempt, max=max_attempts)) - time.sleep(1) - return _connection_exception_retry(fn, max_attempts, attempt + 1) - else: - raise dbt.exceptions.ConnectionError( - "External connection exception occurred: " + str(exc) - ) - - # This is used to serialize the args in the run_results and in the logs. # We do this separately because there are a few fields that don't serialize, # i.e. PosixPath, WindowsPath, and types. It also includes args from both @@ -624,7 +332,7 @@ def _connection_exception_retry(fn, max_attempts: int, attempt: int = 0): def args_to_dict(args): var_args = vars(args).copy() # update the args with the flags, which could also come from environment - # variables or user_config + # variables or project_flags flag_dict = flags.get_flag_dict() var_args.update(flag_dict) dict_args = {} @@ -663,25 +371,38 @@ def args_to_dict(args): return dict_args -# This is useful for proto generated classes in particular, since -# the default for protobuf for strings is the empty string, so -# Optional[str] types don't work for generated Python classes. -def cast_to_str(string: Optional[str]) -> str: - if string is None: - return "" - else: - return string - +# Taken from https://github.com/python/cpython/blob/3.11/Lib/distutils/util.py +# This is a copy of the function from distutils.util, which was removed in Python 3.12. +def strtobool(val: str) -> bool: + """Convert a string representation of truth to True or False. -def cast_to_int(integer: Optional[int]) -> int: - if integer is None: - return 0 + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + """ + val = val.lower() + if val in ("y", "yes", "t", "true", "on", "1"): + return True + elif val in ("n", "no", "f", "false", "off", "0"): + return False else: - return integer + raise ValueError("invalid truth value %r" % (val,)) -def cast_dict_to_dict_of_strings(dct): - new_dct = {} - for k, v in dct.items(): - new_dct[str(k)] = str(v) - return new_dct +def try_get_max_rss_kb() -> Optional[int]: + """Attempts to get the high water mark for this process's memory use via + the most reliable and accurate mechanism available through the host OS. + Currently only implemented for Linux.""" + if sys.platform == "linux" and os.path.isfile("/proc/self/status"): + try: + # On Linux, the most reliable documented mechanism for getting the RSS + # high-water-mark comes from the line confusingly labeled VmHWM in the + # /proc/self/status virtual file. 
+ with open("/proc/self/status") as f: + for line in f: + if line.startswith("VmHWM:"): + return int(str.split(line)[1]) + except Exception: + pass + + return None diff --git a/core/dbt/version.py b/core/dbt/version.py index cfbe01f65fd..475e98e9d56 100644 --- a/core/dbt/version.py +++ b/core/dbt/version.py @@ -1,16 +1,14 @@ +import glob import importlib import importlib.util -import os -import glob import json +import os from typing import Iterator, List, Optional, Tuple import requests -import dbt.exceptions -import dbt.semver - -from dbt.ui import green, red, yellow +import dbt_common.semver as semver +from dbt_common.ui import green, red, yellow PYPI_VERSION_URL = "https://pypi.org/pypi/dbt-core/json" @@ -34,13 +32,13 @@ def get_version_information() -> str: return "\n\n".join(msg_lines) -def get_installed_version() -> dbt.semver.VersionSpecifier: - return dbt.semver.VersionSpecifier.from_version_string(__version__) +def get_installed_version() -> semver.VersionSpecifier: + return semver.VersionSpecifier.from_version_string(__version__) def get_latest_version( version_url: str = PYPI_VERSION_URL, -) -> Optional[dbt.semver.VersionSpecifier]: +) -> Optional[semver.VersionSpecifier]: try: resp = requests.get(version_url, timeout=1) data = resp.json() @@ -48,10 +46,13 @@ def get_latest_version( except (json.JSONDecodeError, KeyError, requests.RequestException): return None - return dbt.semver.VersionSpecifier.from_version_string(version_string) + return semver.VersionSpecifier.from_version_string(version_string) -def _get_core_msg_lines(installed, latest) -> Tuple[List[List[str]], str]: +def _get_core_msg_lines( + installed: semver.VersionSpecifier, + latest: Optional[semver.VersionSpecifier], +) -> Tuple[List[List[str]], str]: installed_s = installed.to_version_string(skip_matcher=True) installed_line = ["installed", installed_s, ""] update_info = "" @@ -96,7 +97,7 @@ def _format_core_msg(lines: List[List[str]]) -> str: return msg + "\n".join(msg_lines) -def _get_plugins_msg(installed: dbt.semver.VersionSpecifier) -> str: +def _get_plugins_msg(installed: semver.VersionSpecifier) -> str: msg_lines = ["Plugins:"] plugins = [] @@ -122,9 +123,9 @@ def _get_plugins_msg(installed: dbt.semver.VersionSpecifier) -> str: def _get_plugin_msg_info( - name: str, version_s: str, core: dbt.semver.VersionSpecifier + name: str, version_s: str, core: semver.VersionSpecifier ) -> Tuple[str, bool]: - plugin = dbt.semver.VersionSpecifier.from_version_string(version_s) + plugin = semver.VersionSpecifier.from_version_string(version_s) latest_plugin = get_latest_version(version_url=get_package_pypi_url(name)) needs_update = False @@ -169,14 +170,12 @@ def _pad_lines(lines: List[List[str]], seperator: str = "") -> List[List[str]]: result: List[List[str]] = [] for i, line in enumerate(lines): - # add another list to hold padded strings if len(result) == i: result.append([""] * len(line)) # iterate over columns in the line for j, item in enumerate(line): - # the last column does not need padding if j == len(line) - 1: result[i][j] = item @@ -212,7 +211,7 @@ def _get_dbt_plugins_info() -> Iterator[Tuple[str, str]]: except ImportError: # not an adapter continue - yield plugin_name, mod.version # type: ignore + yield plugin_name, mod.version def _get_adapter_plugin_names() -> Iterator[str]: @@ -232,5 +231,5 @@ def _get_adapter_plugin_names() -> Iterator[str]: yield plugin_name -__version__ = "1.7.0a1" +__version__ = "1.9.0a1" installed = get_installed_version() diff --git a/core/setup.py b/core/setup.py index 
e198bc0aed2..354326e8896 100644 --- a/core/setup.py +++ b/core/setup.py @@ -25,7 +25,7 @@ package_name = "dbt-core" -package_version = "1.7.0a1" +package_version = "1.9.0a1" description = """With dbt, data analysts and engineers can build analytics \ the way engineers build applications.""" @@ -49,46 +49,37 @@ # ---- # dbt-core uses these packages deeply, throughout the codebase, and there have been breaking changes in past patch releases (even though these are major-version-one). # Pin to the patch or minor version, and bump in each new minor version of dbt-core. - "agate~=1.7.0", - "Jinja2~=3.1.2", - "mashumaro[msgpack]~=3.8.1", - # ---- - # Legacy: This package has not been updated since 2019, and it is unused in dbt's logging system (since v1.0) - # The dependency here will be removed along with the removal of 'legacy logging', in a future release of dbt-core - "logbook>=1.5,<1.6", + "agate>=1.7.0,<1.10", + "Jinja2>=3.1.3,<4", + "mashumaro[msgpack]>=3.9,<4.0", # ---- # dbt-core uses these packages in standard ways. Pin to the major version, and check compatibility # with major versions in each new minor version of dbt-core. - "click<9", - "networkx>=2.3,<4", + "click>=8.0.2,<9.0", + "networkx>=2.3,<4.0", + "protobuf>=4.0.0,<5", + "requests<3.0.0", # should match dbt-common + "snowplow-tracker>=1.0.2,<2.0", # ---- # These packages are major-version-0. Keep upper bounds on upcoming minor versions (which could have breaking changes) # and check compatibility / bump in each new minor version of dbt-core. - "colorama>=0.3.9,<0.5", - "pathspec>=0.9,<0.12", - "isodate>=0.6,<0.7", - # ---- - "sqlparse>=0.2.3,<0.5", + "pathspec>=0.9,<0.13", + "sqlparse>=0.5.0,<0.6.0", # ---- - # These are major-version-0 packages also maintained by dbt-labs. Accept patches. - "dbt-extractor~=0.5.0", - "hologram~=0.0.16", # includes transitive dependencies on python-dateutil and jsonschema - "minimal-snowplow-tracker~=0.0.2", - # DSI is under active development, so we're pinning to specific dev versions for now. - "dbt-semantic-interfaces~=0.2.0", + # These are major-version-0 packages also maintained by dbt-labs. + # Accept patches but avoid automatically updating past a set minor version range. + "dbt-extractor>=0.5.0,<=0.6", + "dbt-semantic-interfaces>=0.7.1,<0.8", + # Minor versions for these are expected to be backwards-compatible + "dbt-common>=1.9.0,<2.0", + "dbt-adapters>=1.7.0,<2.0", # ---- # Expect compatibility with all new versions of these packages, so lower bounds only. 
"packaging>20.9", - "protobuf>=4.0.0", "pytz>=2015.7", "pyyaml>=6.0", - "typing-extensions>=3.7.4", - # ---- - # Match snowflake-connector-python, to ensure compatibility in dbt-snowflake - "cffi>=1.9,<2.0.0", - "idna>=2.5,<4", - "requests<3.0.0", - "urllib3~=1.0", + "daff>=1.3.46", + "typing-extensions>=4.4", # ---- ], zip_safe=False, @@ -102,6 +93,7 @@ "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", ], python_requires=">=3.8", ) diff --git a/dev-requirements.txt b/dev-requirements.txt index 1b290c05b61..20605e632b8 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -1,36 +1,39 @@ -black==23.3.0 +git+https://github.com/dbt-labs/dbt-adapters.git@main +git+https://github.com/dbt-labs/dbt-adapters.git@main#subdirectory=dbt-tests-adapter +git+https://github.com/dbt-labs/dbt-common.git@main +git+https://github.com/dbt-labs/dbt-postgres.git@main +# black must match what's in .pre-commit-config.yaml to be sure local env matches CI +black==24.3.0 bumpversion -ddtrace +ddtrace==2.3.0 docutils -flake8 +# flake8 must match what's in .pre-commit-config.yaml to be sure local env matches CI +flake8==4.0.1 flaky -freezegun==0.3.12 +freezegun>=1.4.0,<1.5 +hypothesis ipdb +# isort must match what's in .pre-commit-config.yaml to be sure local env matches CI +isort==5.13.2 +# mypy must match what's in .pre-commit-config.yaml to be sure local env matches CI mypy==1.4.1 pip-tools pre-commit -protobuf>=4.0.0 -pytest +pytest>=7.4,<8.0 pytest-cov -pytest-csv +pytest-csv>=3.0,<4.0 pytest-dotenv -pytest-logbook pytest-mock pytest-split pytest-xdist sphinx tox>=3.13 -twine -types-colorama types-docutils types-PyYAML -types-freezegun types-Jinja2 types-mock -types-protobuf -types-python-dateutil +types-protobuf>=4.0.0,<5.0.0 types-pytz types-requests types-setuptools -wheel mocker diff --git a/docker-compose.yml b/docker-compose.yml index deb51662bf6..6606b3d53a5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -30,7 +30,3 @@ services: working_dir: /usr/app depends_on: - database - -networks: - default: - name: dbt-net diff --git a/docker/Dockerfile b/docker/Dockerfile index f1a44a11763..0cfe9ace811 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,133 +1,59 @@ -## -# Generic dockerfile for dbt image building. -# See README for operational details -## - -# Top level build args -ARG build_for=linux/amd64 - -## -# base image (abstract) -## -FROM --platform=$build_for python:3.11.2-slim-bullseye as base - -# N.B. The refs updated automagically every release via bumpversion -# N.B. 
dbt-postgres is currently found in the core codebase so a value of dbt-core@ is correct - -ARG dbt_core_ref=dbt-core@v1.7.0a1 -ARG dbt_postgres_ref=dbt-core@v1.7.0a1 -ARG dbt_redshift_ref=dbt-redshift@v1.7.0a1 -ARG dbt_bigquery_ref=dbt-bigquery@v1.7.0a1 -ARG dbt_snowflake_ref=dbt-snowflake@v1.7.0a1 -ARG dbt_spark_ref=dbt-spark@v1.7.0a1 -# special case args -ARG dbt_spark_version=all -ARG dbt_third_party +ARG py_version=3.11.2 + +FROM python:$py_version-slim-bullseye as base -# System setup RUN apt-get update \ && apt-get dist-upgrade -y \ && apt-get install -y --no-install-recommends \ - git \ - ssh-client \ - software-properties-common \ - make \ - build-essential \ - ca-certificates \ - libpq-dev \ + build-essential=12.9 \ + ca-certificates=20210119 \ + git=1:2.30.2-1+deb11u2 \ + libpq-dev=13.16-0+deb11u1 \ + make=4.3-4.1 \ + openssh-client=1:8.4p1-5+deb11u3 \ + software-properties-common=0.96.20.2-2.1 \ && apt-get clean \ && rm -rf \ /var/lib/apt/lists/* \ /tmp/* \ /var/tmp/* -# Env vars ENV PYTHONIOENCODING=utf-8 ENV LANG=C.UTF-8 -# Update python -RUN python -m pip install --upgrade pip setuptools wheel --no-cache-dir +RUN python -m pip install --upgrade "pip==24.0" "setuptools==69.2.0" "wheel==0.43.0" --no-cache-dir -# Set docker basics -WORKDIR /usr/app/dbt/ -ENTRYPOINT ["dbt"] -## -# dbt-core -## FROM base as dbt-core -RUN python -m pip install --no-cache-dir "git+https://github.com/dbt-labs/${dbt_core_ref}#egg=dbt-core&subdirectory=core" -## -# dbt-postgres -## -FROM base as dbt-postgres -RUN python -m pip install --no-cache-dir "git+https://github.com/dbt-labs/${dbt_postgres_ref}#egg=dbt-postgres&subdirectory=plugins/postgres" +ARG commit_ref=main +HEALTHCHECK CMD dbt --version || exit 1 -## -# dbt-redshift -## -FROM base as dbt-redshift -RUN python -m pip install --no-cache-dir "git+https://github.com/dbt-labs/${dbt_redshift_ref}#egg=dbt-redshift" +WORKDIR /usr/app/dbt/ +ENTRYPOINT ["dbt"] +RUN python -m pip install --no-cache-dir "dbt-core @ git+https://github.com/dbt-labs/dbt-core@${commit_ref}#subdirectory=core" -## -# dbt-bigquery -## -FROM base as dbt-bigquery -RUN python -m pip install --no-cache-dir "git+https://github.com/dbt-labs/${dbt_bigquery_ref}#egg=dbt-bigquery" +FROM base as dbt-postgres -## -# dbt-snowflake -## -FROM base as dbt-snowflake -RUN python -m pip install --no-cache-dir "git+https://github.com/dbt-labs/${dbt_snowflake_ref}#egg=dbt-snowflake" +ARG commit_ref=main -## -# dbt-spark -## -FROM base as dbt-spark -RUN apt-get update \ - && apt-get dist-upgrade -y \ - && apt-get install -y --no-install-recommends \ - python-dev \ - libsasl2-dev \ - gcc \ - unixodbc-dev \ - && apt-get clean \ - && rm -rf \ - /var/lib/apt/lists/* \ - /tmp/* \ - /var/tmp/* -RUN python -m pip install --no-cache-dir "git+https://github.com/dbt-labs/${dbt_spark_ref}#egg=dbt-spark[${dbt_spark_version}]" +HEALTHCHECK CMD dbt --version || exit 1 + +WORKDIR /usr/app/dbt/ +ENTRYPOINT ["dbt"] + +RUN python -m pip install --no-cache-dir "dbt-postgres @ git+https://github.com/dbt-labs/dbt-core@${commit_ref}#subdirectory=plugins/postgres" -## -# dbt-third-party -## FROM dbt-core as dbt-third-party -RUN python -m pip install --no-cache-dir "${dbt_third_party}" -## -# dbt-all -## -FROM base as dbt-all -RUN apt-get update \ - && apt-get dist-upgrade -y \ - && apt-get install -y --no-install-recommends \ - python-dev \ - libsasl2-dev \ - gcc \ - unixodbc-dev \ - && apt-get clean \ - && rm -rf \ - /var/lib/apt/lists/* \ - /tmp/* \ - /var/tmp/* - RUN python -m pip install --no-cache 
"git+https://github.com/dbt-labs/${dbt_redshift_ref}#egg=dbt-redshift" - RUN python -m pip install --no-cache "git+https://github.com/dbt-labs/${dbt_bigquery_ref}#egg=dbt-bigquery" - RUN python -m pip install --no-cache "git+https://github.com/dbt-labs/${dbt_snowflake_ref}#egg=dbt-snowflake" - RUN python -m pip install --no-cache "git+https://github.com/dbt-labs/${dbt_spark_ref}#egg=dbt-spark[${dbt_spark_version}]" - RUN python -m pip install --no-cache "git+https://github.com/dbt-labs/${dbt_postgres_ref}#egg=dbt-postgres&subdirectory=plugins/postgres" +ARG dbt_third_party + +RUN if [ "$dbt_third_party" ]; then \ + python -m pip install --no-cache-dir "${dbt_third_party}"; \ + else \ + echo "No third party adapter provided"; \ + fi \ diff --git a/docker/README.md b/docker/README.md index 7a48010b7d3..d05184146ed 100644 --- a/docker/README.md +++ b/docker/README.md @@ -5,70 +5,40 @@ This docker file is suitable for building dbt Docker images locally or using wit ## Building an image: This Dockerfile can create images for the following targets, each named after the database they support: * `dbt-core` _(no db-adapter support)_ -* `dbt-postgres` -* `dbt-redshift` -* `dbt-bigquery` -* `dbt-snowflake` -* `dbt-spark` * `dbt-third-party` _(requires additional build-arg)_ -* `dbt-all` _(installs all of the above in a single image)_ + +For platform-specific images, please refer to that platform's repository (eg. `dbt-labs/dbt-postgres`) In order to build a new image, run the following docker command. ``` -docker build --tag --target +docker build --tag --target ``` --- > **Note:** Docker must be configured to use [BuildKit](https://docs.docker.com/develop/develop-images/build_enhancements/) in order for images to build properly! --- -By default the images will be populated with the most recent release of `dbt-core` and whatever database adapter you select. If you need to use a different version you can specify it by git ref using the `--build-arg` flag: +By default the images will be populated with `dbt-core` on `main`. +If you need to use a different version you can specify it by git ref (tag, branch, sha) using the `--build-arg` flag: ``` docker build --tag \ --target \ - --build-arg = \ + --build-arg commit_ref= \ ``` -valid arg names for versioning are: -* `dbt_core_ref` -* `dbt_postgres_ref` -* `dbt_redshift_ref` -* `dbt_bigquery_ref` -* `dbt_snowflake_ref` -* `dbt_spark_ref` - ---- ->**NOTE:** Only override a _single_ build arg for each build. Using multiple overrides may lead to a non-functioning image. - ---- -If you wish to build an image with a third-party adapter you can use the `dbt-third-party` target. This target requires you provide a path to the adapter that can be processed by `pip` by using the `dbt_third_party` build arg: +If you wish to build an image with a third-party adapter you can use the `dbt-third-party` target. +This target requires you provide a path to the adapter that can be processed by `pip` by using the `dbt_third_party` build arg: ``` docker build --tag \ --target dbt-third-party \ --build-arg dbt_third_party= \ ``` +This can also be combined with the `commit_ref` build arg to specify a version of `dbt-core`. ### Examples: -To build an image named "my-dbt" that supports redshift using the latest releases: -``` -cd dbt-core/docker -docker build --tag my-dbt --target dbt-redshift . 
-``` - -To build an image named "my-other-dbt" that supports bigquery using `dbt-core` version 0.21.latest and the bigquery adapter version 1.0.0b1: -``` -cd dbt-core/docker -docker build \ - --tag my-other-dbt \ - --target dbt-bigquery \ - --build-arg dbt_bigquery_ref=dbt-bigquery@v1.0.0b1 \ - --build-arg dbt_core_ref=dbt-core@0.21.latest \ - . -``` - -To build an image named "my-third-party-dbt" that uses [Materilize third party adapter](https://github.com/MaterializeInc/materialize/tree/main/misc/dbt-materialize) and the latest release of `dbt-core`: +To build an image named "my-third-party-dbt" that uses the latest release of [Materialize third party adapter](https://github.com/MaterializeInc/materialize/tree/main/misc/dbt-materialize) and the latest dev version of `dbt-core`: ``` cd dbt-core/docker docker build --tag my-third-party-dbt \ @@ -78,36 +48,15 @@ docker build --tag my-third-party-dbt \ ``` -## Special cases -There are a few special cases worth noting: -* The `dbt-spark` database adapter comes in three different versions named `PyHive`, `ODBC`, and the default `all`. If you wish to overide this you can use the `--build-arg` flag with the value of `dbt_spark_version=`. See the [docs](https://docs.getdbt.com/reference/warehouse-profiles/spark-profile) for more information. - -* The `dbt-postgres` database adapter is released as part of the `dbt-core` codebase. If you wish to overide the version used, make sure you use the gitref for `dbt-core`: -``` -docker build --tag my_dbt \ - --target dbt-postgres \ - --build-arg dbt_postgres_ref=dbt-core@1.0.0b1 \ - \ - ``` - -* If you need to build against another architecture (linux/arm64 in this example) you can overide the `build_for` build arg: -``` -docker build --tag my_dbt \ - --target dbt-postgres \ - --build-arg build_for=linux/arm64 \ - \ - ``` -Supported architectures can be found in the python docker [dockerhub page](https://hub.docker.com/_/python). - ## Running an image in a container: The `ENTRYPOINT` for this Dockerfile is the command `dbt` so you can bind-mount your project to `/usr/app` and use dbt as normal: ``` docker run \ ---network=host ---mount type=bind,source=path/to/project,target=/usr/app \ ---mount type=bind,source=path/to/profiles.yml,target=/root/.dbt/profiles.yml \ -my-dbt \ -ls + --network=host \ + --mount type=bind,source=path/to/project,target=/usr/app \ + --mount type=bind,source=path/to/profiles.yml,target=/root/.dbt/profiles.yml \ + my-dbt \ + ls ``` --- **Notes:** diff --git a/docs/guides/behavior-change-flags.md b/docs/guides/behavior-change-flags.md new file mode 100644 index 00000000000..ad8b0e4fb93 --- /dev/null +++ b/docs/guides/behavior-change-flags.md @@ -0,0 +1,32 @@ +# Playbook: Behavior Change Flags + +User documentation: https://docs.getdbt.com/reference/global-configs/legacy-behaviors + +## Rules for introducing a new flag + +1. **Naming.** All behavior change flags should be named so that their default value changes from **False → True**. This makes it significantly easier for us to document them and talk about them consistently, and it's more intuitive for end users. + * (a) If the flag is prohibiting something that we previously allowed, use the verb "require." Examples: + * `require_resource_names_without_spaces` + * `require_explicit_package_overrides_for_builtin_materializations` + * (b) All flags should be of boolean type, and False by default when introduced: `bool = False`. +2. **Documentation.** Start with the docs. What is the change? Who might be affected? 
What action will users need to take to mitigate this change? At this point, the dates for flag Introduction + Maturity are "TBD." +3. **Deprecation warnings**. As a general rule, **all** behavior changes should be accompanied by a deprecation warning. + * (a) Always use our standard deprecations module: [https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/deprecations.py](https://github.com/dbt-labs/dbt-core/blob/main/core/dbt/deprecations.py) + * (b) This serves two purposes: Signalling the change to the user, and collecting telemetry so we can understand blast radius among users with telemetry enabled. + * (c) These warning messages should link back to documentation: [https://docs.getdbt.com/reference/global-configs/legacy-behaviors](https://docs.getdbt.com/reference/global-configs/legacy-behaviors#deprecate_package_materialization_builtin_override) + * (d) Even for additive behaviors that are not "breaking changes," there is still an opportunity to signal these changes for users, and to gather an estimate of the impact. E.g. `source_freshness_run_project_hooks` should still include a proactive message any time someone runs the `source freshness` command in a project that has `on-run-*` hooks defined. + * (e) The call site for these deprecation warnings should be as close as possible to the place where we’re evaluating conditional logic based on the project flag. Essentially, any time we check the flag value and it returns `False`, we should raise a deprecation warning while preserving the legacy behavior. (In the future, we might be able to streamline more of this boilerplate code.) + * (f) If users want to silence these deprecation warnings, they can do so via [`warn_error_options.silence`](https://docs.getdbt.com/reference/global-configs/warnings). Explicitly setting the flag to `False` in `dbt_project.yml` is not sufficient to silence the warning. +4. **Exceptions.** If the behavior change is to raise an exception that prohibits behavior which was previously permitted (e.g. spaces in model names), the exception message should also link to the docs on legacy behaviors. +5. **Backports.** Whenever possible, we should backport both the deprecation warning and the flag to the previous version of dbt Core. +6. **Open a GitHub issue** in the dbt-core repository that is the implementation ticket for switching the default from `false` to `true`. Add the `behavior_change_flag` issue label, and add it to the GitHub milestone for the next minor version. (This is true in most cases, see below for exceptional considerations.) During planning, we will bundle up the "introduced" behavior changes into an epic/tasklist that schedules their maturation. + +## After introduction + +1. **Maturing flag(s) by switching value from `False` → `True` in dbt-core `main`.** + * (a) This should land in **the next minor (`1.X.0`) release of dbt-core**. + * (b) If the behavior change is mitigating a security vulnerability, and the next minor release is still planned for several months away, we still backport the fix + flag (off by default) to supported OSS versions, and we strongly advise all users to opt into the flag sooner. +2. **Removing support for legacy behaviors.** + * (a) As a general rule, we will not entirely remove support for any legacy behaviors until dbt v2.0. At the same time, we are not committing to supporting them forever (à la Rust editions). We need to strike the right balance between _too fast_ and _never_.
+ * (b) On a case-by-case basis, if there is a strong compelling reason to remove a legacy behavior and we see minimal in-the-wild usage (<1% of relevant projects), we can remove it entirely. This needs to be communicated well in advance — at least 2 minor versions after introduction in dbt Core. + * (c) These are project configurations, not temporary feature flags. They add complexity to our codebase; that complexity compounds the more we have, and the longer we have them. Such is the price of maintaining mature v1.* software. diff --git a/docs/guides/parsing-vs-compilation-vs-runtime.md b/docs/guides/parsing-vs-compilation-vs-runtime.md new file mode 100644 index 00000000000..5c784f3b3a1 --- /dev/null +++ b/docs/guides/parsing-vs-compilation-vs-runtime.md @@ -0,0 +1,151 @@ +# Parsing vs. Compilation vs. Runtime + +## Context: Why this doc? + +There’s a lot of confusion about what dbt does at parse time vs. compile time / runtime. Even that separation is a relative simplification: parsing includes multiple steps, and while there are some distinctions between "compiling" and "running" a model, the two are **very** closely related. + +It's come up many times before, and we expect it will keep coming up! A decent number of bug reports in `dbt-core` are actually rooted in a misunderstanding of when configs are resolved, especially when folks are using pre/post hooks, or configs that alter materialization behavior (`partitions`, `merge_exclude_columns`, etc). + +So, here goes. + +## What is "parsing"? + +**In a sentence:** dbt reads all the files in your project, and constructs an internal representation of the project ("manifest"). + +To keep it really simple, let’s say this happens in two steps: "Parsing" and "Resolving." + +### Parsing + +As a user, you write models as SQL (or Python!) + YAML. For the sake of simplicity, we'll mostly consider SQL models ("Jinja-SQL") with additional notes for Python models ("dbt-py") as needed. + +dbt wants to understand and define each SQL model as an object in an internal data structure. It also wants to know its dependencies and configuration (= its place in the DAG). dbt reads your code **for that one model,** and attempts to construct that object, raising a **validation** error if it can’t. +
+(Toggle for many more details.) + +- (Because your SQL and YAML live in separate files, this is actually two steps. But for things like `sources`, `exposures`, `metrics`, `tests`, it’s a single pass.) +- dbt needs to capture and store two vital pieces of information: **dependencies** and **configuration**. + - We need to know the shape of the DAG. This includes which models are disabled. It also includes dependency relationships between models. + - Plus, certain configurations have implications for **node selection**, which supports selecting models using the `tag:` and `config:` methods. +- Parsing also resolves the configuration for that model, based on configs set in `dbt_project.yml`, and macros like `generate_schema_name`. (These are "special" macros, whose results are saved at parse time!) +- The way dbt parses models depends on the language that model is written in. + - dbt-py models are statically analyzed using the Python AST. + - Simple Jinja-SQL models (using just `ref()`, `source()`, &/or `config()` with literal inputs) are also [statically analyzed](https://docs.getdbt.com/reference/parsing#static-parser), using [a thing we built](https://github.com/dbt-labs/dbt-extractor). This is **very** fast (~0.3 ms). + - More complex Jinja-SQL models are parsed by actually rendering the Jinja, and "capturing" any instances of `ref()`, `source()`, &/or `config()`. This is kinda slow, but it’s more capable than our static parser. Those macros can receive `set` variables, or call other macros in turn, and we can still capture the right results because **we’re actually using real Jinja to render it.** + - We capture any other macros called in `depends_on.macros`. This enables us to do clever things later on, such as select models downstream of changed macros (`state:modified.macros`). + - **However:** If `ref()` is nested inside a conditional block that is false at parse time (e.g. `{% if execute %}`), we will miss capturing that macro call then. If the same conditional block resolves to true at runtime, we’re screwed! So [we have a runtime check](https://github.com/dbt-labs/dbt-core/blob/16f529e1d4e067bdbb6a659a622bead442f24b4e/core/dbt/context/providers.py#L495-L500) to validate that any `ref()` we see again at compile/runtime, is one we also previously captured at parse time. If we find a new `ref()` we weren’t expecting, there’s a risk that we’re running the DAG out of order! + +
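+To make the last point concrete, here is a minimal sketch (hypothetical model and file names, not an excerpt from dbt-core): the first `ref()` is captured when the Jinja is rendered at parse time, while the one hidden behind `{% if execute %}` is not; that is exactly the case the runtime check above guards against.
+
+```sql
+-- models/orders_enriched.sql (illustrative sketch)
+
+-- Captured at parse time: dbt renders the Jinja and records this ref().
+select *
+from {{ ref('stg_orders') }}
+
+-- NOT captured at parse time: execute is False while parsing, so this block
+-- renders to nothing and the ref() inside it is only discovered at
+-- compile/runtime, triggering the runtime check described above.
+{% if execute %}
+left join {{ ref('stg_payments') }} using (order_id)
+{% endif %}
+```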
+ +### Resolving + +After we’ve parsed all the objects in a project, we need to resolve the links between them. This is when we look up all the `ref()`, `source()`, `metric()`, and `doc()` calls that we captured during parsing. + +This is the first step of (almost) every dbt command! When it's done, we have the **Manifest**. + +
+(Toggle for many more details.) + +- If we find another node matching the lookup, we add it to the first node’s `depends_on.nodes`. +- If we don’t find an enabled node matching the lookup, we raise an error. + - (This is sometimes a failure mode for partial parsing, where we missed re-parsing a particular changed file/node, and it appears as though the node is missing when it clearly isn’t.) +- Corollary: During the initial parse (previous step), we’re not actually ready to look up `ref()`, `source()`, etc. But during that first Jinja render, we still want them to return a `Relation` object, to avoid type errors if users are writing custom code that expects to operate on a `Relation`. (Otherwise, we’d see all sorts of errors like "NoneType has no attribute 'identifier'.") So, during parsing, we just have `ref()` and `source()` return a placeholder `Relation` pointing to the model currently being parsed. This can lead to some odd behavior, such as in [this recent issue](https://github.com/dbt-labs/dbt-core/issues/6382). +
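+For instance, custom code that inspects the `Relation` returned by `ref()` can be surprised at parse time, because it receives that placeholder rather than the resolved upstream relation. A hypothetical snippet to illustrate the behavior described above (model names are made up):
+
+```sql
+-- models/orders.sql
+{% set upstream = ref('stg_orders') %}
+
+-- During parsing this logs the placeholder Relation (pointing at the model
+-- currently being parsed); only at compile/runtime does it log the real
+-- database.schema.identifier of stg_orders.
+{{ log("upstream resolves to: " ~ upstream, info=true) }}
+
+select * from {{ upstream }}
+```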
+ +## What is "execution"? + +**In a sentence:** Now that dbt knows about all the stuff in your project, it can perform operations on top of it. + +Things it can do: + +- tell you about all the models that match certain criteria (`list`) +- compile + run a set of models, in DAG order +- interactively compile / preview some Jinja-SQL that includes calls to macros or ref’d models defined in your project + +Depending on what’s involved, these operations may or may not require a live database connection. While executing, dbt produces metadata, which it returns as **log events** and **artifacts**. + +Put another way, dbt’s execution has required inputs, expected outputs, and the possibility of side effects: + +- **Inputs** (provided by user): project files, credentials, configuration → Manifest + runtime configuration +- **Outputs** (returned to user): logs & artifacts +- **Side effects** (not seen directly by user): changes in database state, depending on the operation being performed + +### Compiling a model + +We use the word "compiling" in a way that’s confusing for most software engineers (and many other people). Most of what’s described above, parsing + validating + constructing a Manifest (internal representation), falls more squarely in the traditional role of a language compiler. By contrast, when we talk about "compiling SQL," we’re really talking about something that happens at **runtime**. + +Devils in the details; toggle away. + +
+The mechanism of "compilation" varies by model language. + +- **Jinja-SQL** wants to compile down to "vanilla" SQL, appropriate for this database, where any calls to `ref('something')` have been replaced with `database.schema.something`. +- dbt doesn’t directly modify or rewrite user-provided **dbt-py** code at all. Instead, "compilation" looks like code generation: appending more methods that allow calls to `dbt.ref()`, `dbt.source()`, and `dbt.config.get()` to return the correct results at runtime. + +
+ +
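+As a rough before-and-after (hypothetical model and schema names; exact quoting varies by adapter), this is all that "compiling" a simple Jinja-SQL model amounts to:
+
+```sql
+-- models/orders.sql, as written:
+select order_id, amount
+from {{ ref('stg_orders') }}
+
+-- target/compiled/.../orders.sql, assuming stg_orders resolves to the
+-- analytics.dbt_prod schema:
+select order_id, amount
+from "analytics"."dbt_prod"."stg_orders"
+```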
+If your model’s code uses a dynamic query to template code, this requires a database connection. + +- At this point, [`execute`](https://docs.getdbt.com/reference/dbt-jinja-functions/execute) is set to `True`. +- e.g. `dbt_utils.get_column_values`, `dbt_utils.star` +- Jinja-SQL supports this sort of dynamic templating. dbt-py does not; there are other imperative ways to do this, using DataFrame methods / the Python interpreter at runtime. + +
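+A sketch of the pattern (assumes the `dbt_utils` package is installed and a `stg_payments` model exists): the `get_column_values` call issues a real query while the model is being compiled, so this model cannot be compiled without a live connection.
+
+```sql
+-- models/payments_pivoted.sql (illustrative)
+{% set payment_methods = dbt_utils.get_column_values(
+    table=ref('stg_payments'), column='payment_method'
+) %}
+
+select
+    order_id
+    {% for method in payment_methods %}
+    , sum(case when payment_method = '{{ method }}' then amount end) as {{ method }}_amount
+    {% endfor %}
+from {{ ref('stg_payments') }}
+group by 1
+```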
+ +
+Compilation is also when ephemeral model CTEs are interpolated into the models that `ref` them. + +- The code for this is *gnarly*. That’s all I’m going to say about it for now. + +
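+A rough illustration (hypothetical models; the exact CTE aliasing is an internal implementation detail): if `stg_orders` is configured with `materialized='ephemeral'`, a model that refs it gets the ephemeral query spliced in as a CTE instead of a reference to a real relation.
+
+```sql
+-- models/orders.sql, as written:
+select order_id, amount
+from {{ ref('stg_orders') }}
+
+-- Compiled output (sketch): the ephemeral model's query is injected as a
+-- prefixed CTE rather than resolved to a database relation.
+with __dbt__cte__stg_orders as (
+    select * from "analytics"."raw_data"."raw_orders"
+)
+select order_id, amount
+from __dbt__cte__stg_orders
+```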
+ +
+When compiling happens for a given node varies by command. + +- For example, if one model’s templated SQL depends on an introspective query that expects another model to have already been materialized, this can lead to errors. +- In `dbt run`, models are operated on in DAG order, where operating on one model means compiling it and then running its materialization. This way, if a downstream model’s compiled SQL will depend on an introspective query against the materialized results of an upstream model, we wait to compile it until the upstream model has completely finished running. + +
+ +
+ +The outcome of compiling a model is updating its Manifest entry in two important ways: +- `compiled` is set to `True` +- `compiled_code` is populated with (what else) the compiled code for this model + +### Running / materializing a model + +A model’s `compiled_code` is passed into the materialization macro, and the materialization macro is executed. That materialization macro will also call user-provided pre- and post-hooks, and other built-in macros that return the appropriate DDL + DML statements (`create`, `alter`, `merge`, etc.) + +(For legacy reasons, `compiled_code` is also available as a context variable named [`sql`](https://github.com/dbt-labs/dbt-core/blob/16f529e1d4e067bdbb6a659a622bead442f24b4e/core/dbt/context/providers.py#L1314-L1323). You'll see it referenced as `sql` in some materializations. Going forward, `model['compiled_code']` is a better way to access this.) + +## Why does it matter? + +Keeping these pieces of logic separate is one of the most important & opinionated abstractions offered by dbt. + +- **The separation of "control plane" logic** (configurations & shape of the DAG) **from "data plane" logic** (how data should be manipulated & transformed remotely). + - You must declare all dependencies & configurations ahead of time, rather than imperatively redefining them at runtime. You cannot dynamically redefine the DAG on the basis of a query result. + - This is limiting for some advanced use cases, but it prevents you from solving hard problems in exactly the wrong ways. +- **The separation of modeling code** ("logical" transformation written in SQL, or DataFrame manipulations) **from materialization code** ("physical" state changes via DDL/DML)**.** + - Every model is "just" a `select` statement (for Jinja-SQL models), or a Python DataFrame (for dbt-py models). It can be developed, previewed, and tested as such, *without* mutating database state. Those mutations are defined declaratively, with reusable boilerplate ("view" vs. "table" vs. "incremental"), rather than imperatively each time. + + +## Appendix + +
+Click to toggle notes on parsing + +### Notes on parsing + +- **dbt has not yet connected to a database.** Every step performed thus far has required only project files, configuration, and `dbt-core`. You can perform parsing without an Internet connection. +- There is a command called `parse`, which does **just** "parsing" + "resolving," as a way to measure parsing performance in large projects. That command is the fastest way to write `manifest.json` (since v1.5). +- In large projects, the parsing step can also be quite slow: reading lots of files, doing lots of dataclass validation, creating lots of links between lots of nodes. (See below for details on two potential optimizations.) + +### Two potential optimizations + +1. [**"Partial parsing."**](https://docs.getdbt.com/reference/parsing#partial-parsing) dbt saves the mostly-done Manifest from last time, in a file called `target/partial_parse.msgpack`. dbt **just** reads the files that have changed (based on file system metadata), and makes partial updates to that mostly-done Manifest. Of course, if a user has updated configuration that could be relevant globally (e.g. `dbt_project.yml`, `--vars`), we have to opt for a full re-parse — better safe (slow & correct) than sorry (fast & incorrect). +2. [**"Reusing manifests."**](https://docs.getdbt.com/reference/programmatic-invocations#reusing-objects) Note that this is taking "full control," and there are failure modes (example: [dbt-core#7945](https://github.com/dbt-labs/dbt-core/issues/7945)). + +
diff --git a/docs/roadmap/2023-11-dbt-tng.md b/docs/roadmap/2023-11-dbt-tng.md new file mode 100644 index 00000000000..7c6f235a5a6 --- /dev/null +++ b/docs/roadmap/2023-11-dbt-tng.md @@ -0,0 +1,107 @@ +# dbt: The Next Generation (November 2023) + +To everyone we saw at [Coalesce](https://coalesce.getdbt.com/) last month: thank you for joining us! We got up on stage and shared the next chapters from this year’s featured stories: about [collaborating across multiple teams and projects at scale](https://www.youtube.com/watch?v=NIseH-Gd-U4); about [relaunching the dbt Semantic Layer](https://www.youtube.com/watch?v=2Qo5_CIsSH4); about [more flexibility in development](https://www.youtube.com/watch?v=UfraDWKsSvU); and about [more mature CI/CD](https://www.youtube.com/watch?v=3sp6tmYykVc). To anyone who missed us live, [catch us on the replays](https://www.youtube.com/@dbt-labs)! + +These are stories that span both dbt Core and dbt Cloud. Our aim is to push forward the open source standard for analytics engineering, and also the platform that makes it possible for more teams to adopt & deploy dbt at scale. + +In [his keynote presentation](https://youtu.be/lNZLcsHAdco?si=FdtTOOIokvm1pT8D&t=637), Tristan talked about these two priorities for dbt Labs. We remain committed to dbt Core, as a standard for the industry, and an open source project under an Apache 2 license. We are also committed to creating a business around dbt Cloud that is sustainable over the long term, to enable us to continue to invest in driving dbt forward. + +Those two goals are inseparable. To make them both happen, we need to strike an important balance. What has it looked like over the last six months, and what will it look like for the six months ahead? + +_[JC](https://github.com/jtcohen6) & [GG](https://github.com/graciegoheen)*_ + +> *Also, hi! I’m Grace Goheen, or [@graciegoheen](https://github.com/graciegoheen). Long time dbt user, new to the dbt Core product team. I joined the Professional Services team at dbt Labs back in 2021, where I’ve since had the opportunity to work hands-on in dozens of dbt projects - leading legacy migrations, consulting on architecture, optimizing project performance, and more. I lived through the joy (lineage! testing! documentation!) and pain (spaghetti DAGs! model bottlenecks! debugging code!) of being an analytics engineer, and realized I wanted to be a part of shaping the tool at the center of it all. So here I am, the newest Product Manager of dbt Core! I am so grateful to be building this industry-defining tool with all of you. +> + +# The last six months: scale + +| Version | When | Namesake | Stuff | +| --- | --- | --- | --- | +| [v1.5](https://docs.getdbt.com/guides/migration/versions/upgrading-to-v1.5) | April | [Dawn Staley](https://github.com/dbt-labs/dbt-core/releases/tag/v1.5.0#:~:text=Dawn%20Staley%20(b.%201970)) | Revamped CLI. Programmatic invocations. Model governance features (contracts, access, groups, versions). | +| [v1.6](https://docs.getdbt.com/guides/migration/versions/upgrading-to-v1.6) | July | [Quiara Alegría Hudes](https://github.com/dbt-labs/dbt-core/releases/tag/v1.6.0#:~:text=Quiara%20Alegr%C3%ADa%20Hudes%20(b.%201977)) | New Semantic Layer spec. More on model governance (deprecations). Saving time and $$ with retry + clone. Initial rollout of materialized views. | +| [v1.7](https://docs.getdbt.com/guides/migration/versions/upgrading-to-v1.7) | November | Questlove | More flexible access to "applied state" in docs generate and source freshness. 
Improvements to model governance & semantic layer features (driven by user feedback). | + +We added a **lot** of stuff this year! Over the past three dbt Core minor versions, we’ve managed to knock out a litany of the most popular issues and discussions gathered over the past several years: + +- [CLI preview](https://github.com/dbt-labs/dbt-core/discussions/5418) (1.5) +- [Invoking dbt as a Python module](https://github.com/dbt-labs/dbt-core/issues/2013) (1.5) +- [Materialized views](https://github.com/dbt-labs/dbt-core/issues/1162) (1.6) +- [Namespacing for dbt resources](https://github.com/dbt-labs/dbt-core/issues/1269) (1.6), in support of [multi-project collaboration](https://github.com/dbt-labs/dbt-core/discussions/6725) +- [`docs generate --select` for slimmer catalog queries](https://github.com/dbt-labs/dbt-core/issues/6014) (1.7) +- And… we’re finally taking aim at [unit testing for dbt-SQL models](https://github.com/dbt-labs/dbt-core/discussions/8275) (!), coming in 1.8, which you should read more about in the section below. + +Thank you for providing your upvotes, comments, and feedback. One of the best things about building dbt Core in the open is that we are all pushing forward the analytics engineering standard together. We’re able to prioritize these features and paper cuts because of your participation. + +We’ve got lots more to build - there are some highly upvoted issues and discussions that remain, and gaps in the analytics engineering workflow that we want to close. But before we keep building, we must ensure our foundation is a **stable** one. + +# The next six months: stability (& unit testing!) + +| Version | When | Stuff | Confidence | +| --- | --- | --- | --- | +| 1.8 | Spring 2024 | Stable interfaces for adapters & artifacts. Built-in support for unit testing dbt models. | 80% | + +Since the v1.0 release of dbt Core (almost two years ago), we’ve released a minor version of dbt Core every three months. The January release (post-Coalesce, post-holidays) tends to be an understated affair: tech debt, bug fixes, support for Python 3-dot-new. + +We’ve been measuring the rate of adoption for new versions, and we’ve seen that it takes more than 3 months for the wider user base to really adopt them. The plurality of dbt projects in the world are using a dbt Core version released between 6 and 12 months ago. We think this speaks to two things: it’s harder to upgrade than it should be; and we can afford to take more time baking new releases. + +Between now and next April (2024), we plan to instead prepare **one** minor release that prioritizes **all-around interface stability**. We want to make it easier for _everyone_ to upgrade with confidence, regardless of their adapter or other integrated tooling. There is a _lot_ of value locked up in the features we’ve already released in 2023, and we want to lower the barrier for *tens of thousands* of existing projects that are still on older versions. That work is important, it takes time, and it has long-lasting implications. + +### Adapters & artifacts + +With the v1.0 release, [we committed](https://www.getdbt.com/blog/getting-ready-for-v1-0) to minimizing breaking changes to project code, so that end users would be able to upgrade more easily. We haven’t perfected this, most notably earlier this year when we did a full relaunch of the metrics spec for the Semantic Layer. We are committed to getting better here.
+ +Even in v1.0, though, we intentionally carved out two less-stable interfaces, which would continue to evolve in minor releases: **adapter plugins** and **metadata artifacts**. At the time, these interfaces were newer and rapidly changing. Almost every minor version upgrade, from v1.0 through v1.7, has required some fast-follow compatibility changes for adapters and for tools that parse dbt manifests. + +This has been particularly difficult for adapter maintainers. As of this writing, while [the majority of third-party adapters support v1.4](https://github.com/dbt-labs/dbt-core/discussions/6624#discussioncomment-5663823) (released in January), [just over a third support v1.5](https://github.com/dbt-labs/dbt-core/discussions/7213#discussioncomment-5663790) (April), and [only a handful support v1.6](https://github.com/dbt-labs/dbt-core/discussions/7958#discussioncomment-6310276) (July). It isn’t fair of us to keep releasing in a way that *requires* this reactive compatibility work every 3 months. Instead, we will be defining a stable interface for adapters, in a separate codebase and versioned separately from dbt Core. Starting in v1.8, it will be forward-compatible for future versions. If you want to use `dbt-core` v1.X with `dbt-duckdb` v1.Y, you will be able to. + +For most people, we don’t want you to have to think about versions _at all_: just use latest & greatest dbt Core. For customers and users of dbt Cloud, this is the experience we want to provide: delivering dbt Core and dbt Cloud together, as one integrated and continuously delivered SaaS application — an experience where you don’t need to think about versions or upgrading, and where you get access to Cloud-enhanced & Cloud-only features as a matter of course. + +**An aside:** This was the first year in which we delivered [some functionality like that](https://github.com/dbt-labs/dbt-core/discussions/6725): built it in such a way that it _feels like Core_ while being actually powered by (and exclusive to) dbt Cloud. This has long been our pattern: Core defines the spec, and Cloud the scalable implementation, especially for Enterprise-geared functionality. + +I (Jeremy) wish I had communicated this delineation more clearly, and from the start. We are going to continue telling unified stories, across mature capabilities in Core and newer ones in Cloud, and we want all of you — open source community members, Cloud customers, longtime data practitioners and more-recent arrivals — to know that you are along for this journey with us. + +### Summary: continuous & stable delivery + +Over the next 6-12 months, we will be spending less time on totally new constructs in dbt Core, and more time on the fundamentals that are already there: stabilizing, maintaining, iterating, improving. + +dbt Cloud customers will see enhancements and under-the-hood improvements delivered continuously, as we move towards this model of increased stability. Features that fit inside dbt Core’s traditional scope will also land in a subsequent minor version of dbt Core. + +This is an important part of our evolving story: a compelling commercial offering that makes it possible for us to keep developing, maintaining, and distributing dbt Core as Apache 2 software. + +## Onwards + +dbt Core is as it has always been: an open source standard. It’s a framework, a coherent set of ideas, and a fully functional standalone tool that anyone can take for a spin — adopt, extend, integrate, imitate — without ever needing to ask us for permission. 
Adapters will keep moving at the pace of innovation for their respective data warehouses. dbt Docs remains a great "single-player" experience for getting hooked on data documentation. (The aesthetic isn’t dated, it’s *[retro](https://github.com/lightdash/dbt-docs-95).*) dbt Core remains the industry-defining way to author analytical models and ensure their quality in production. + +But wait! + +As many of you have voiced, there’s been no good way to ensure your SQL logic is correct without running expensive queries against your full production data. dbt does not have native unit testing functionality… yet. This gap in the standard is one we have been eager to work on, and we’re planning to land it in the next minor release of dbt Core. + +### What is unit testing in dbt? + +For many years, dbt has supported "data" tests — testing your *data outputs* (dbt models, snapshots, seeds, etc.) based on that environment’s actual *inputs* (dbt sources in your warehouse), and ensuring the resulting datasets match your defined expectations. + +Soon, we’re introducing "unit" tests — testing your modeling *logic,* using a small set of static inputs, to validate that your code is working as expected, faster and cheaper. + +### What’s the plan? + +Thank you to everyone who has already provided feedback and thoughts on our [unit testing discussion](https://github.com/dbt-labs/dbt-core/discussions/8275) — or, we should say our _new_ unit testing discussion, since Michelle opened the [original one](https://github.com/dbt-labs/dbt-core/discussions/4455) back in 2020, before she joined dbt Labs :) + +We truly appreciate the amount of insights and energy y’all have already poured into helping us make sure we build the right thing. + +We are actively working on this feature and expect it to be ready for you all in our `1.8` release next year! If you have thoughts or opinions, please keep commenting in the discussion. We’re also planning a community feedback session for unit testing once we’ve released an initial beta of `1.8`, so keep an eye out. + +### Bugs, regressions, paper cuts, ... + +We will continue to respond to your issues and review your PRs. We will continue to resolve regressions and high-priority bugs, as fast as we’re able, and include those fixes in regular patch releases. + +Along with fixing bugs and regressions, we’d also like to keep tackling some of the highly requested "paper cuts". Thank you to all those who have expressed their interest by upvoting and commenting. + +We’re unlikely to tackle all of these things in v1.8 — they’re lower-priority than the interface stability work, which we must do — but they are all legitimate opportunities to solidify the existing, well-established Core functionality: + +- [Allow data tests to be documented](https://github.com/dbt-labs/dbt-core/issues/2578) +- [Snapshot paper cuts](https://github.com/dbt-labs/dbt-core/discussions/7018) +- [Making external tables native to dbt-core](https://github.com/dbt-labs/dbt-core/discussions/8617) +- [Defining vars, folder-level configs outside `dbt_project.yml`](https://github.com/dbt-labs/dbt-core/issues/2955) +- [Supporting additional formats for seeds](https://github.com/dbt-labs/dbt-core/issues/2365) (JSON) + +
diff --git a/editable-requirements.txt b/editable-requirements.txt index 28fa6bc216d..6f81820bb96 100644 --- a/editable-requirements.txt +++ b/editable-requirements.txt @@ -1,3 +1 @@ -e ./core --e ./plugins/postgres --e ./tests/adapter diff --git a/plugins/bigquery/README.md b/plugins/bigquery/README.md deleted file mode 100644 index 2a5d6064a1f..00000000000 --- a/plugins/bigquery/README.md +++ /dev/null @@ -1,3 +0,0 @@ -### dbt-bigquery - -This plugin has moved! https://github.com/dbt-labs/dbt-bigquery diff --git a/plugins/postgres/README.md b/plugins/postgres/README.md deleted file mode 100644 index 62858c4d727..00000000000 --- a/plugins/postgres/README.md +++ /dev/null @@ -1,36 +0,0 @@ -

- dbt logo -

-

- - CI Badge - -

- -**[dbt](https://www.getdbt.com/)** enables data analysts and engineers to transform their data using the same practices that software engineers use to build applications. - -dbt is the T in ELT. Organize, cleanse, denormalize, filter, rename, and pre-aggregate the raw data in your warehouse so that it's ready for analysis. - -## dbt-postgres - -The `dbt-postgres` package contains all of the code enabling dbt to work with a Postgres database. For -more information on using dbt with Postgres, consult [the docs](https://docs.getdbt.com/docs/profile-postgres). - -## Getting started - -- [Install dbt](https://docs.getdbt.com/docs/installation) -- Read the [introduction](https://docs.getdbt.com/docs/introduction/) and [viewpoint](https://docs.getdbt.com/docs/about/viewpoint/) - -## Join the dbt Community - -- Be part of the conversation in the [dbt Community Slack](http://community.getdbt.com/) -- Read more on the [dbt Community Discourse](https://discourse.getdbt.com) - -## Reporting bugs and contributing code - -- Want to report a bug or request a feature? Let us know on [Slack](http://community.getdbt.com/), or open [an issue](https://github.com/dbt-labs/dbt-core/issues/new) -- Want to help us build dbt? Check out the [Contributing Guide](https://github.com/dbt-labs/dbt-core/blob/HEAD/CONTRIBUTING.md) - -## Code of Conduct - -Everyone interacting in the dbt project's codebases, issue trackers, chat rooms, and mailing lists is expected to follow the [dbt Code of Conduct](https://community.getdbt.com/code-of-conduct). diff --git a/plugins/postgres/dbt/__init__.py b/plugins/postgres/dbt/__init__.py deleted file mode 100644 index 3a7ded78b77..00000000000 --- a/plugins/postgres/dbt/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# N.B. -# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters) -# The matching statement is in core/dbt/__init__.py - -from pkgutil import extend_path - -__path__ = extend_path(__path__, __name__) diff --git a/plugins/postgres/dbt/adapters/__init__.py b/plugins/postgres/dbt/adapters/__init__.py deleted file mode 100644 index 65bb44b672e..00000000000 --- a/plugins/postgres/dbt/adapters/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# N.B. 
-# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package which effectively combines both modules into a single namespace (dbt.adapters) -# The matching statement is in core/dbt/adapters/__init__.py - -from pkgutil import extend_path - -__path__ = extend_path(__path__, __name__) diff --git a/plugins/postgres/dbt/adapters/postgres/__init__.py b/plugins/postgres/dbt/adapters/postgres/__init__.py deleted file mode 100644 index 38dce8bdb22..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/__init__.py +++ /dev/null @@ -1,13 +0,0 @@ -# these are mostly just exports, #noqa them so flake8 will be happy -from dbt.adapters.postgres.connections import PostgresConnectionManager # noqa -from dbt.adapters.postgres.connections import PostgresCredentials -from dbt.adapters.postgres.column import PostgresColumn # noqa -from dbt.adapters.postgres.relation import PostgresRelation # noqa: F401 -from dbt.adapters.postgres.impl import PostgresAdapter - -from dbt.adapters.base import AdapterPlugin -from dbt.include import postgres - -Plugin = AdapterPlugin( - adapter=PostgresAdapter, credentials=PostgresCredentials, include_path=postgres.PACKAGE_PATH -) diff --git a/plugins/postgres/dbt/adapters/postgres/__version__.py b/plugins/postgres/dbt/adapters/postgres/__version__.py deleted file mode 100644 index 874bd74c8ac..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/__version__.py +++ /dev/null @@ -1 +0,0 @@ -version = "1.7.0a1" diff --git a/plugins/postgres/dbt/adapters/postgres/column.py b/plugins/postgres/dbt/adapters/postgres/column.py deleted file mode 100644 index 686ec0cb8a4..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/column.py +++ /dev/null @@ -1,12 +0,0 @@ -from dbt.adapters.base import Column - - -class PostgresColumn(Column): - @property - def data_type(self): - # on postgres, do not convert 'text' or 'varchar' to 'varchar()' - if self.dtype.lower() == "text" or ( - self.dtype.lower() == "character varying" and self.char_size is None - ): - return self.dtype - return super().data_type diff --git a/plugins/postgres/dbt/adapters/postgres/connections.py b/plugins/postgres/dbt/adapters/postgres/connections.py deleted file mode 100644 index 2a1b4e13420..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/connections.py +++ /dev/null @@ -1,204 +0,0 @@ -from contextlib import contextmanager - -import psycopg2 -from psycopg2.extensions import string_types - -import dbt.exceptions -from dbt.adapters.base import Credentials -from dbt.adapters.sql import SQLConnectionManager -from dbt.contracts.connection import AdapterResponse -from dbt.events import AdapterLogger - -from dbt.helper_types import Port -from dataclasses import dataclass -from typing import Optional - - -logger = AdapterLogger("Postgres") - - -@dataclass -class PostgresCredentials(Credentials): - host: str - user: str - port: Port - password: str # on postgres the password is mandatory - connect_timeout: int = 10 - role: Optional[str] = None - search_path: Optional[str] = None - keepalives_idle: int = 0 # 0 means to use the default value - sslmode: Optional[str] = None - sslcert: Optional[str] = None - sslkey: Optional[str] = None - sslrootcert: Optional[str] = None - application_name: Optional[str] = "dbt" - retries: int = 1 - - _ALIASES = {"dbname": "database", "pass": "password"} - - @property - def type(self): - return "postgres" - - @property - def unique_field(self): - return self.host - - def _connection_keys(self): - return ( - "host", - "port", - 
"user", - "database", - "schema", - "connect_timeout", - "role", - "search_path", - "keepalives_idle", - "sslmode", - "sslcert", - "sslkey", - "sslrootcert", - "application_name", - "retries", - ) - - -class PostgresConnectionManager(SQLConnectionManager): - TYPE = "postgres" - - @contextmanager - def exception_handler(self, sql): - try: - yield - - except psycopg2.DatabaseError as e: - logger.debug("Postgres error: {}".format(str(e))) - - try: - self.rollback_if_open() - except psycopg2.Error: - logger.debug("Failed to release connection!") - pass - - raise dbt.exceptions.DbtDatabaseError(str(e).strip()) from e - - except Exception as e: - logger.debug("Error running SQL: {}", sql) - logger.debug("Rolling back transaction.") - self.rollback_if_open() - if isinstance(e, dbt.exceptions.DbtRuntimeError): - # during a sql query, an internal to dbt exception was raised. - # this sounds a lot like a signal handler and probably has - # useful information, so raise it without modification. - raise - - raise dbt.exceptions.DbtRuntimeError(e) from e - - @classmethod - def open(cls, connection): - if connection.state == "open": - logger.debug("Connection is already open, skipping open.") - return connection - - credentials = cls.get_credentials(connection.credentials) - kwargs = {} - # we don't want to pass 0 along to connect() as postgres will try to - # call an invalid setsockopt() call (contrary to the docs). - if credentials.keepalives_idle: - kwargs["keepalives_idle"] = credentials.keepalives_idle - - # psycopg2 doesn't support search_path officially, - # see https://github.com/psycopg/psycopg2/issues/465 - search_path = credentials.search_path - if search_path is not None and search_path != "": - # see https://postgresql.org/docs/9.5/libpq-connect.html - kwargs["options"] = "-c search_path={}".format(search_path.replace(" ", "\\ ")) - - if credentials.sslmode: - kwargs["sslmode"] = credentials.sslmode - - if credentials.sslcert is not None: - kwargs["sslcert"] = credentials.sslcert - - if credentials.sslkey is not None: - kwargs["sslkey"] = credentials.sslkey - - if credentials.sslrootcert is not None: - kwargs["sslrootcert"] = credentials.sslrootcert - - if credentials.application_name: - kwargs["application_name"] = credentials.application_name - - def connect(): - handle = psycopg2.connect( - dbname=credentials.database, - user=credentials.user, - host=credentials.host, - password=credentials.password, - port=credentials.port, - connect_timeout=credentials.connect_timeout, - **kwargs, - ) - if credentials.role: - handle.cursor().execute("set role {}".format(credentials.role)) - return handle - - retryable_exceptions = [ - # OperationalError is subclassed by all psycopg2 Connection Exceptions and it's raised - # by generic connection timeouts without an error code. This is a limitation of - # psycopg2 which doesn't provide subclasses for errors without a SQLSTATE error code. - # The limitation has been known for a while and there are no efforts to tackle it. 
- # See: https://github.com/psycopg/psycopg2/issues/682 - psycopg2.errors.OperationalError, - ] - - def exponential_backoff(attempt: int): - return attempt * attempt - - return cls.retry_connection( - connection, - connect=connect, - logger=logger, - retry_limit=credentials.retries, - retry_timeout=exponential_backoff, - retryable_exceptions=retryable_exceptions, - ) - - def cancel(self, connection): - connection_name = connection.name - try: - pid = connection.handle.get_backend_pid() - except psycopg2.InterfaceError as exc: - # if the connection is already closed, not much to cancel! - if "already closed" in str(exc): - logger.debug(f"Connection {connection_name} was already closed") - return - # probably bad, re-raise it - raise - - sql = "select pg_terminate_backend({})".format(pid) - - logger.debug("Cancelling query '{}' ({})".format(connection_name, pid)) - - _, cursor = self.add_query(sql) - res = cursor.fetchone() - - logger.debug("Cancel query '{}': {}".format(connection_name, res)) - - @classmethod - def get_credentials(cls, credentials): - return credentials - - @classmethod - def get_response(cls, cursor) -> AdapterResponse: - message = str(cursor.statusmessage) - rows = cursor.rowcount - status_message_parts = message.split() if message is not None else [] - status_messsage_strings = [part for part in status_message_parts if not part.isdigit()] - code = " ".join(status_messsage_strings) - return AdapterResponse(_message=message, code=code, rows_affected=rows) - - @classmethod - def data_type_code_to_name(cls, type_code: int) -> str: - return string_types[type_code].name diff --git a/plugins/postgres/dbt/adapters/postgres/impl.py b/plugins/postgres/dbt/adapters/postgres/impl.py deleted file mode 100644 index adffc4d3a62..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/impl.py +++ /dev/null @@ -1,145 +0,0 @@ -from datetime import datetime -from dataclasses import dataclass -from typing import Optional, Set, List, Any - -from dbt.adapters.base.meta import available -from dbt.adapters.base.impl import AdapterConfig, ConstraintSupport -from dbt.adapters.sql import SQLAdapter -from dbt.adapters.postgres import PostgresConnectionManager -from dbt.adapters.postgres.column import PostgresColumn -from dbt.adapters.postgres import PostgresRelation -from dbt.dataclass_schema import dbtClassMixin, ValidationError -from dbt.contracts.graph.nodes import ConstraintType -from dbt.exceptions import ( - CrossDbReferenceProhibitedError, - IndexConfigNotDictError, - IndexConfigError, - DbtRuntimeError, - UnexpectedDbReferenceError, -) -import dbt.utils - - -GET_RELATIONS_MACRO_NAME = "postgres__get_relations" - - -@dataclass -class PostgresIndexConfig(dbtClassMixin): - columns: List[str] - unique: bool = False - type: Optional[str] = None - - def render(self, relation): - # We append the current timestamp to the index name because otherwise - # the index will only be created on every other run. See - # https://github.com/dbt-labs/dbt-core/issues/1945#issuecomment-576714925 - # for an explanation. 
- now = datetime.utcnow().isoformat() - inputs = self.columns + [relation.render(), str(self.unique), str(self.type), now] - string = "_".join(inputs) - return dbt.utils.md5(string) - - @classmethod - def parse(cls, raw_index) -> Optional["PostgresIndexConfig"]: - if raw_index is None: - return None - try: - cls.validate(raw_index) - return cls.from_dict(raw_index) - except ValidationError as exc: - raise IndexConfigError(exc) - except TypeError: - raise IndexConfigNotDictError(raw_index) - - -@dataclass -class PostgresConfig(AdapterConfig): - unlogged: Optional[bool] = None - indexes: Optional[List[PostgresIndexConfig]] = None - - -class PostgresAdapter(SQLAdapter): - Relation = PostgresRelation - ConnectionManager = PostgresConnectionManager - Column = PostgresColumn - - AdapterSpecificConfigs = PostgresConfig - - CONSTRAINT_SUPPORT = { - ConstraintType.check: ConstraintSupport.ENFORCED, - ConstraintType.not_null: ConstraintSupport.ENFORCED, - ConstraintType.unique: ConstraintSupport.ENFORCED, - ConstraintType.primary_key: ConstraintSupport.ENFORCED, - ConstraintType.foreign_key: ConstraintSupport.ENFORCED, - } - - @classmethod - def date_function(cls): - return "now()" - - @available - def verify_database(self, database): - if database.startswith('"'): - database = database.strip('"') - expected = self.config.credentials.database - if database.lower() != expected.lower(): - raise UnexpectedDbReferenceError(self.type(), database, expected) - # return an empty string on success so macros can call this - return "" - - @available - def parse_index(self, raw_index: Any) -> Optional[PostgresIndexConfig]: - return PostgresIndexConfig.parse(raw_index) - - def _link_cached_database_relations(self, schemas: Set[str]): - """ - :param schemas: The set of schemas that should have links added. - """ - database = self.config.credentials.database - table = self.execute_macro(GET_RELATIONS_MACRO_NAME) - - for (dep_schema, dep_name, refed_schema, refed_name) in table: - dependent = self.Relation.create( - database=database, schema=dep_schema, identifier=dep_name - ) - referenced = self.Relation.create( - database=database, schema=refed_schema, identifier=refed_name - ) - - # don't record in cache if this relation isn't in a relevant - # schema - if refed_schema.lower() in schemas: - self.cache.add_link(referenced, dependent) - - def _get_catalog_schemas(self, manifest): - # postgres only allow one database (the main one) - schemas = super()._get_catalog_schemas(manifest) - try: - return schemas.flatten() - except DbtRuntimeError as exc: - raise CrossDbReferenceProhibitedError(self.type(), exc.msg) - - def _link_cached_relations(self, manifest): - schemas: Set[str] = set() - relations_schemas = self._get_cache_schemas(manifest) - for relation in relations_schemas: - self.verify_database(relation.database) - schemas.add(relation.schema.lower()) - - self._link_cached_database_relations(schemas) - - def _relations_cache_for_schemas(self, manifest, cache_schemas=None): - super()._relations_cache_for_schemas(manifest, cache_schemas) - self._link_cached_relations(manifest) - - def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: - return f"{add_to} + interval '{number} {interval}'" - - def valid_incremental_strategies(self): - """The set of standard builtin strategies which this adapter supports out-of-the-box. - Not used to validate custom strategies defined by end users. 
- """ - return ["append", "delete+insert", "merge"] - - def debug_query(self): - self.execute("select 1 as id") diff --git a/plugins/postgres/dbt/adapters/postgres/relation.py b/plugins/postgres/dbt/adapters/postgres/relation.py deleted file mode 100644 index 43822efb11f..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation.py +++ /dev/null @@ -1,91 +0,0 @@ -from dataclasses import dataclass -from typing import Optional, Set, FrozenSet - -from dbt.adapters.base.relation import BaseRelation -from dbt.adapters.relation_configs import ( - RelationConfigChangeAction, - RelationResults, -) -from dbt.context.providers import RuntimeConfigObject -from dbt.exceptions import DbtRuntimeError - -from dbt.adapters.postgres.relation_configs import ( - PostgresIndexConfig, - PostgresIndexConfigChange, - PostgresMaterializedViewConfig, - PostgresMaterializedViewConfigChangeCollection, - MAX_CHARACTERS_IN_IDENTIFIER, -) - - -@dataclass(frozen=True, eq=False, repr=False) -class PostgresRelation(BaseRelation): - def __post_init__(self): - # Check for length of Postgres table/view names. - # Check self.type to exclude test relation identifiers - if ( - self.identifier is not None - and self.type is not None - and len(self.identifier) > self.relation_max_name_length() - ): - raise DbtRuntimeError( - f"Relation name '{self.identifier}' " - f"is longer than {self.relation_max_name_length()} characters" - ) - - def relation_max_name_length(self): - return MAX_CHARACTERS_IN_IDENTIFIER - - def get_materialized_view_config_change_collection( - self, relation_results: RelationResults, runtime_config: RuntimeConfigObject - ) -> Optional[PostgresMaterializedViewConfigChangeCollection]: - config_change_collection = PostgresMaterializedViewConfigChangeCollection() - - existing_materialized_view = PostgresMaterializedViewConfig.from_relation_results( - relation_results - ) - new_materialized_view = PostgresMaterializedViewConfig.from_model_node( - runtime_config.model - ) - - config_change_collection.indexes = self._get_index_config_changes( - existing_materialized_view.indexes, new_materialized_view.indexes - ) - - # we return `None` instead of an empty `PostgresMaterializedViewConfigChangeCollection` object - # so that it's easier and more extensible to check in the materialization: - # `core/../materializations/materialized_view.sql` : - # {% if configuration_changes is none %} - if config_change_collection.has_changes: - return config_change_collection - - def _get_index_config_changes( - self, - existing_indexes: FrozenSet[PostgresIndexConfig], - new_indexes: FrozenSet[PostgresIndexConfig], - ) -> Set[PostgresIndexConfigChange]: - """ - Get the index updates that will occur as a result of a new run - - There are four scenarios: - - 1. Indexes are equal -> don't return these - 2. Index is new -> create these - 3. Index is old -> drop these - 4. 
Indexes are not equal -> drop old, create new -> two actions - - Returns: a set of index updates in the form {"action": "drop/create", "context": } - """ - drop_changes = set( - PostgresIndexConfigChange.from_dict( - {"action": RelationConfigChangeAction.drop, "context": index} - ) - for index in existing_indexes.difference(new_indexes) - ) - create_changes = set( - PostgresIndexConfigChange.from_dict( - {"action": RelationConfigChangeAction.create, "context": index} - ) - for index in new_indexes.difference(existing_indexes) - ) - return set().union(drop_changes, create_changes) diff --git a/plugins/postgres/dbt/adapters/postgres/relation_configs/__init__.py b/plugins/postgres/dbt/adapters/postgres/relation_configs/__init__.py deleted file mode 100644 index 9fdb942bfa5..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation_configs/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -from dbt.adapters.postgres.relation_configs.constants import ( # noqa: F401 - MAX_CHARACTERS_IN_IDENTIFIER, -) -from dbt.adapters.postgres.relation_configs.index import ( # noqa: F401 - PostgresIndexConfig, - PostgresIndexConfigChange, -) -from dbt.adapters.postgres.relation_configs.materialized_view import ( # noqa: F401 - PostgresMaterializedViewConfig, - PostgresMaterializedViewConfigChangeCollection, -) diff --git a/plugins/postgres/dbt/adapters/postgres/relation_configs/constants.py b/plugins/postgres/dbt/adapters/postgres/relation_configs/constants.py deleted file mode 100644 index 9228df23043..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation_configs/constants.py +++ /dev/null @@ -1 +0,0 @@ -MAX_CHARACTERS_IN_IDENTIFIER = 63 diff --git a/plugins/postgres/dbt/adapters/postgres/relation_configs/index.py b/plugins/postgres/dbt/adapters/postgres/relation_configs/index.py deleted file mode 100644 index 3a072ea4307..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation_configs/index.py +++ /dev/null @@ -1,165 +0,0 @@ -from dataclasses import dataclass, field -from typing import Set, FrozenSet - -import agate -from dbt.dataclass_schema import StrEnum -from dbt.exceptions import DbtRuntimeError -from dbt.adapters.relation_configs import ( - RelationConfigBase, - RelationConfigValidationMixin, - RelationConfigValidationRule, - RelationConfigChangeAction, - RelationConfigChange, -) - - -class PostgresIndexMethod(StrEnum): - btree = "btree" - hash = "hash" - gist = "gist" - spgist = "spgist" - gin = "gin" - brin = "brin" - - @classmethod - def default(cls) -> "PostgresIndexMethod": - return cls.btree - - -@dataclass(frozen=True, eq=True, unsafe_hash=True) -class PostgresIndexConfig(RelationConfigBase, RelationConfigValidationMixin): - """ - This config fallows the specs found here: - https://www.postgresql.org/docs/current/sql-createindex.html - - The following parameters are configurable by dbt: - - name: the name of the index in the database, this isn't predictable since we apply a timestamp - - unique: checks for duplicate values when the index is created and on data updates - - method: the index method to be used - - column_names: the columns in the index - - Applicable defaults for non-configurable parameters: - - concurrently: `False` - - nulls_distinct: `True` - """ - - name: str = field(default=None, hash=False, compare=False) - column_names: FrozenSet[str] = field(default_factory=frozenset, hash=True) - unique: bool = field(default=False, hash=True) - method: PostgresIndexMethod = field(default=PostgresIndexMethod.default(), hash=True) - - @property - def 
validation_rules(self) -> Set[RelationConfigValidationRule]: - return { - RelationConfigValidationRule( - validation_check=self.column_names is not None, - validation_error=DbtRuntimeError( - "Indexes require at least one column, but none were provided" - ), - ), - } - - @classmethod - def from_dict(cls, config_dict) -> "PostgresIndexConfig": - # TODO: include the QuotePolicy instead of defaulting to lower() - kwargs_dict = { - "name": config_dict.get("name"), - "column_names": frozenset( - column.lower() for column in config_dict.get("column_names", set()) - ), - "unique": config_dict.get("unique"), - "method": config_dict.get("method"), - } - index: "PostgresIndexConfig" = super().from_dict(kwargs_dict) # type: ignore - return index - - @classmethod - def parse_model_node(cls, model_node_entry: dict) -> dict: - config_dict = { - "column_names": set(model_node_entry.get("columns", set())), - "unique": model_node_entry.get("unique"), - "method": model_node_entry.get("type"), - } - return config_dict - - @classmethod - def parse_relation_results(cls, relation_results_entry: agate.Row) -> dict: - config_dict = { - "name": relation_results_entry.get("name"), - "column_names": set(relation_results_entry.get("column_names", "").split(",")), - "unique": relation_results_entry.get("unique"), - "method": relation_results_entry.get("method"), - } - return config_dict - - @property - def as_node_config(self) -> dict: - """ - Returns: a dictionary that can be passed into `get_create_index_sql()` - """ - node_config = { - "columns": list(self.column_names), - "unique": self.unique, - "type": self.method.value, - } - return node_config - - -@dataclass(frozen=True, eq=True, unsafe_hash=True) -class PostgresIndexConfigChange(RelationConfigChange, RelationConfigValidationMixin): - """ - Example of an index change: - { - "action": "create", - "context": { - "name": "", # we don't know the name since it gets created as a hash at runtime - "columns": ["column_1", "column_3"], - "type": "hash", - "unique": True - } - }, - { - "action": "drop", - "context": { - "name": "index_abc", # we only need this to drop, but we need the rest to compare - "columns": ["column_1"], - "type": "btree", - "unique": True - } - } - """ - - context: PostgresIndexConfig - - @property - def requires_full_refresh(self) -> bool: - return False - - @property - def validation_rules(self) -> Set[RelationConfigValidationRule]: - return { - RelationConfigValidationRule( - validation_check=self.action - in {RelationConfigChangeAction.create, RelationConfigChangeAction.drop}, - validation_error=DbtRuntimeError( - "Invalid operation, only `drop` and `create` changes are supported for indexes." - ), - ), - RelationConfigValidationRule( - validation_check=not ( - self.action == RelationConfigChangeAction.drop and self.context.name is None - ), - validation_error=DbtRuntimeError( - "Invalid operation, attempting to drop an index with no name." - ), - ), - RelationConfigValidationRule( - validation_check=not ( - self.action == RelationConfigChangeAction.create - and self.context.column_names == set() - ), - validation_error=DbtRuntimeError( - "Invalid operations, attempting to create an index with no columns." 
- ), - ), - } diff --git a/plugins/postgres/dbt/adapters/postgres/relation_configs/materialized_view.py b/plugins/postgres/dbt/adapters/postgres/relation_configs/materialized_view.py deleted file mode 100644 index 15e700e777a..00000000000 --- a/plugins/postgres/dbt/adapters/postgres/relation_configs/materialized_view.py +++ /dev/null @@ -1,113 +0,0 @@ -from dataclasses import dataclass, field -from typing import Set, FrozenSet, List - -import agate -from dbt.adapters.relation_configs import ( - RelationConfigBase, - RelationResults, - RelationConfigValidationMixin, - RelationConfigValidationRule, -) -from dbt.contracts.graph.nodes import ModelNode -from dbt.exceptions import DbtRuntimeError - -from dbt.adapters.postgres.relation_configs.constants import MAX_CHARACTERS_IN_IDENTIFIER -from dbt.adapters.postgres.relation_configs.index import ( - PostgresIndexConfig, - PostgresIndexConfigChange, -) - - -@dataclass(frozen=True, eq=True, unsafe_hash=True) -class PostgresMaterializedViewConfig(RelationConfigBase, RelationConfigValidationMixin): - """ - This config follows the specs found here: - https://www.postgresql.org/docs/current/sql-creatematerializedview.html - - The following parameters are configurable by dbt: - - table_name: name of the materialized view - - query: the query that defines the view - - indexes: the collection (set) of indexes on the materialized view - - Applicable defaults for non-configurable parameters: - - method: `heap` - - tablespace_name: `default_tablespace` - - with_data: `True` - """ - - table_name: str = "" - query: str = "" - indexes: FrozenSet[PostgresIndexConfig] = field(default_factory=frozenset) - - @property - def validation_rules(self) -> Set[RelationConfigValidationRule]: - # index rules get run by default with the mixin - return { - RelationConfigValidationRule( - validation_check=self.table_name is None - or len(self.table_name) <= MAX_CHARACTERS_IN_IDENTIFIER, - validation_error=DbtRuntimeError( - f"The materialized view name is more than {MAX_CHARACTERS_IN_IDENTIFIER} " - f"characters: {self.table_name}" - ), - ), - } - - @classmethod - def from_dict(cls, config_dict: dict) -> "PostgresMaterializedViewConfig": - kwargs_dict = { - "table_name": config_dict.get("table_name"), - "query": config_dict.get("query"), - "indexes": frozenset( - PostgresIndexConfig.from_dict(index) for index in config_dict.get("indexes", {}) - ), - } - materialized_view: "PostgresMaterializedViewConfig" = super().from_dict(kwargs_dict) # type: ignore - return materialized_view - - @classmethod - def from_model_node(cls, model_node: ModelNode) -> "PostgresMaterializedViewConfig": - materialized_view_config = cls.parse_model_node(model_node) - materialized_view = cls.from_dict(materialized_view_config) - return materialized_view - - @classmethod - def parse_model_node(cls, model_node: ModelNode) -> dict: - indexes: List[dict] = model_node.config.extra.get("indexes", []) - config_dict = { - "table_name": model_node.identifier, - "query": model_node.compiled_code, - "indexes": [PostgresIndexConfig.parse_model_node(index) for index in indexes], - } - return config_dict - - @classmethod - def from_relation_results( - cls, relation_results: RelationResults - ) -> "PostgresMaterializedViewConfig": - materialized_view_config = cls.parse_relation_results(relation_results) - materialized_view = cls.from_dict(materialized_view_config) - return materialized_view - - @classmethod - def parse_relation_results(cls, relation_results: RelationResults) -> dict: - indexes: agate.Table = 
relation_results.get("indexes", agate.Table(rows={})) - config_dict = { - "indexes": [ - PostgresIndexConfig.parse_relation_results(index) for index in indexes.rows - ], - } - return config_dict - - -@dataclass -class PostgresMaterializedViewConfigChangeCollection: - indexes: Set[PostgresIndexConfigChange] = field(default_factory=set) - - @property - def requires_full_refresh(self) -> bool: - return any(index.requires_full_refresh for index in self.indexes) - - @property - def has_changes(self) -> bool: - return self.indexes != set() diff --git a/plugins/postgres/dbt/include/postgres/__init__.py b/plugins/postgres/dbt/include/postgres/__init__.py deleted file mode 100644 index b177e5d4932..00000000000 --- a/plugins/postgres/dbt/include/postgres/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -import os - -PACKAGE_PATH = os.path.dirname(__file__) diff --git a/plugins/postgres/dbt/include/postgres/dbt_project.yml b/plugins/postgres/dbt/include/postgres/dbt_project.yml deleted file mode 100644 index 081149f6fd7..00000000000 --- a/plugins/postgres/dbt/include/postgres/dbt_project.yml +++ /dev/null @@ -1,5 +0,0 @@ -config-version: 2 -name: dbt_postgres -version: 1.0 - -macro-paths: ["macros"] diff --git a/plugins/postgres/dbt/include/postgres/macros/adapters.sql b/plugins/postgres/dbt/include/postgres/macros/adapters.sql deleted file mode 100644 index 0c56232bb22..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/adapters.sql +++ /dev/null @@ -1,252 +0,0 @@ -{% macro postgres__create_table_as(temporary, relation, sql) -%} - {%- set unlogged = config.get('unlogged', default=false) -%} - {%- set sql_header = config.get('sql_header', none) -%} - - {{ sql_header if sql_header is not none }} - - create {% if temporary -%} - temporary - {%- elif unlogged -%} - unlogged - {%- endif %} table {{ relation }} - {% set contract_config = config.get('contract') %} - {% if contract_config.enforced %} - {{ get_assert_columns_equivalent(sql) }} - {{ get_table_columns_and_constraints() }} ; - insert into {{ relation }} ( - {{ adapter.dispatch('get_column_names', 'dbt')() }} - ) - {%- set sql = get_select_subquery(sql) %} - {% else %} - as - {% endif %} - ( - {{ sql }} - ); -{%- endmacro %} - -{% macro postgres__get_create_index_sql(relation, index_dict) -%} - {%- set index_config = adapter.parse_index(index_dict) -%} - {%- set comma_separated_columns = ", ".join(index_config.columns) -%} - {%- set index_name = index_config.render(relation) -%} - - create {% if index_config.unique -%} - unique - {%- endif %} index if not exists - "{{ index_name }}" - on {{ relation }} {% if index_config.type -%} - using {{ index_config.type }} - {%- endif %} - ({{ comma_separated_columns }}); -{%- endmacro %} - -{% macro postgres__create_schema(relation) -%} - {% if relation.database -%} - {{ adapter.verify_database(relation.database) }} - {%- endif -%} - {%- call statement('create_schema') -%} - create schema if not exists {{ relation.without_identifier().include(database=False) }} - {%- endcall -%} -{% endmacro %} - -{% macro postgres__drop_schema(relation) -%} - {% if relation.database -%} - {{ adapter.verify_database(relation.database) }} - {%- endif -%} - {%- call statement('drop_schema') -%} - drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade - {%- endcall -%} -{% endmacro %} - -{% macro postgres__get_columns_in_relation(relation) -%} - {% call statement('get_columns_in_relation', fetch_result=True) %} - select - column_name, - data_type, - character_maximum_length, - 
numeric_precision, - numeric_scale - - from {{ relation.information_schema('columns') }} - where table_name = '{{ relation.identifier }}' - {% if relation.schema %} - and table_schema = '{{ relation.schema }}' - {% endif %} - order by ordinal_position - - {% endcall %} - {% set table = load_result('get_columns_in_relation').table %} - {{ return(sql_convert_columns_in_relation(table)) }} -{% endmacro %} - - -{% macro postgres__list_relations_without_caching(schema_relation) %} - {% call statement('list_relations_without_caching', fetch_result=True) -%} - select - '{{ schema_relation.database }}' as database, - tablename as name, - schemaname as schema, - 'table' as type - from pg_tables - where schemaname ilike '{{ schema_relation.schema }}' - union all - select - '{{ schema_relation.database }}' as database, - viewname as name, - schemaname as schema, - 'view' as type - from pg_views - where schemaname ilike '{{ schema_relation.schema }}' - union all - select - '{{ schema_relation.database }}' as database, - matviewname as name, - schemaname as schema, - 'materialized_view' as type - from pg_matviews - where schemaname ilike '{{ schema_relation.schema }}' - {% endcall %} - {{ return(load_result('list_relations_without_caching').table) }} -{% endmacro %} - -{% macro postgres__information_schema_name(database) -%} - {% if database_name -%} - {{ adapter.verify_database(database_name) }} - {%- endif -%} - information_schema -{%- endmacro %} - -{% macro postgres__list_schemas(database) %} - {% if database -%} - {{ adapter.verify_database(database) }} - {%- endif -%} - {% call statement('list_schemas', fetch_result=True, auto_begin=False) %} - select distinct nspname from pg_namespace - {% endcall %} - {{ return(load_result('list_schemas').table) }} -{% endmacro %} - -{% macro postgres__check_schema_exists(information_schema, schema) -%} - {% if information_schema.database -%} - {{ adapter.verify_database(information_schema.database) }} - {%- endif -%} - {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %} - select count(*) from pg_namespace where nspname = '{{ schema }}' - {% endcall %} - {{ return(load_result('check_schema_exists').table) }} -{% endmacro %} - -{# - Postgres tables have a maximum length of 63 characters, anything longer is silently truncated. - Temp and backup relations add a lot of extra characters to the end of table names to ensure uniqueness. - To prevent this going over the character limit, the base_relation name is truncated to ensure - that name + suffix + uniquestring is < 63 characters. -#} - -{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %} - {% if dstring %} - {% set dt = modules.datetime.datetime.now() %} - {% set dtstring = dt.strftime("%H%M%S%f") %} - {% set suffix = suffix ~ dtstring %} - {% endif %} - {% set suffix_length = suffix|length %} - {% set relation_max_name_length = base_relation.relation_max_name_length() %} - {% if suffix_length > relation_max_name_length %} - {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). 
Maximum length is ' ~ relation_max_name_length ~ ' characters.') %} - {% endif %} - {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %} - - {{ return(base_relation.incorporate(path={"identifier": identifier })) }} - - {% endmacro %} - -{% macro postgres__make_intermediate_relation(base_relation, suffix) %} - {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }} -{% endmacro %} - -{% macro postgres__make_temp_relation(base_relation, suffix) %} - {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %} - {{ return(temp_relation.incorporate(path={"schema": none, - "database": none})) }} -{% endmacro %} - -{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %} - {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %} - {{ return(backup_relation.incorporate(type=backup_relation_type)) }} -{% endmacro %} - -{# - By using dollar-quoting like this, users can embed anything they want into their comments - (including nested dollar-quoting), as long as they do not use this exact dollar-quoting - label. It would be nice to just pick a new one but eventually you do have to give up. -#} -{% macro postgres_escape_comment(comment) -%} - {% if comment is not string %} - {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %} - {% endif %} - {%- set magic = '$dbt_comment_literal_block$' -%} - {%- if magic in comment -%} - {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%} - {%- endif -%} - {{ magic }}{{ comment }}{{ magic }} -{%- endmacro %} - - -{% macro postgres__alter_relation_comment(relation, comment) %} - {% set escaped_comment = postgres_escape_comment(comment) %} - comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }}; -{% endmacro %} - - -{% macro postgres__alter_column_comment(relation, column_dict) %} - {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute="name") | list %} - {% for column_name in column_dict if (column_name in existing_columns) %} - {% set comment = column_dict[column_name]['description'] %} - {% set escaped_comment = postgres_escape_comment(comment) %} - comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }}; - {% endfor %} -{% endmacro %} - -{%- macro postgres__get_show_grant_sql(relation) -%} - select grantee, privilege_type - from {{ relation.information_schema('role_table_grants') }} - where grantor = current_role - and grantee != current_role - and table_schema = '{{ relation.schema }}' - and table_name = '{{ relation.identifier }}' -{%- endmacro -%} - -{% macro postgres__copy_grants() %} - {{ return(False) }} -{% endmacro %} - - -{% macro postgres__get_show_indexes_sql(relation) %} - select - i.relname as name, - m.amname as method, - ix.indisunique as "unique", - array_to_string(array_agg(a.attname), ',') as column_names - from pg_index ix - join pg_class i - on i.oid = ix.indexrelid - join pg_am m - on m.oid=i.relam - join pg_class t - on t.oid = ix.indrelid - join pg_namespace n - on n.oid = t.relnamespace - join pg_attribute a - on a.attrelid = t.oid - and a.attnum = ANY(ix.indkey) - where t.relname = '{{ relation.identifier }}' - and n.nspname = '{{ relation.schema }}' - and t.relkind in ('r', 'm') - group by 1, 2, 3 - order by 1, 2, 3 -{% endmacro %} - - -{%- 
macro postgres__get_drop_index_sql(relation, index_name) -%} - drop index if exists "{{ relation.schema }}"."{{ index_name }}" -{%- endmacro -%} diff --git a/plugins/postgres/dbt/include/postgres/macros/catalog.sql b/plugins/postgres/dbt/include/postgres/macros/catalog.sql deleted file mode 100644 index f0d68e1741c..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/catalog.sql +++ /dev/null @@ -1,53 +0,0 @@ - -{% macro postgres__get_catalog(information_schema, schemas) -%} - - {%- call statement('catalog', fetch_result=True) -%} - {# - If the user has multiple databases set and the first one is wrong, this will fail. - But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better. - #} - {% set database = information_schema.database %} - {{ adapter.verify_database(database) }} - - select - '{{ database }}' as table_database, - sch.nspname as table_schema, - tbl.relname as table_name, - case tbl.relkind - when 'v' then 'VIEW' - else 'BASE TABLE' - end as table_type, - tbl_desc.description as table_comment, - col.attname as column_name, - col.attnum as column_index, - pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type, - col_desc.description as column_comment, - pg_get_userbyid(tbl.relowner) as table_owner - - from pg_catalog.pg_namespace sch - join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid - join pg_catalog.pg_attribute col on col.attrelid = tbl.oid - left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0) - left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum) - - where ( - {%- for schema in schemas -%} - upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%} - {%- endfor -%} - ) - and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session - and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables - and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. 
Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view - and col.attnum > 0 -- negative numbers are used for system columns such as oid - and not col.attisdropped -- column as not been dropped - - order by - sch.nspname, - tbl.relname, - col.attnum - - {%- endcall -%} - - {{ return(load_result('catalog').table) }} - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/materializations/incremental_strategies.sql b/plugins/postgres/dbt/include/postgres/macros/materializations/incremental_strategies.sql deleted file mode 100644 index f2fbf41e090..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/materializations/incremental_strategies.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro postgres__get_incremental_default_sql(arg_dict) %} - - {% if arg_dict["unique_key"] %} - {% do return(get_incremental_delete_insert_sql(arg_dict)) %} - {% else %} - {% do return(get_incremental_append_sql(arg_dict)) %} - {% endif %} - -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/materializations/materialized_view.sql b/plugins/postgres/dbt/include/postgres/macros/materializations/materialized_view.sql deleted file mode 100644 index 6e66e4bcd2c..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/materializations/materialized_view.sql +++ /dev/null @@ -1,84 +0,0 @@ -{% macro postgres__get_alter_materialized_view_as_sql( - relation, - configuration_changes, - sql, - existing_relation, - backup_relation, - intermediate_relation -) %} - - -- apply a full refresh immediately if needed - {% if configuration_changes.requires_full_refresh %} - - {{ get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) }} - - -- otherwise apply individual changes as needed - {% else %} - - {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }} - - {%- endif -%} - -{% endmacro %} - - -{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %} - create materialized view if not exists {{ relation }} as {{ sql }}; - - {% for _index_dict in config.get('indexes', []) -%} - {{- get_create_index_sql(relation, _index_dict) -}} - {%- endfor -%} - -{% endmacro %} - - -{% macro postgres__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %} - {{- get_create_materialized_view_as_sql(intermediate_relation, sql) -}} - - {% if existing_relation is not none %} - alter materialized view {{ existing_relation }} rename to {{ backup_relation.include(database=False, schema=False) }}; - {% endif %} - - alter materialized view {{ intermediate_relation }} rename to {{ relation.include(database=False, schema=False) }}; - -{% endmacro %} - - -{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %} - {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %} - {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %} - {% do return(_configuration_changes) %} -{% endmacro %} - - -{% macro postgres__refresh_materialized_view(relation) %} - refresh materialized view {{ relation }} -{% endmacro %} - - -{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%} - {{- log("Applying UPDATE INDEXES to: " ~ relation) -}} - - {%- for _index_change in index_changes -%} - {%- set _index = _index_change.context -%} - - {%- if 
_index_change.action == "drop" -%} - - {{ postgres__get_drop_index_sql(relation, _index.name) }}; - - {%- elif _index_change.action == "create" -%} - - {{ postgres__get_create_index_sql(relation, _index.as_node_config) }} - - {%- endif -%} - - {%- endfor -%} - -{%- endmacro -%} - - -{% macro postgres__describe_materialized_view(relation) %} - -- for now just get the indexes, we don't need the name or the query yet - {% set _indexes = run_query(get_show_indexes_sql(relation)) %} - {% do return({'indexes': _indexes}) %} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/materializations/snapshot_merge.sql b/plugins/postgres/dbt/include/postgres/macros/materializations/snapshot_merge.sql deleted file mode 100644 index 807c70b6c02..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/materializations/snapshot_merge.sql +++ /dev/null @@ -1,18 +0,0 @@ - -{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%} - {%- set insert_cols_csv = insert_cols | join(', ') -%} - - update {{ target }} - set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to - from {{ source }} as DBT_INTERNAL_SOURCE - where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text - and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text) - and {{ target }}.dbt_valid_to is null; - - insert into {{ target }} ({{ insert_cols_csv }}) - select {% for column in insert_cols -%} - DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %} - {%- endfor %} - from {{ source }} as DBT_INTERNAL_SOURCE - where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text; -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/relations.sql b/plugins/postgres/dbt/include/postgres/macros/relations.sql deleted file mode 100644 index dd50cf00163..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/relations.sql +++ /dev/null @@ -1,80 +0,0 @@ -{% macro postgres__get_relations() -%} - - {# - -- in pg_depend, objid is the dependent, refobjid is the referenced object - -- > a pg_depend entry indicates that the referenced object cannot be - -- > dropped without also dropping the dependent object. 
- #} - - {%- call statement('relations', fetch_result=True) -%} - with relation as ( - select - pg_rewrite.ev_class as class, - pg_rewrite.oid as id - from pg_rewrite - ), - class as ( - select - oid as id, - relname as name, - relnamespace as schema, - relkind as kind - from pg_class - ), - dependency as ( - select distinct - pg_depend.objid as id, - pg_depend.refobjid as ref - from pg_depend - ), - schema as ( - select - pg_namespace.oid as id, - pg_namespace.nspname as name - from pg_namespace - where nspname != 'information_schema' and nspname not like 'pg\_%' - ), - referenced as ( - select - relation.id AS id, - referenced_class.name , - referenced_class.schema , - referenced_class.kind - from relation - join class as referenced_class on relation.class=referenced_class.id - where referenced_class.kind in ('r', 'v', 'm') - ), - relationships as ( - select - referenced.name as referenced_name, - referenced.schema as referenced_schema_id, - dependent_class.name as dependent_name, - dependent_class.schema as dependent_schema_id, - referenced.kind as kind - from referenced - join dependency on referenced.id=dependency.id - join class as dependent_class on dependency.ref=dependent_class.id - where - (referenced.name != dependent_class.name or - referenced.schema != dependent_class.schema) - ) - - select - referenced_schema.name as referenced_schema, - relationships.referenced_name as referenced_name, - dependent_schema.name as dependent_schema, - relationships.dependent_name as dependent_name - from relationships - join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id - join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id - group by referenced_schema, referenced_name, dependent_schema, dependent_name - order by referenced_schema, referenced_name, dependent_schema, dependent_name; - - {%- endcall -%} - - {{ return(load_result('relations').table) }} -{% endmacro %} - -{% macro postgres_get_relations() %} - {{ return(postgres__get_relations()) }} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/timestamps.sql b/plugins/postgres/dbt/include/postgres/macros/timestamps.sql deleted file mode 100644 index 7233571b677..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/timestamps.sql +++ /dev/null @@ -1,20 +0,0 @@ -{% macro postgres__current_timestamp() -%} - now() -{%- endmacro %} - -{% macro postgres__snapshot_string_as_time(timestamp) -%} - {%- set result = "'" ~ timestamp ~ "'::timestamp without time zone" -%} - {{ return(result) }} -{%- endmacro %} - -{% macro postgres__snapshot_get_time() -%} - {{ current_timestamp() }}::timestamp without time zone -{%- endmacro %} - -{% macro postgres__current_timestamp_backcompat() %} - current_timestamp::{{ type_timestamp() }} -{% endmacro %} - -{% macro postgres__current_timestamp_in_utc_backcompat() %} - (current_timestamp at time zone 'utc')::{{ type_timestamp() }} -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/any_value.sql b/plugins/postgres/dbt/include/postgres/macros/utils/any_value.sql deleted file mode 100644 index 6fcb4eebe5f..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/any_value.sql +++ /dev/null @@ -1,7 +0,0 @@ -{#- /*Postgres doesn't support any_value, so we're using min() to get the same result*/ -#} - -{% macro postgres__any_value(expression) -%} - - min({{ expression }}) - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/dateadd.sql 
b/plugins/postgres/dbt/include/postgres/macros/utils/dateadd.sql deleted file mode 100644 index 97009ccdd53..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/dateadd.sql +++ /dev/null @@ -1,5 +0,0 @@ -{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %} - - {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }})) - -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/datediff.sql b/plugins/postgres/dbt/include/postgres/macros/utils/datediff.sql deleted file mode 100644 index b452529bec3..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/datediff.sql +++ /dev/null @@ -1,32 +0,0 @@ -{% macro postgres__datediff(first_date, second_date, datepart) -%} - - {% if datepart == 'year' %} - (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date)) - {% elif datepart == 'quarter' %} - ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date)) - {% elif datepart == 'month' %} - ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date)) - {% elif datepart == 'day' %} - (({{second_date}})::date - ({{first_date}})::date) - {% elif datepart == 'week' %} - ({{ datediff(first_date, second_date, 'day') }} / 7 + case - when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then - case when {{first_date}} <= {{second_date}} then 0 else -1 end - else - case when {{first_date}} <= {{second_date}} then 1 else 0 end - end) - {% elif datepart == 'hour' %} - ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp)) - {% elif datepart == 'minute' %} - ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp)) - {% elif datepart == 'second' %} - ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp))) - {% elif datepart == 'millisecond' %} - ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp))) - {% elif datepart == 'microsecond' %} - ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp))) - {% else %} - {{ exceptions.raise_compiler_error("Unsupported datepart for macro datediff in postgres: {!r}".format(datepart)) }} - {% endif %} - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/last_day.sql b/plugins/postgres/dbt/include/postgres/macros/utils/last_day.sql deleted file mode 100644 index 16995301cb4..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/last_day.sql +++ /dev/null @@ -1,14 +0,0 @@ -{% macro postgres__last_day(date, datepart) -%} - - {%- if datepart == 'quarter' -%} - -- postgres dateadd does not support quarter interval. 
- cast( - {{dbt.dateadd('day', '-1', - dbt.dateadd('month', '3', dbt.date_trunc(datepart, date)) - )}} - as date) - {%- else -%} - {{dbt.default_last_day(date, datepart)}} - {%- endif -%} - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/listagg.sql b/plugins/postgres/dbt/include/postgres/macros/utils/listagg.sql deleted file mode 100644 index f3e19427dc4..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/listagg.sql +++ /dev/null @@ -1,23 +0,0 @@ -{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%} - - {% if limit_num -%} - array_to_string( - (array_agg( - {{ measure }} - {% if order_by_clause -%} - {{ order_by_clause }} - {%- endif %} - ))[1:{{ limit_num }}], - {{ delimiter_text }} - ) - {%- else %} - string_agg( - {{ measure }}, - {{ delimiter_text }} - {% if order_by_clause -%} - {{ order_by_clause }} - {%- endif %} - ) - {%- endif %} - -{%- endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/macros/utils/split_part.sql b/plugins/postgres/dbt/include/postgres/macros/utils/split_part.sql deleted file mode 100644 index e4174d2ee9f..00000000000 --- a/plugins/postgres/dbt/include/postgres/macros/utils/split_part.sql +++ /dev/null @@ -1,9 +0,0 @@ -{% macro postgres__split_part(string_text, delimiter_text, part_number) %} - - {% if part_number >= 0 %} - {{ dbt.default__split_part(string_text, delimiter_text, part_number) }} - {% else %} - {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }} - {% endif %} - -{% endmacro %} diff --git a/plugins/postgres/dbt/include/postgres/profile_template.yml b/plugins/postgres/dbt/include/postgres/profile_template.yml deleted file mode 100644 index 5060a272f5d..00000000000 --- a/plugins/postgres/dbt/include/postgres/profile_template.yml +++ /dev/null @@ -1,21 +0,0 @@ -fixed: - type: postgres -prompts: - host: - hint: 'hostname for the instance' - port: - default: 5432 - type: 'int' - user: - hint: 'dev username' - pass: - hint: 'dev password' - hide_input: true - dbname: - hint: 'default database that dbt will build objects in' - schema: - hint: 'default schema that dbt will build objects in' - threads: - hint: '1 or more' - type: 'int' - default: 1 diff --git a/plugins/postgres/dbt/include/postgres/sample_profiles.yml b/plugins/postgres/dbt/include/postgres/sample_profiles.yml deleted file mode 100644 index 567f3912893..00000000000 --- a/plugins/postgres/dbt/include/postgres/sample_profiles.yml +++ /dev/null @@ -1,24 +0,0 @@ -default: - outputs: - - dev: - type: postgres - threads: [1 or more] - host: [host] - port: [port] - user: [dev_username] - pass: [dev_password] - dbname: [dbname] - schema: [dev_schema] - - prod: - type: postgres - threads: [1 or more] - host: [host] - port: [port] - user: [prod_username] - pass: [prod_password] - dbname: [dbname] - schema: [prod_schema] - - target: dev diff --git a/plugins/postgres/setup.py b/plugins/postgres/setup.py deleted file mode 100644 index 30936ab3941..00000000000 --- a/plugins/postgres/setup.py +++ /dev/null @@ -1,90 +0,0 @@ -#!/usr/bin/env python -import os -import sys - -if sys.version_info < (3, 8): - print("Error: dbt does not support this version of Python.") - print("Please upgrade to Python 3.8 or higher.") - sys.exit(1) - - -from setuptools import setup - -try: - from setuptools import find_namespace_packages -except ImportError: - # the user has a downlevel version of setuptools. 
- print("Error: dbt requires setuptools v40.1.0 or higher.") - print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again") - sys.exit(1) - - -PSYCOPG2_MESSAGE = """ -No package name override was set. -Using 'psycopg2-binary' package to satisfy 'psycopg2' - -If you experience segmentation faults, silent crashes, or installation errors, -consider retrying with the 'DBT_PSYCOPG2_NAME' environment variable set to -'psycopg2'. It may require a compiler toolchain and development libraries! -""".strip() - - -def _dbt_psycopg2_name(): - # if the user chose something, use that - package_name = os.getenv("DBT_PSYCOPG2_NAME", "") - if package_name: - return package_name - - # default to psycopg2-binary for all OSes/versions - print(PSYCOPG2_MESSAGE) - return "psycopg2-binary" - - -package_name = "dbt-postgres" -package_version = "1.7.0a1" -description = """The postgres adapter plugin for dbt (data build tool)""" - -this_directory = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(this_directory, "README.md")) as f: - long_description = f.read() - -DBT_PSYCOPG2_NAME = _dbt_psycopg2_name() - -setup( - name=package_name, - version=package_version, - description=description, - long_description=long_description, - long_description_content_type="text/markdown", - author="dbt Labs", - author_email="info@dbtlabs.com", - url="https://github.com/dbt-labs/dbt-core", - packages=find_namespace_packages(include=["dbt", "dbt.*"]), - package_data={ - "dbt": [ - "include/postgres/dbt_project.yml", - "include/postgres/sample_profiles.yml", - "include/postgres/macros/*.sql", - "include/postgres/macros/**/*.sql", - ] - }, - install_requires=[ - "dbt-core=={}".format(package_version), - "{}~=2.8".format(DBT_PSYCOPG2_NAME), - # installed via dbt-core, but referenced directly, don't pin to avoid version conflicts with dbt-core - "agate", - ], - zip_safe=False, - classifiers=[ - "Development Status :: 5 - Production/Stable", - "License :: OSI Approved :: Apache Software License", - "Operating System :: Microsoft :: Windows", - "Operating System :: MacOS :: MacOS X", - "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - ], - python_requires=">=3.8", -) diff --git a/plugins/redshift/README.md b/plugins/redshift/README.md deleted file mode 100644 index 5f3fc6eccc9..00000000000 --- a/plugins/redshift/README.md +++ /dev/null @@ -1,3 +0,0 @@ -### dbt-redshift - -This plugin has moved! https://github.com/dbt-labs/dbt-redshift diff --git a/plugins/snowflake/README.md b/plugins/snowflake/README.md deleted file mode 100644 index 3dcca642a80..00000000000 --- a/plugins/snowflake/README.md +++ /dev/null @@ -1,3 +0,0 @@ -### dbt-snowflake - -This plugin has moved! 
https://github.com/dbt-labs/dbt-snowflake diff --git a/pytest.ini b/pytest.ini index 0760d49a55a..800dd6b9ece 100644 --- a/pytest.ini +++ b/pytest.ini @@ -7,3 +7,4 @@ env_files = testpaths = tests/functional tests/unit +pythonpath = core diff --git a/requirements.txt b/requirements.txt index 279403c7e64..a2895053cb5 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,2 +1 @@ ./core -./plugins/postgres diff --git a/schemas/dbt/catalog/v1.json b/schemas/dbt/catalog/v1.json index 08f76a591bf..25c2b25b2b3 100644 --- a/schemas/dbt/catalog/v1.json +++ b/schemas/dbt/catalog/v1.json @@ -1,249 +1,425 @@ { + "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", - "required": [ - "metadata", - "nodes", - "sources" - ], + "title": "CatalogArtifact", "properties": { "metadata": { - "$ref": "#/definitions/CatalogMetadata" - }, - "nodes": { "type": "object", - "additionalProperties": { - "$ref": "#/definitions/CatalogTable" - } - }, - "sources": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/CatalogTable" - } - }, - "errors": { - "oneOf": [ - { - "type": "array", - "items": { - "type": "string" - } - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "description": "CatalogArtifact(metadata: dbt.contracts.results.CatalogMetadata, nodes: Dict[str, dbt.contracts.results.CatalogTable], sources: Dict[str, dbt.contracts.results.CatalogTable], errors: Optional[List[str]] = None, _compile_results: Optional[Any] = None)", - "definitions": { - "CatalogMetadata": { - "type": "object", - "required": [], + "title": "CatalogMetadata", "properties": { "dbt_schema_version": { - "type": "string", - "default": "https://schemas.getdbt.com/dbt/catalog/v1.json" + "type": "string" }, "dbt_version": { "type": "string", - "default": "1.5.0a1" + "default": "1.9.0a1" }, "generated_at": { - "type": "string", - "format": "date-time", - "default": "2023-02-09T23:46:55.265093Z" + "type": "string" }, "invocation_id": { - "oneOf": [ + "anyOf": [ { "type": "string" }, { "type": "null" } - ], - "default": "e6a9b266-203d-4fec-93af-fb8f55423a6b" + ] }, "env": { "type": "object", "additionalProperties": { "type": "string" }, - "default": {} - } - }, - "additionalProperties": false, - "description": "CatalogMetadata(dbt_schema_version: str = , dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = , invocation_id: Optional[str] = , env: Dict[str, str] = )" - }, - "CatalogTable": { - "type": "object", - "required": [ - "metadata", - "columns", - "stats" - ], - "properties": { - "metadata": { - "$ref": "#/definitions/TableMetadata" - }, - "columns": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/ColumnMetadata" - } - }, - "stats": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/StatsItem" + "propertyNames": { + "type": "string" } - }, - "unique_id": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] } }, - "additionalProperties": false, - "description": "CatalogTable(metadata: dbt.contracts.results.TableMetadata, columns: Dict[str, dbt.contracts.results.ColumnMetadata], stats: Dict[str, dbt.contracts.results.StatsItem], unique_id: Optional[str] = None)" + "additionalProperties": false }, - "TableMetadata": { + "nodes": { "type": "object", - "required": [ - "type", - "schema", - "name" - ], - "properties": { - "type": { - "type": "string" - }, - "schema": { - "type": "string" - }, - "name": { - "type": "string" - }, - "database": { - "oneOf": [ - { - "type": "string" + 
"additionalProperties": { + "type": "object", + "title": "CatalogTable", + "properties": { + "metadata": { + "type": "object", + "title": "TableMetadata", + "properties": { + "type": { + "type": "string" + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "comment": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "owner": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } }, - { - "type": "null" - } - ] - }, - "comment": { - "oneOf": [ - { - "type": "string" + "additionalProperties": false, + "required": [ + "type", + "schema", + "name" + ] + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnMetadata", + "properties": { + "type": { + "type": "string" + }, + "index": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "comment": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "type", + "index", + "name" + ] }, - { - "type": "null" - } - ] - }, - "owner": { - "oneOf": [ - { + "propertyNames": { "type": "string" - }, - { - "type": "null" } - ] - } - }, - "additionalProperties": false, - "description": "TableMetadata(type: str, schema: str, name: str, database: Optional[str] = None, comment: Optional[str] = None, owner: Optional[str] = None)" - }, - "ColumnMetadata": { - "type": "object", - "required": [ - "type", - "index", - "name" - ], - "properties": { - "type": { - "type": "string" - }, - "index": { - "type": "integer" - }, - "name": { - "type": "string" - }, - "comment": { - "oneOf": [ - { - "type": "string" + }, + "stats": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "StatsItem", + "properties": { + "id": { + "type": "string" + }, + "label": { + "type": "string" + }, + "value": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ] + }, + "include": { + "type": "boolean" + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "id", + "label", + "value", + "include" + ] }, - { - "type": "null" + "propertyNames": { + "type": "string" } - ] - } + }, + "unique_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "metadata", + "columns", + "stats" + ] }, - "additionalProperties": false, - "description": "ColumnMetadata(type: str, index: int, name: str, comment: Optional[str] = None)" + "propertyNames": { + "type": "string" + } }, - "StatsItem": { + "sources": { "type": "object", - "required": [ - "id", - "label", - "include" - ], - "properties": { - "id": { - "type": "string" - }, - "label": { - "type": "string" - }, - "value": { - "oneOf": [ - { - "type": "boolean" + "additionalProperties": { + "type": "object", + "title": "CatalogTable", + "properties": { + "metadata": { + "type": "object", + "title": "TableMetadata", + "properties": { + "type": { + "type": "string" + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "comment": { + 
"anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "owner": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } }, - { - "type": "string" + "additionalProperties": false, + "required": [ + "type", + "schema", + "name" + ] + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnMetadata", + "properties": { + "type": { + "type": "string" + }, + "index": { + "type": "integer" + }, + "name": { + "type": "string" + }, + "comment": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "type", + "index", + "name" + ] }, - { - "type": "number" + "propertyNames": { + "type": "string" + } + }, + "stats": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "StatsItem", + "properties": { + "id": { + "type": "string" + }, + "label": { + "type": "string" + }, + "value": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ] + }, + "include": { + "type": "boolean" + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "id", + "label", + "value", + "include" + ] }, - { - "type": "null" + "propertyNames": { + "type": "string" } - ] + }, + "unique_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } }, - "include": { - "type": "boolean" + "additionalProperties": false, + "required": [ + "metadata", + "columns", + "stats" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "errors": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } }, - "description": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] + { + "type": "null" } - }, - "additionalProperties": false, - "description": "StatsItem(id: str, label: str, value: Union[bool, str, float, NoneType], include: bool, description: Optional[str] = None)" + ], + "default": null + }, + "_compile_results": { + "anyOf": [ + {}, + { + "type": "null" + } + ], + "default": null } }, - "$schema": "http://json-schema.org/draft-07/schema#", + "additionalProperties": false, + "required": [ + "metadata", + "nodes", + "sources" + ], "$id": "https://schemas.getdbt.com/dbt/catalog/v1.json" } diff --git a/schemas/dbt/manifest/v10.json b/schemas/dbt/manifest/v10.json index e57a55f4531..dff60014a78 100644 --- a/schemas/dbt/manifest/v10.json +++ b/schemas/dbt/manifest/v10.json @@ -141,6 +141,9 @@ }, { "$ref": "#/definitions/Metric" + }, + { + "$ref": "#/definitions/SemanticModel" } ] } @@ -212,7 +215,7 @@ } }, "additionalProperties": false, - "description": "WritableManifest(metadata: dbt.contracts.graph.manifest.ManifestMetadata, nodes: Mapping[str, Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode]], sources: Mapping[str, dbt.contracts.graph.nodes.SourceDefinition], macros: Mapping[str, dbt.contracts.graph.nodes.Macro], docs: Mapping[str, dbt.contracts.graph.nodes.Documentation], exposures: Mapping[str, dbt.contracts.graph.nodes.Exposure], 
metrics: Mapping[str, dbt.contracts.graph.nodes.Metric], groups: Mapping[str, dbt.contracts.graph.nodes.Group], selectors: Mapping[str, Any], disabled: Union[Mapping[str, List[Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode, dbt.contracts.graph.nodes.SourceDefinition, dbt.contracts.graph.nodes.Exposure, dbt.contracts.graph.nodes.Metric]]], NoneType], parent_map: Union[Dict[str, List[str]], NoneType], child_map: Union[Dict[str, List[str]], NoneType], group_map: Union[Dict[str, List[str]], NoneType], semantic_models: Mapping[str, dbt.contracts.graph.nodes.SemanticModel])", + "description": "WritableManifest(metadata: dbt.contracts.graph.manifest.ManifestMetadata, nodes: Mapping[str, Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode]], sources: Mapping[str, dbt.contracts.graph.nodes.SourceDefinition], macros: Mapping[str, dbt.contracts.graph.nodes.Macro], docs: Mapping[str, dbt.contracts.graph.nodes.Documentation], exposures: Mapping[str, dbt.contracts.graph.nodes.Exposure], metrics: Mapping[str, dbt.contracts.graph.nodes.Metric], groups: Mapping[str, dbt.contracts.graph.nodes.Group], selectors: Mapping[str, Any], disabled: Union[Mapping[str, List[Union[dbt.contracts.graph.nodes.AnalysisNode, dbt.contracts.graph.nodes.SingularTestNode, dbt.contracts.graph.nodes.HookNode, dbt.contracts.graph.nodes.ModelNode, dbt.contracts.graph.nodes.RPCNode, dbt.contracts.graph.nodes.SqlNode, dbt.contracts.graph.nodes.GenericTestNode, dbt.contracts.graph.nodes.SnapshotNode, dbt.contracts.graph.nodes.SeedNode, dbt.contracts.graph.nodes.SourceDefinition, dbt.contracts.graph.nodes.Exposure, dbt.contracts.graph.nodes.Metric, dbt.contracts.graph.nodes.SemanticModel]]], NoneType], parent_map: Union[Dict[str, List[str]], NoneType], child_map: Union[Dict[str, List[str]], NoneType], group_map: Union[Dict[str, List[str]], NoneType], semantic_models: Mapping[str, dbt.contracts.graph.nodes.SemanticModel])", "definitions": { "ManifestMetadata": { "type": "object", @@ -224,12 +227,12 @@ }, "dbt_version": { "type": "string", - "default": "1.6.0b4" + "default": "1.6.5" }, "generated_at": { "type": "string", "format": "date-time", - "default": "2023-06-15T20:32:38.802488Z" + "default": "2023-10-05T00:33:14.410024Z" }, "invocation_id": { "oneOf": [ @@ -240,7 +243,7 @@ "type": "null" } ], - "default": "fe95e4d0-61ff-487d-8293-092f543fcab2" + "default": "603e2fae-9c7d-4d17-8530-7d28c9875263" }, "env": { "type": "object", @@ -471,7 +474,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.804467 + "default": 1696465994.411958 }, "config_call_dict": { "type": "object", @@ -1184,7 +1187,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.805745 + "default": 1696465994.413604 }, "config_call_dict": { "type": "object", @@ -1572,7 +1575,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.806452 + "default": 1696465994.414359 }, "config_call_dict": { "type": "object", @@ -1848,7 +1851,7 @@ }, 
"created_at": { "type": "number", - "default": 1686861158.807143 + "default": 1696465994.4150689 }, "config_call_dict": { "type": "object", @@ -1951,8 +1954,8 @@ "access": { "type": "string", "enum": [ - "protected", "private", + "protected", "public" ], "default": "protected" @@ -2001,10 +2004,10 @@ } ] }, - "state_relation": { + "defer_relation": { "oneOf": [ { - "$ref": "#/definitions/StateRelation" + "$ref": "#/definitions/DeferRelation" }, { "type": "null" @@ -2013,7 +2016,7 @@ } }, "additionalProperties": false, - "description": "ModelNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = , _event_status: Dict[str, Any] = , tags: List[str] = , description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = , meta: Dict[str, Any] = , group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = , patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = , created_at: float = , config_call_dict: Dict[str, Any] = , relation_name: Union[str, NoneType] = None, raw_code: str = '', language: str = 'sql', refs: List[dbt.contracts.graph.nodes.RefArgs] = , sources: List[List[str]] = , metrics: List[List[str]] = , depends_on: dbt.contracts.graph.nodes.DependsOn = , compiled_path: Union[str, NoneType] = None, compiled: bool = False, compiled_code: Union[str, NoneType] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = , _pre_injected_sql: Union[str, NoneType] = None, contract: dbt.contracts.graph.nodes.Contract = , access: dbt.node_types.AccessType = , constraints: List[dbt.contracts.graph.nodes.ModelLevelConstraint] = , version: Union[str, float, NoneType] = None, latest_version: Union[str, float, NoneType] = None, deprecation_date: Union[datetime.datetime, NoneType] = None, state_relation: Union[dbt.contracts.graph.nodes.StateRelation, NoneType] = None)" + "description": "ModelNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.NodeConfig = , _event_status: Dict[str, Any] = , tags: List[str] = , description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = , meta: Dict[str, Any] = , group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = , patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = , created_at: float = , config_call_dict: Dict[str, Any] = , relation_name: Union[str, NoneType] = None, raw_code: str = '', language: str = 'sql', refs: List[dbt.contracts.graph.nodes.RefArgs] = , sources: List[List[str]] = , metrics: List[List[str]] = , depends_on: dbt.contracts.graph.nodes.DependsOn = , compiled_path: Union[str, NoneType] = None, compiled: bool = False, compiled_code: Union[str, NoneType] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = , _pre_injected_sql: Union[str, NoneType] = None, contract: dbt.contracts.graph.nodes.Contract = , access: dbt.node_types.AccessType = , constraints: 
List[dbt.contracts.graph.nodes.ModelLevelConstraint] = , version: Union[str, float, NoneType] = None, latest_version: Union[str, float, NoneType] = None, deprecation_date: Union[datetime.datetime, NoneType] = None, defer_relation: Union[dbt.contracts.graph.nodes.DeferRelation, NoneType] = None)" }, "ModelLevelConstraint": { "type": "object", @@ -2071,16 +2074,13 @@ "additionalProperties": false, "description": "ModelLevelConstraint(type: dbt.contracts.graph.nodes.ConstraintType, name: Union[str, NoneType] = None, expression: Union[str, NoneType] = None, warn_unenforced: bool = True, warn_unsupported: bool = True, columns: List[str] = )" }, - "StateRelation": { + "DeferRelation": { "type": "object", "required": [ - "alias", - "schema" + "schema", + "alias" ], "properties": { - "alias": { - "type": "string" - }, "database": { "oneOf": [ { @@ -2093,10 +2093,23 @@ }, "schema": { "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] } }, "additionalProperties": false, - "description": "StateRelation(alias: str, database: Union[str, NoneType], schema: str)" + "description": "DeferRelation(database: Union[str, NoneType], schema: str, alias: str, relation_name: Union[str, NoneType])" }, "RPCNode": { "type": "object", @@ -2260,7 +2273,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.808148 + "default": 1696465994.416128 }, "config_call_dict": { "type": "object", @@ -2398,7 +2411,7 @@ "resource_type": { "type": "string", "enum": [ - "sqloperation" + "sql_operation" ] }, "package_name": { @@ -2526,7 +2539,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.8088078 + "default": 1696465994.41679 }, "config_call_dict": { "type": "object", @@ -2784,7 +2797,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.8095539 + "default": 1696465994.4175282 }, "config_call_dict": { "type": "object", @@ -3079,7 +3092,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.810841 + "default": 1696465994.418854 }, "config_call_dict": { "type": "object", @@ -3179,10 +3192,10 @@ "checksum": null } }, - "state_relation": { + "defer_relation": { "oneOf": [ { - "$ref": "#/definitions/StateRelation" + "$ref": "#/definitions/DeferRelation" }, { "type": "null" @@ -3191,7 +3204,7 @@ } }, "additionalProperties": false, - "description": "SnapshotNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SnapshotConfig, _event_status: Dict[str, Any] = , tags: List[str] = , description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = , meta: Dict[str, Any] = , group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = , patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = , created_at: float = , config_call_dict: Dict[str, Any] = , relation_name: Union[str, NoneType] = None, raw_code: str = '', language: str = 'sql', refs: List[dbt.contracts.graph.nodes.RefArgs] = , sources: List[List[str]] = , metrics: List[List[str]] = , depends_on: dbt.contracts.graph.nodes.DependsOn = , compiled_path: Union[str, NoneType] = None, compiled: bool = False, compiled_code: Union[str, NoneType] = None, extra_ctes_injected: bool = False, extra_ctes: 
List[dbt.contracts.graph.nodes.InjectedCTE] = , _pre_injected_sql: Union[str, NoneType] = None, contract: dbt.contracts.graph.nodes.Contract = , state_relation: Union[dbt.contracts.graph.nodes.StateRelation, NoneType] = None)" + "description": "SnapshotNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SnapshotConfig, _event_status: Dict[str, Any] = , tags: List[str] = , description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = , meta: Dict[str, Any] = , group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = , patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = , created_at: float = , config_call_dict: Dict[str, Any] = , relation_name: Union[str, NoneType] = None, raw_code: str = '', language: str = 'sql', refs: List[dbt.contracts.graph.nodes.RefArgs] = , sources: List[List[str]] = , metrics: List[List[str]] = , depends_on: dbt.contracts.graph.nodes.DependsOn = , compiled_path: Union[str, NoneType] = None, compiled: bool = False, compiled_code: Union[str, NoneType] = None, extra_ctes_injected: bool = False, extra_ctes: List[dbt.contracts.graph.nodes.InjectedCTE] = , _pre_injected_sql: Union[str, NoneType] = None, contract: dbt.contracts.graph.nodes.Contract = , defer_relation: Union[dbt.contracts.graph.nodes.DeferRelation, NoneType] = None)" }, "SnapshotConfig": { "type": "object", @@ -3586,7 +3599,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.812035 + "default": 1696465994.420199 }, "config_call_dict": { "type": "object", @@ -3622,10 +3635,10 @@ "macros": [] } }, - "state_relation": { + "defer_relation": { "oneOf": [ { - "$ref": "#/definitions/StateRelation" + "$ref": "#/definitions/DeferRelation" }, { "type": "null" @@ -3634,7 +3647,7 @@ } }, "additionalProperties": false, - "description": "SeedNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SeedConfig = , _event_status: Dict[str, Any] = , tags: List[str] = , description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = , meta: Dict[str, Any] = , group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = , patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = , created_at: float = , config_call_dict: Dict[str, Any] = , relation_name: Union[str, NoneType] = None, raw_code: str = '', root_path: Union[str, NoneType] = None, depends_on: dbt.contracts.graph.nodes.MacroDependsOn = , state_relation: Union[dbt.contracts.graph.nodes.StateRelation, NoneType] = None)" + "description": "SeedNode(database: Union[str, NoneType], schema: str, name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], alias: str, checksum: dbt.contracts.files.FileHash, config: dbt.contracts.graph.model_config.SeedConfig = , _event_status: Dict[str, Any] = , tags: List[str] = , description: str = '', columns: Dict[str, dbt.contracts.graph.nodes.ColumnInfo] = , 
meta: Dict[str, Any] = , group: Union[str, NoneType] = None, docs: dbt.contracts.graph.unparsed.Docs = , patch_path: Union[str, NoneType] = None, build_path: Union[str, NoneType] = None, deferred: bool = False, unrendered_config: Dict[str, Any] = , created_at: float = , config_call_dict: Dict[str, Any] = , relation_name: Union[str, NoneType] = None, raw_code: str = '', root_path: Union[str, NoneType] = None, depends_on: dbt.contracts.graph.nodes.MacroDependsOn = , defer_relation: Union[dbt.contracts.graph.nodes.DeferRelation, NoneType] = None)" }, "SeedConfig": { "type": "object", @@ -4007,7 +4020,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.8133152 + "default": 1696465994.421661 } }, "additionalProperties": false, @@ -4319,7 +4332,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.8135822 + "default": 1696465994.421958 }, "supported_languages": { "oneOf": [ @@ -4559,7 +4572,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.814228 + "default": 1696465994.422623 } }, "additionalProperties": false, @@ -4659,7 +4672,6 @@ "enum": [ "simple", "ratio", - "expr", "cumulative", "derived" ] @@ -4745,7 +4757,7 @@ }, "created_at": { "type": "number", - "default": 1686861158.815338 + "default": 1696465994.4238322 }, "group": { "oneOf": [ @@ -4775,23 +4787,17 @@ } ] }, - "measures": { - "oneOf": [ - { - "type": "array", - "items": { - "$ref": "#/definitions/MetricInputMeasure" - } - }, - { - "type": "null" - } - ] + "input_measures": { + "type": "array", + "items": { + "$ref": "#/definitions/MetricInputMeasure" + }, + "default": [] }, "numerator": { "oneOf": [ { - "$ref": "#/definitions/MetricInputMeasure" + "$ref": "#/definitions/MetricInput" }, { "type": "null" @@ -4801,7 +4807,7 @@ "denominator": { "oneOf": [ { - "$ref": "#/definitions/MetricInputMeasure" + "$ref": "#/definitions/MetricInput" }, { "type": "null" @@ -4860,7 +4866,7 @@ } }, "additionalProperties": false, - "description": "MetricTypeParams(measure: Union[dbt.contracts.graph.nodes.MetricInputMeasure, NoneType] = None, measures: Union[List[dbt.contracts.graph.nodes.MetricInputMeasure], NoneType] = None, numerator: Union[dbt.contracts.graph.nodes.MetricInputMeasure, NoneType] = None, denominator: Union[dbt.contracts.graph.nodes.MetricInputMeasure, NoneType] = None, expr: Union[str, NoneType] = None, window: Union[dbt.contracts.graph.nodes.MetricTimeWindow, NoneType] = None, grain_to_date: Union[dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity, NoneType] = None, metrics: Union[List[dbt.contracts.graph.nodes.MetricInput], NoneType] = None)" + "description": "MetricTypeParams(measure: Union[dbt.contracts.graph.nodes.MetricInputMeasure, NoneType] = None, input_measures: List[dbt.contracts.graph.nodes.MetricInputMeasure] = , numerator: Union[dbt.contracts.graph.nodes.MetricInput, NoneType] = None, denominator: Union[dbt.contracts.graph.nodes.MetricInput, NoneType] = None, expr: Union[str, NoneType] = None, window: Union[dbt.contracts.graph.nodes.MetricTimeWindow, NoneType] = None, grain_to_date: Union[dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity, NoneType] = None, metrics: Union[List[dbt.contracts.graph.nodes.MetricInput], NoneType] = None)" }, "MetricInputMeasure": { "type": "object", @@ -4890,10 +4896,24 @@ "type": "null" } ] + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "oneOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ] } }, "additionalProperties": false, - "description": 
"MetricInputMeasure(name: str, filter: Union[dbt.contracts.graph.nodes.WhereFilter, NoneType] = None, alias: Union[str, NoneType] = None)" + "description": "MetricInputMeasure(name: str, filter: Union[dbt.contracts.graph.nodes.WhereFilter, NoneType] = None, alias: Union[str, NoneType] = None, join_to_timespine: bool = False, fill_nulls_with: Union[int, NoneType] = None)" }, "WhereFilter": { "type": "object", @@ -4908,30 +4928,6 @@ "additionalProperties": false, "description": "WhereFilter(where_sql_template: str)" }, - "MetricTimeWindow": { - "type": "object", - "required": [ - "count", - "granularity" - ], - "properties": { - "count": { - "type": "integer" - }, - "granularity": { - "type": "string", - "enum": [ - "day", - "week", - "month", - "quarter", - "year" - ] - } - }, - "additionalProperties": false, - "description": "MetricTimeWindow(count: int, granularity: dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity)" - }, "MetricInput": { "type": "object", "required": [ @@ -4992,6 +4988,30 @@ "additionalProperties": false, "description": "MetricInput(name: str, filter: Union[dbt.contracts.graph.nodes.WhereFilter, NoneType] = None, alias: Union[str, NoneType] = None, offset_window: Union[dbt.contracts.graph.nodes.MetricTimeWindow, NoneType] = None, offset_to_grain: Union[dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity, NoneType] = None)" }, + "MetricTimeWindow": { + "type": "object", + "required": [ + "count", + "granularity" + ], + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "type": "string", + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "description": "MetricTimeWindow(count: int, granularity: dbt_semantic_interfaces.type_enums.time_granularity.TimeGranularity)" + }, "SourceFileMetadata": { "type": "object", "required": [ @@ -5122,14 +5142,14 @@ "operation", "seed", "rpc", - "sqloperation", + "sql_operation", "doc", "source", "macro", "exposure", "metric", "group", - "semanticmodel" + "semantic_model" ] }, "package_name": { @@ -5173,6 +5193,16 @@ } ] }, + "label": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, "defaults": { "oneOf": [ { @@ -5213,10 +5243,44 @@ "type": "null" } ] + }, + "depends_on": { + "$ref": "#/definitions/DependsOn", + "default": { + "macros": [], + "nodes": [] + } + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/definitions/RefArgs" + }, + "default": [] + }, + "created_at": { + "type": "number", + "default": 1696465994.425479 + }, + "config": { + "$ref": "#/definitions/SemanticModelConfig", + "default": { + "enabled": true + } + }, + "primary_entity": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] } }, "additionalProperties": false, - "description": "SemanticModel(name: str, resource_type: dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], model: str, node_relation: Union[dbt.contracts.graph.nodes.NodeRelation, NoneType], description: Union[str, NoneType] = None, defaults: Union[dbt.contracts.graph.semantic_models.Defaults, NoneType] = None, entities: Sequence[dbt.contracts.graph.semantic_models.Entity] = , measures: Sequence[dbt.contracts.graph.semantic_models.Measure] = , dimensions: Sequence[dbt.contracts.graph.semantic_models.Dimension] = , metadata: Union[dbt.contracts.graph.semantic_models.SourceFileMetadata, NoneType] = None)" + "description": "SemanticModel(name: str, resource_type: 
dbt.node_types.NodeType, package_name: str, path: str, original_file_path: str, unique_id: str, fqn: List[str], model: str, node_relation: Union[dbt.contracts.graph.nodes.NodeRelation, NoneType], description: Union[str, NoneType] = None, label: Union[str, NoneType] = None, defaults: Union[dbt.contracts.graph.semantic_models.Defaults, NoneType] = None, entities: Sequence[dbt.contracts.graph.semantic_models.Entity] = , measures: Sequence[dbt.contracts.graph.semantic_models.Measure] = , dimensions: Sequence[dbt.contracts.graph.semantic_models.Dimension] = , metadata: Union[dbt.contracts.graph.semantic_models.SourceFileMetadata, NoneType] = None, depends_on: dbt.contracts.graph.nodes.DependsOn = , refs: List[dbt.contracts.graph.nodes.RefArgs] = , created_at: float = , config: dbt.contracts.graph.model_config.SemanticModelConfig = , primary_entity: Union[str, NoneType] = None)" }, "NodeRelation": { "type": "object", @@ -5240,10 +5304,20 @@ "type": "null" } ] + }, + "relation_name": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] } }, "additionalProperties": false, - "description": "NodeRelation(alias: str, schema_name: str, database: Union[str, NoneType] = None)" + "description": "NodeRelation(alias: str, schema_name: str, database: Union[str, NoneType] = None, relation_name: Union[str, NoneType] = None)" }, "Defaults": { "type": "object", @@ -5292,6 +5366,16 @@ } ] }, + "label": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, "role": { "oneOf": [ { @@ -5314,7 +5398,7 @@ } }, "additionalProperties": false, - "description": "Entity(name: str, type: dbt_semantic_interfaces.type_enums.entity_type.EntityType, description: Union[str, NoneType] = None, role: Union[str, NoneType] = None, expr: Union[str, NoneType] = None)" + "description": "Entity(name: str, type: dbt_semantic_interfaces.type_enums.entity_type.EntityType, description: Union[str, NoneType] = None, label: Union[str, NoneType] = None, role: Union[str, NoneType] = None, expr: Union[str, NoneType] = None)" }, "Measure": { "type": "object", @@ -5350,6 +5434,16 @@ } ] }, + "label": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, "create_metric": { "type": "boolean", "default": false @@ -5396,7 +5490,7 @@ } }, "additionalProperties": false, - "description": "Measure(name: str, agg: dbt_semantic_interfaces.type_enums.aggregation_type.AggregationType, description: Union[str, NoneType] = None, create_metric: bool = False, expr: Union[str, NoneType] = None, agg_params: Union[dbt.contracts.graph.semantic_models.MeasureAggregationParameters, NoneType] = None, non_additive_dimension: Union[dbt.contracts.graph.semantic_models.NonAdditiveDimension, NoneType] = None, agg_time_dimension: Union[str, NoneType] = None)" + "description": "Measure(name: str, agg: dbt_semantic_interfaces.type_enums.aggregation_type.AggregationType, description: Union[str, NoneType] = None, label: Union[str, NoneType] = None, create_metric: bool = False, expr: Union[str, NoneType] = None, agg_params: Union[dbt.contracts.graph.semantic_models.MeasureAggregationParameters, NoneType] = None, non_additive_dimension: Union[dbt.contracts.graph.semantic_models.NonAdditiveDimension, NoneType] = None, agg_time_dimension: Union[str, NoneType] = None)" }, "MeasureAggregationParameters": { "type": "object", @@ -5413,35 +5507,23 @@ ] }, "use_discrete_percentile": { - "oneOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ] + "type": "boolean", + "default": false }, "use_approximate_percentile": { 
- "oneOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ] + "type": "boolean", + "default": false } }, "additionalProperties": false, - "description": "MeasureAggregationParameters(percentile: Union[float, NoneType] = None, use_discrete_percentile: Union[bool, NoneType] = None, use_approximate_percentile: Union[bool, NoneType] = None)" + "description": "MeasureAggregationParameters(percentile: Union[float, NoneType] = None, use_discrete_percentile: bool = False, use_approximate_percentile: bool = False)" }, "NonAdditiveDimension": { "type": "object", "required": [ "name", "window_choice", - "window_grouples" + "window_groupings" ], "properties": { "name": { @@ -5461,7 +5543,7 @@ "count" ] }, - "window_grouples": { + "window_groupings": { "type": "array", "items": { "type": "string" @@ -5469,7 +5551,7 @@ } }, "additionalProperties": false, - "description": "NonAdditiveDimension(name: str, window_choice: dbt_semantic_interfaces.type_enums.aggregation_type.AggregationType, window_grouples: List[str])" + "description": "NonAdditiveDimension(name: str, window_choice: dbt_semantic_interfaces.type_enums.aggregation_type.AggregationType, window_groupings: List[str])" }, "Dimension": { "type": "object", @@ -5498,6 +5580,16 @@ } ] }, + "label": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, "is_partition": { "type": "boolean", "default": false @@ -5534,7 +5626,7 @@ } }, "additionalProperties": false, - "description": "Dimension(name: str, type: dbt_semantic_interfaces.type_enums.dimension_type.DimensionType, description: Union[str, NoneType] = None, is_partition: bool = False, type_params: Union[dbt.contracts.graph.semantic_models.DimensionTypeParams, NoneType] = None, expr: Union[str, NoneType] = None, metadata: Union[dbt.contracts.graph.semantic_models.SourceFileMetadata, NoneType] = None)" + "description": "Dimension(name: str, type: dbt_semantic_interfaces.type_enums.dimension_type.DimensionType, description: Union[str, NoneType] = None, label: Union[str, NoneType] = None, is_partition: bool = False, type_params: Union[dbt.contracts.graph.semantic_models.DimensionTypeParams, NoneType] = None, expr: Union[str, NoneType] = None, metadata: Union[dbt.contracts.graph.semantic_models.SourceFileMetadata, NoneType] = None)" }, "DimensionTypeParams": { "type": "object", @@ -5581,8 +5673,20 @@ }, "additionalProperties": false, "description": "DimensionValidityParams(is_start: bool = False, is_end: bool = False)" + }, + "SemanticModelConfig": { + "type": "object", + "required": [], + "properties": { + "enabled": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": true, + "description": "SemanticModelConfig(_extra: Dict[str, Any] = , enabled: bool = True)" } }, "$schema": "http://json-schema.org/draft-07/schema#", "$id": "https://schemas.getdbt.com/dbt/manifest/v10.json" -} +} \ No newline at end of file diff --git a/schemas/dbt/manifest/v11.json b/schemas/dbt/manifest/v11.json new file mode 100644 index 00000000000..43cb42cb157 --- /dev/null +++ b/schemas/dbt/manifest/v11.json @@ -0,0 +1,7063 @@ +{ + "$ref": "#/$defs/WritableManifest", + "$defs": { + "ManifestMetadata": { + "type": "object", + "title": "ManifestMetadata", + "properties": { + "dbt_schema_version": { + "type": "string" + }, + "dbt_version": { + "type": "string", + "default": "1.8.0a1" + }, + "generated_at": { + "type": "string" + }, + "invocation_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "env": { + "type": "object", + 
"additionalProperties": { + "type": "string" + }, + "propertyNames": { + "type": "string" + } + }, + "project_name": { + "description": "Name of the root project", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "project_id": { + "description": "A unique identifier for the project, hashed from the project name", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "user_id": { + "description": "A unique identifier for the user", + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + "default": null + }, + "send_anonymous_usage_stats": { + "description": "Whether dbt is configured to send anonymous usage statistics", + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "adapter_type": { + "description": "The type name of the adapter", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "FileHash": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "Hook": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + }, + "Docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "ContractConfig": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "NodeConfig": { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "pre-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + 
"column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "contract": { + "$ref": "#/$defs/ContractConfig" + } + }, + "additionalProperties": true + }, + "ColumnLevelConstraint": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + }, + "ColumnInfo": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "$ref": "#/$defs/ColumnLevelConstraint" + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "RefArgs": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "DependsOn": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "InjectedCTE": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + }, + "Contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": 
null + } + }, + "additionalProperties": false + }, + "AnalysisNode": { + "type": "object", + "title": "AnalysisNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "analysis" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/NodeConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "$ref": "#/$defs/InjectedCTE" + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "$ref": "#/$defs/Contract" + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + "TestConfig": { + "type": "object", + "title": "TestConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + 
"type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "dbt_test__audit" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "test" + }, + "severity": { + "type": "string", + "default": "ERROR", + "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + }, + "store_failures": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "store_failures_as": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "where": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "limit": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "fail_calc": { + "type": "string", + "default": "count(*)" + }, + "warn_if": { + "type": "string", + "default": "!= 0" + }, + "error_if": { + "type": "string", + "default": "!= 0" + } + }, + "additionalProperties": true + }, + "SingularTestNode": { + "type": "object", + "title": "SingularTestNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "test" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/TestConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + 
} + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "$ref": "#/$defs/InjectedCTE" + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "$ref": "#/$defs/Contract" + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + "HookNode": { + "type": "object", + "title": "HookNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "operation" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/NodeConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + 
}, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "$ref": "#/$defs/InjectedCTE" + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "$ref": "#/$defs/Contract" + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + "ModelConfig": { + "type": "object", + "title": "ModelConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "pre-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "contract": { + "$ref": "#/$defs/ContractConfig" + }, + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" + } + }, + "additionalProperties": true + }, + "ModelLevelConstraint": { + "type": "object", + "title": "ModelLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + 
} + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + }, + "DeferRelation": { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name" + ] + }, + "ModelNode": { + "type": "object", + "title": "ModelNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "model" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/ModelConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + 
"extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "$ref": "#/$defs/InjectedCTE" + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "$ref": "#/$defs/Contract" + }, + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" + }, + "constraints": { + "type": "array", + "items": { + "$ref": "#/$defs/ModelLevelConstraint" + } + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "latest_version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "deprecation_date": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "defer_relation": { + "anyOf": [ + { + "$ref": "#/$defs/DeferRelation" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + "RPCNode": { + "type": "object", + "title": "RPCNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "rpc" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/NodeConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": 
{ + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "$ref": "#/$defs/InjectedCTE" + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "$ref": "#/$defs/Contract" + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + "SqlNode": { + "type": "object", + "title": "SqlNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "sql_operation" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/NodeConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": 
"null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "$ref": "#/$defs/InjectedCTE" + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "$ref": "#/$defs/Contract" + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + "TestMetadata": { + "type": "object", + "title": "TestMetadata", + "properties": { + "name": { + "type": "string" + }, + "kwargs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "namespace": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "GenericTestNode": { + "type": "object", + "title": "GenericTestNode", + "properties": { + "test_metadata": { + "$ref": "#/$defs/TestMetadata" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "test" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/TestConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": 
"boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "$ref": "#/$defs/InjectedCTE" + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "$ref": "#/$defs/Contract" + }, + "column_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "file_key_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "attached_node": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "test_metadata", + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + "SnapshotConfig": { + "type": "object", + "title": "SnapshotConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "snapshot" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "pre-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "contract": { + "$ref": "#/$defs/ContractConfig" + }, + "strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "target_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "target_database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } 
+ ], + "default": null + }, + "updated_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "check_cols": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "SnapshotNode": { + "type": "object", + "title": "SnapshotNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "snapshot" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/SnapshotConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "$ref": "#/$defs/InjectedCTE" + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "$ref": "#/$defs/Contract" + }, + "defer_relation": { + "anyOf": [ + { + "$ref": "#/$defs/DeferRelation" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + 
"database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum", + "config" + ] + }, + "UnitTestNodeConfig": { + "type": "object", + "title": "UnitTestNodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "pre-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "contract": { + "$ref": "#/$defs/ContractConfig" + }, + "expected_rows": { + "type": "array", + "items": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + } + }, + "additionalProperties": true + }, + "UnitTestOverrides": { + "type": "object", + "title": "UnitTestOverrides", + "properties": { + "macros": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "vars": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "env_vars": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "UnitTestNode": { + "type": "object", + "title": "UnitTestNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "unit_test" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + 
"checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/UnitTestNodeConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "$ref": "#/$defs/InjectedCTE" + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "$ref": "#/$defs/Contract" + }, + "tested_node_unique_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "this_input_node_unique_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "overrides": { + "anyOf": [ + { + "$ref": "#/$defs/UnitTestOverrides" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + "SeedConfig": { + "type": "object", + "title": "SeedConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + 
"default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "seed" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "pre-hook": { + "type": "array", + "items": { + "$ref": "#/$defs/Hook" + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "contract": { + "$ref": "#/$defs/ContractConfig" + }, + "delimiter": { + "type": "string", + "default": "," + }, + "quote_columns": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "MacroDependsOn": { + "type": "object", + "title": "MacroDependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "SeedNode": { + "type": "object", + "title": "SeedNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "seed" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "$ref": "#/$defs/FileHash" + }, + "config": { + "$ref": "#/$defs/SeedConfig" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" 
+ }, + { + "type": "null" + } + ], + "default": null + }, + "deferred": { + "type": "boolean", + "default": false + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "root_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "$ref": "#/$defs/MacroDependsOn" + }, + "defer_relation": { + "anyOf": [ + { + "$ref": "#/$defs/DeferRelation" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + "Quoting": { + "type": "object", + "title": "Quoting", + "properties": { + "database": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "identifier": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "column": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "Time": { + "type": "object", + "title": "Time", + "properties": { + "count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "period": { + "anyOf": [ + { + "enum": [ + "minute", + "hour", + "day" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "FreshnessThreshold": { + "type": "object", + "title": "FreshnessThreshold", + "properties": { + "warn_after": { + "anyOf": [ + { + "$ref": "#/$defs/Time" + }, + { + "type": "null" + } + ] + }, + "error_after": { + "anyOf": [ + { + "$ref": "#/$defs/Time" + }, + { + "type": "null" + } + ] + }, + "filter": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "ExternalPartition": { + "type": "object", + "title": "ExternalPartition", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "name": { + "type": "string", + "default": "" + }, + "description": { + "type": "string", + "default": "" + }, + "data_type": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "ExternalTable": { + "type": "object", + "title": "ExternalTable", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "location": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "file_format": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "row_format": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tbl_properties": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "partitions": { + "anyOf": [ + { + "type": "array", + "items": { + 
"type": "string" + } + }, + { + "type": "array", + "items": { + "$ref": "#/$defs/ExternalPartition" + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "SourceConfig": { + "type": "object", + "title": "SourceConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": true + }, + "SourceDefinition": { + "type": "object", + "title": "SourceDefinition", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "source" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "source_name": { + "type": "string" + }, + "source_description": { + "type": "string" + }, + "loader": { + "type": "string" + }, + "identifier": { + "type": "string" + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "quoting": { + "$ref": "#/$defs/Quoting" + }, + "loaded_at_field": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "freshness": { + "anyOf": [ + { + "$ref": "#/$defs/FreshnessThreshold" + }, + { + "type": "null" + } + ], + "default": null + }, + "external": { + "anyOf": [ + { + "$ref": "#/$defs/ExternalTable" + }, + { + "type": "null" + } + ], + "default": null + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "$ref": "#/$defs/ColumnInfo" + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "source_meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "$ref": "#/$defs/SourceConfig" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "created_at": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "source_name", + "source_description", + "loader", + "identifier" + ] + }, + "MacroArgument": { + "type": "object", + "title": "MacroArgument", + "properties": { + "name": { + "type": "string" + }, + "type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "description": { + "type": "string", + "default": "" + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "Macro": { + "type": "object", + "title": "Macro", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "macro" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "macro_sql": { + "type": "string" + }, + 
"depends_on": { + "$ref": "#/$defs/MacroDependsOn" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "docs": { + "$ref": "#/$defs/Docs" + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "arguments": { + "type": "array", + "items": { + "$ref": "#/$defs/MacroArgument" + } + }, + "created_at": { + "type": "number" + }, + "supported_languages": { + "anyOf": [ + { + "type": "array", + "items": { + "enum": [ + "python", + "sql" + ] + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "macro_sql" + ] + }, + "Documentation": { + "type": "object", + "title": "Documentation", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "doc" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "block_contents": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "block_contents" + ] + }, + "Owner": { + "type": "object", + "title": "Owner", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "email": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "ExposureConfig": { + "type": "object", + "title": "ExposureConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": true + }, + "Exposure": { + "type": "object", + "title": "Exposure", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "exposure" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "type": { + "enum": [ + "dashboard", + "notebook", + "analysis", + "ml", + "application" + ] + }, + "owner": { + "$ref": "#/$defs/Owner" + }, + "description": { + "type": "string", + "default": "" + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "maturity": { + "anyOf": [ + { + "enum": [ + "low", + "medium", + "high" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "$ref": "#/$defs/ExposureConfig" + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": 
"string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "created_at": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "type", + "owner" + ] + }, + "WhereFilter": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + }, + "WhereFilterIntersection": { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "$ref": "#/$defs/WhereFilter" + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + "MetricInputMeasure": { + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "$ref": "#/$defs/WhereFilterIntersection" + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "MetricTimeWindow": { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + "MetricInput": { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "$ref": "#/$defs/WhereFilterIntersection" + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_window": { + "anyOf": [ + { + "$ref": "#/$defs/MetricTimeWindow" + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "MetricTypeParams": { + "type": "object", + "title": "MetricTypeParams", + "properties": { + "measure": { + "anyOf": [ + { + "$ref": "#/$defs/MetricInputMeasure" + }, + { + "type": "null" + } + ], + "default": null + }, + "input_measures": { + "type": "array", + "items": { + "$ref": "#/$defs/MetricInputMeasure" + } + }, + "numerator": { + "anyOf": [ + { + "$ref": "#/$defs/MetricInput" + }, + { + "type": "null" + } + ], + "default": null + }, + "denominator": { + "anyOf": [ + { + "$ref": "#/$defs/MetricInput" + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "window": { + "anyOf": [ + { + "$ref": "#/$defs/MetricTimeWindow" + }, + { + "type": "null" + } + ], + "default": null + }, + "grain_to_date": { + "anyOf": [ + { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "metrics": { + "anyOf": [ + { + "type": "array", 
+ "items": { + "$ref": "#/$defs/MetricInput" + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "FileSlice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": { + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + }, + "SourceFileMetadata": { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "$ref": "#/$defs/FileSlice" + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + "MetricConfig": { + "type": "object", + "title": "MetricConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "Metric": { + "type": "object", + "title": "Metric", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "metric" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string" + }, + "label": { + "type": "string" + }, + "type": { + "enum": [ + "simple", + "ratio", + "cumulative", + "derived" + ] + }, + "type_params": { + "$ref": "#/$defs/MetricTypeParams" + }, + "filter": { + "anyOf": [ + { + "$ref": "#/$defs/WhereFilterIntersection" + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "$ref": "#/$defs/SourceFileMetadata" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "$ref": "#/$defs/MetricConfig" + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "created_at": { + "type": "number" + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "description", + "label", + "type", + "type_params" + ] + }, + "Group": { + "type": "object", + "title": "Group", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "group" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "owner": { + "$ref": "#/$defs/Owner" + } + }, + "additionalProperties": false, + "required": [ + "name", + 
"resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "owner" + ] + }, + "QueryParams": { + "type": "object", + "title": "QueryParams", + "properties": { + "metrics": { + "type": "array", + "items": { + "type": "string" + } + }, + "group_by": { + "type": "array", + "items": { + "type": "string" + } + }, + "where": { + "anyOf": [ + { + "$ref": "#/$defs/WhereFilterIntersection" + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "metrics", + "group_by", + "where" + ] + }, + "ExportConfig": { + "type": "object", + "title": "ExportConfig", + "properties": { + "export_as": { + "enum": [ + "table", + "view" + ] + }, + "schema_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "export_as" + ] + }, + "Export": { + "type": "object", + "title": "Export", + "properties": { + "name": { + "type": "string" + }, + "config": { + "$ref": "#/$defs/ExportConfig" + } + }, + "additionalProperties": false, + "required": [ + "name", + "config" + ] + }, + "SavedQueryConfig": { + "type": "object", + "title": "SavedQueryConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "export_as": { + "anyOf": [ + { + "enum": [ + "table", + "view" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "SavedQuery": { + "type": "object", + "title": "SavedQuery", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test" + ] + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "query_params": { + "$ref": "#/$defs/QueryParams" + }, + "exports": { + "type": "array", + "items": { + "$ref": "#/$defs/Export" + } + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "$ref": "#/$defs/SourceFileMetadata" + }, + { + "type": "null" + } + ], + "default": null + }, + "config": { + "$ref": "#/$defs/SavedQueryConfig" + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "created_at": { + "type": "number" + }, + "refs": { + "type": "array", + "items": { + "$ref": 
"#/$defs/RefArgs" + } + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "query_params", + "exports" + ] + }, + "NodeRelation": { + "type": "object", + "title": "NodeRelation", + "properties": { + "alias": { + "type": "string" + }, + "schema_name": { + "type": "string" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "alias", + "schema_name" + ] + }, + "Defaults": { + "type": "object", + "title": "Defaults", + "properties": { + "agg_time_dimension": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "Entity": { + "type": "object", + "title": "Entity", + "properties": { + "name": { + "type": "string" + }, + "type": { + "enum": [ + "foreign", + "natural", + "primary", + "unique" + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "type" + ] + }, + "MeasureAggregationParameters": { + "type": "object", + "title": "MeasureAggregationParameters", + "properties": { + "percentile": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "use_discrete_percentile": { + "type": "boolean", + "default": false + }, + "use_approximate_percentile": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false + }, + "NonAdditiveDimension": { + "type": "object", + "title": "NonAdditiveDimension", + "properties": { + "name": { + "type": "string" + }, + "window_choice": { + "enum": [ + "sum", + "min", + "max", + "count_distinct", + "sum_boolean", + "average", + "percentile", + "median", + "count" + ] + }, + "window_groupings": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "name", + "window_choice", + "window_groupings" + ] + }, + "Measure": { + "type": "object", + "title": "Measure", + "properties": { + "name": { + "type": "string" + }, + "agg": { + "enum": [ + "sum", + "min", + "max", + "count_distinct", + "sum_boolean", + "average", + "percentile", + "median", + "count" + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "create_metric": { + "type": "boolean", + "default": false + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "agg_params": { + "anyOf": [ + { + "$ref": "#/$defs/MeasureAggregationParameters" + }, + { + "type": "null" + } + ], + "default": null + }, + "non_additive_dimension": { + "anyOf": [ + { + "$ref": "#/$defs/NonAdditiveDimension" + }, + { + "type": "null" + } + ], + "default": null + }, + "agg_time_dimension": { + "anyOf": [ + { + "type": 
"string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "agg" + ] + }, + "DimensionValidityParams": { + "type": "object", + "title": "DimensionValidityParams", + "properties": { + "is_start": { + "type": "boolean", + "default": false + }, + "is_end": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false + }, + "DimensionTypeParams": { + "type": "object", + "title": "DimensionTypeParams", + "properties": { + "time_granularity": { + "enum": [ + "day", + "week", + "month", + "quarter", + "year" + ] + }, + "validity_params": { + "anyOf": [ + { + "$ref": "#/$defs/DimensionValidityParams" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "time_granularity" + ] + }, + "Dimension": { + "type": "object", + "title": "Dimension", + "properties": { + "name": { + "type": "string" + }, + "type": { + "enum": [ + "categorical", + "time" + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "is_partition": { + "type": "boolean", + "default": false + }, + "type_params": { + "anyOf": [ + { + "$ref": "#/$defs/DimensionTypeParams" + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "$ref": "#/$defs/SourceFileMetadata" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "type" + ] + }, + "SemanticModelConfig": { + "type": "object", + "title": "SemanticModelConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "SemanticModel": { + "type": "object", + "title": "SemanticModel", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test" + ] + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "model": { + "type": "string" + }, + "node_relation": { + "anyOf": [ + { + "$ref": "#/$defs/NodeRelation" + }, + { + "type": "null" + } + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "defaults": { + "anyOf": [ + { + "$ref": "#/$defs/Defaults" + }, + { + "type": "null" + } + ], + "default": null + }, + "entities": { + "type": "array", + "items": { + "$ref": "#/$defs/Entity" + } + }, + "measures": { + "type": "array", + "items": { + "$ref": "#/$defs/Measure" + } + }, + "dimensions": { + "type": "array", + "items": { + 
"$ref": "#/$defs/Dimension" + } + }, + "metadata": { + "anyOf": [ + { + "$ref": "#/$defs/SourceFileMetadata" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "refs": { + "type": "array", + "items": { + "$ref": "#/$defs/RefArgs" + } + }, + "created_at": { + "type": "number" + }, + "config": { + "$ref": "#/$defs/SemanticModelConfig" + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "primary_entity": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "model", + "node_relation" + ] + }, + "UnitTestInputFixture": { + "type": "object", + "title": "UnitTestInputFixture", + "properties": { + "input": { + "type": "string" + }, + "rows": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + { + "type": "null" + } + ], + "default": null + }, + "format": { + "enum": [ + "csv", + "dict" + ], + "default": "dict" + }, + "fixture": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "input" + ] + }, + "UnitTestOutputFixture": { + "type": "object", + "title": "UnitTestOutputFixture", + "properties": { + "rows": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + { + "type": "null" + } + ], + "default": null + }, + "format": { + "enum": [ + "csv", + "dict" + ], + "default": "dict" + }, + "fixture": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "UnitTestConfig": { + "type": "object", + "title": "UnitTestConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "UnitTestDefinition": { + "type": "object", + "title": "UnitTestDefinition", + "properties": { + "model": { + "type": "string" + }, + "given": { + "type": "array", + "items": { + "$ref": "#/$defs/UnitTestInputFixture" + } + }, + "expect": { + "$ref": "#/$defs/UnitTestOutputFixture" + }, + "name": { + "type": "string" + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test" + ] + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "_event_status": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "overrides": { + "anyOf": [ + { + "$ref": "#/$defs/UnitTestOverrides" + }, + { + 
"type": "null" + } + ], + "default": null + }, + "depends_on": { + "$ref": "#/$defs/DependsOn" + }, + "config": { + "$ref": "#/$defs/UnitTestConfig" + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "model", + "given", + "expect", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn" + ] + }, + "WritableManifest": { + "type": "object", + "title": "WritableManifest", + "properties": { + "metadata": { + "description": "Metadata about the manifest", + "$ref": "#/$defs/ManifestMetadata" + }, + "nodes": { + "type": "object", + "description": "The nodes defined in the dbt project and its dependencies", + "additionalProperties": { + "anyOf": [ + { + "$ref": "#/$defs/AnalysisNode" + }, + { + "$ref": "#/$defs/SingularTestNode" + }, + { + "$ref": "#/$defs/HookNode" + }, + { + "$ref": "#/$defs/ModelNode" + }, + { + "$ref": "#/$defs/RPCNode" + }, + { + "$ref": "#/$defs/SqlNode" + }, + { + "$ref": "#/$defs/GenericTestNode" + }, + { + "$ref": "#/$defs/SnapshotNode" + }, + { + "$ref": "#/$defs/UnitTestNode" + }, + { + "$ref": "#/$defs/SeedNode" + } + ] + }, + "propertyNames": { + "type": "string" + } + }, + "sources": { + "type": "object", + "description": "The sources defined in the dbt project and its dependencies", + "additionalProperties": { + "$ref": "#/$defs/SourceDefinition" + }, + "propertyNames": { + "type": "string" + } + }, + "macros": { + "type": "object", + "description": "The macros defined in the dbt project and its dependencies", + "additionalProperties": { + "$ref": "#/$defs/Macro" + }, + "propertyNames": { + "type": "string" + } + }, + "docs": { + "type": "object", + "description": "The docs defined in the dbt project and its dependencies", + "additionalProperties": { + "$ref": "#/$defs/Documentation" + }, + "propertyNames": { + "type": "string" + } + }, + "exposures": { + "type": "object", + "description": "The exposures defined in the dbt project and its dependencies", + "additionalProperties": { + "$ref": "#/$defs/Exposure" + }, + "propertyNames": { + "type": "string" + } + }, + "metrics": { + "type": "object", + "description": "The metrics defined in the dbt project and its dependencies", + "additionalProperties": { + "$ref": "#/$defs/Metric" + }, + "propertyNames": { + "type": "string" + } + }, + "groups": { + "type": "object", + "description": "The groups defined in the dbt project", + "additionalProperties": { + "$ref": "#/$defs/Group" + }, + "propertyNames": { + "type": "string" + } + }, + "selectors": { + "type": "object", + "description": "The selectors defined in selectors.yml", + "propertyNames": { + "type": "string" + } + }, + "disabled": { + "description": "A mapping of the disabled nodes in the target", + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/$defs/AnalysisNode" + }, + { + "$ref": "#/$defs/SingularTestNode" + }, + { + "$ref": "#/$defs/HookNode" + }, + { + "$ref": "#/$defs/ModelNode" + }, + { + "$ref": "#/$defs/RPCNode" + }, + { + "$ref": "#/$defs/SqlNode" + }, + { + "$ref": "#/$defs/GenericTestNode" + }, + { + "$ref": "#/$defs/SnapshotNode" + }, + { + "$ref": "#/$defs/UnitTestNode" + }, + { + "$ref": "#/$defs/SeedNode" + }, + { + "$ref": "#/$defs/SourceDefinition" + }, + { + "$ref": "#/$defs/Exposure" + }, + { + 
"$ref": "#/$defs/Metric" + }, + { + "$ref": "#/$defs/SavedQuery" + }, + { + "$ref": "#/$defs/SemanticModel" + }, + { + "$ref": "#/$defs/UnitTestDefinition" + } + ] + } + }, + "propertyNames": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "parent_map": { + "description": "A mapping from\u00a0child nodes to their dependencies", + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "child_map": { + "description": "A mapping from parent nodes to their dependents", + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "group_map": { + "description": "A mapping from group names to their nodes", + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "saved_queries": { + "type": "object", + "description": "The saved queries defined in the dbt project", + "additionalProperties": { + "$ref": "#/$defs/SavedQuery" + }, + "propertyNames": { + "type": "string" + } + }, + "semantic_models": { + "type": "object", + "description": "The semantic models defined in the dbt project", + "additionalProperties": { + "$ref": "#/$defs/SemanticModel" + }, + "propertyNames": { + "type": "string" + } + }, + "unit_tests": { + "type": "object", + "description": "The unit tests defined in the project", + "additionalProperties": { + "$ref": "#/$defs/UnitTestDefinition" + }, + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "metadata", + "nodes", + "sources", + "macros", + "docs", + "exposures", + "metrics", + "groups", + "selectors", + "disabled", + "parent_map", + "child_map", + "group_map", + "saved_queries", + "semantic_models", + "unit_tests" + ] + } + }, + "$id": "https://schemas.getdbt.com/dbt/manifest/v11.json" +} \ No newline at end of file diff --git a/schemas/dbt/manifest/v12.json b/schemas/dbt/manifest/v12.json new file mode 100644 index 00000000000..95cefb7654a --- /dev/null +++ b/schemas/dbt/manifest/v12.json @@ -0,0 +1,22381 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "title": "WritableManifest", + "properties": { + "metadata": { + "type": "object", + "title": "ManifestMetadata", + "description": "Metadata about the manifest", + "properties": { + "dbt_schema_version": { + "type": "string" + }, + "dbt_version": { + "type": "string", + "default": "1.9.0a1" + }, + "generated_at": { + "type": "string" + }, + "invocation_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "propertyNames": { + "type": "string" + } + }, + "project_name": { + "description": "Name of the root project", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "project_id": { + "description": "A unique identifier for the project, hashed from the project name", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "user_id": { + "description": "A unique identifier for the user", + "anyOf": [ + { + "type": "string", + "format": "uuid" + }, + { + "type": "null" + } + ], + 
"default": null + }, + "send_anonymous_usage_stats": { + "description": "Whether dbt is configured to send anonymous usage statistics", + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "adapter_type": { + "description": "The type name of the adapter", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "nodes": { + "type": "object", + "description": "The nodes defined in the dbt project and its dependencies", + "additionalProperties": { + "anyOf": [ + { + "type": "object", + "title": "Seed", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "seed" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "SeedConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "seed" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + 
"anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + }, + "delimiter": { + "type": "string", + "default": "," + }, + "quote_columns": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + 
"type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "root_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "MacroDependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "defer_relation": { + "anyOf": [ + { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": 
"object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "Analysis", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "analysis" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { 
+ "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": 
"ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", 
+ "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "SingularTest", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "test" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "TestConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "dbt_test__audit" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "test" + }, + "severity": { + "type": "string", + "default": "ERROR", + "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + }, + "store_failures": { + "anyOf": [ + { + 
"type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "store_failures_as": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "where": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "limit": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "fail_calc": { + "type": "string", + "default": "count(*)" + }, + "warn_if": { + "type": "string", + "default": "!= 0" + }, + "error_if": { + "type": "string", + "default": "!= 0" + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + 
"type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "HookNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "operation" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + 
"anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + 
"type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + 
"properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "Model", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "model" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "ModelConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + 
"persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + }, + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + 
"anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": 
false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ModelLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + }, + "columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "latest_version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "deprecation_date": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "defer_relation": { + "anyOf": [ + { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": 
"string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "primary_key": { + "type": "array", + "items": { + "type": "string" + } + }, + "time_spine": { + "anyOf": [ + { + "type": "object", + "title": "TimeSpine", + "properties": { + "standard_granularity_column": { + "type": "string" + }, + "custom_granularities": { + "type": "array", + "items": { + "type": "object", + "title": "CustomGranularity", + "properties": { + "name": { + "type": "string" + }, + "column_name": { + "anyOf": [ + { + "type": 
"string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "standard_granularity_column" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "SqlOperation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "sql_operation" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + 
], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + 
"type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "GenericTest", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "test" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + 
"items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "TestConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "dbt_test__audit" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "test" + }, + "severity": { + "type": "string", + "default": "ERROR", + "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + }, + "store_failures": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "store_failures_as": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "where": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "limit": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "fail_calc": { + "type": "string", + "default": "count(*)" + }, + "warn_if": { + "type": "string", + "default": "!= 0" + }, + "error_if": { + "type": "string", + "default": "!= 0" + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + 
"tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + 
"default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "column_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "file_key_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "attached_node": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "test_metadata": { + "type": "object", + "title": "TestMetadata", + "properties": { + "name": { + "type": "string", + "default": "test" + }, + "kwargs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "namespace": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "Snapshot", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "snapshot" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "SnapshotConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "snapshot" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": 
{ + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + }, + "strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "target_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "target_database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "updated_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "check_cols": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "snapshot_meta_column_names": { + "type": "object", + "title": "SnapshotMetaColumnNames", + "properties": { + "dbt_valid_to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "dbt_valid_from": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "dbt_scd_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "dbt_updated_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + 
"type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + 
"anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "defer_relation": { + "anyOf": [ + { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" 
+ } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum", + "config" + ] + } + ] + }, + "propertyNames": { + "type": "string" + } + }, + "sources": { + "type": "object", + "description": "The sources defined in the dbt project and its dependencies", + "additionalProperties": { + "type": "object", + "title": "SourceDefinition", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "source" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "source_name": { + "type": "string" + }, + "source_description": { + "type": "string" + }, + "loader": { + "type": "string" + }, + "identifier": { + "type": "string" + }, + "quoting": { + "type": "object", + "title": "Quoting", + "properties": { + "database": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { 
+ "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "identifier": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "column": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "loaded_at_field": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "freshness": { + "anyOf": [ + { + "type": "object", + "title": "FreshnessThreshold", + "properties": { + "warn_after": { + "anyOf": [ + { + "type": "object", + "title": "Time", + "properties": { + "count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "period": { + "anyOf": [ + { + "enum": [ + "minute", + "hour", + "day" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ] + }, + "error_after": { + "anyOf": [ + { + "type": "object", + "title": "Time", + "properties": { + "count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "period": { + "anyOf": [ + { + "enum": [ + "minute", + "hour", + "day" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ] + }, + "filter": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "external": { + "anyOf": [ + { + "type": "object", + "title": "ExternalTable", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "location": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "file_format": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "row_format": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tbl_properties": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "partitions": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "array", + "items": { + "type": "object", + "title": "ExternalPartition", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "name": { + "type": "string", + "default": "" + }, + "description": { + "type": "string", + "default": "" + }, + "data_type": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ], + "default": null + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + 
"properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "source_meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "type": "object", + "title": "SourceConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "created_at": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "source_name", + "source_description", + "loader", + "identifier" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "macros": { + "type": "object", + "description": "The macros defined in the dbt project and its dependencies", + "additionalProperties": { + "type": "object", + "title": "Macro", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "macro" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "macro_sql": { + "type": "string" + }, + "depends_on": { + "type": "object", + "title": "MacroDependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + 
"node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "arguments": { + "type": "array", + "items": { + "type": "object", + "title": "MacroArgument", + "properties": { + "name": { + "type": "string" + }, + "type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "description": { + "type": "string", + "default": "" + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "created_at": { + "type": "number" + }, + "supported_languages": { + "anyOf": [ + { + "type": "array", + "items": { + "enum": [ + "python", + "sql" + ] + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "macro_sql" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "docs": { + "type": "object", + "description": "The docs defined in the dbt project and its dependencies", + "additionalProperties": { + "type": "object", + "title": "Documentation", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "doc" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "block_contents": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "block_contents" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "exposures": { + "type": "object", + "description": "The exposures defined in the dbt project and its dependencies", + "additionalProperties": { + "type": "object", + "title": "Exposure", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "exposure" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "type": { + "enum": [ + "dashboard", + "notebook", + "analysis", + "ml", + "application" + ] + }, + "owner": { + "type": "object", + "title": "Owner", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "email": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "description": { + "type": "string", + "default": "" + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "maturity": { + "anyOf": [ + { + "enum": [ + "low", + "medium", + "high" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "type": "object", + "title": "ExposureConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + } + }, + 
"additionalProperties": true + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "created_at": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "type", + "owner" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "metrics": { + "type": "object", + "description": "The metrics defined in the dbt project and its dependencies", + "additionalProperties": { + "type": "object", + "title": "Metric", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "metric" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string" + }, + "label": { + "type": "string" + }, + "type": { + "enum": [ + "simple", + "ratio", + "cumulative", + "derived", + "conversion" + ] + }, + "type_params": { + "type": "object", + "title": "MetricTypeParams", + "properties": { + "measure": { + "anyOf": [ + { + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "input_measures": { + "type": "array", + "items": { + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": 
"WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "numerator": { + "anyOf": [ + { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "denominator": { + "anyOf": [ + { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + 
], + "default": null + }, + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "grain_to_date": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "metrics": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + { + "type": "null" + } + ], + "default": null + }, + "conversion_type_params": { + "anyOf": [ + { + "type": "object", + "title": "ConversionTypeParams", + "properties": { + "base_measure": { + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": 
null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "conversion_measure": { + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "entity": { + "type": "string" + }, + "calculation": { + "enum": [ + "conversions", + "conversion_rate" + ], + "default": "conversion_rate" + }, + "window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "constant_properties": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "object", + "title": "ConstantPropertyInput", + "properties": { + "base_property": { + "type": "string" + }, + "conversion_property": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "base_property", + "conversion_property" + ] + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "base_measure", + "conversion_measure", + "entity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "cumulative_type_params": { + "anyOf": [ + { + "type": "object", + "title": "CumulativeTypeParams", + "properties": { + "window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "grain_to_date": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "period_agg": { + "enum": [ + "first", + "last", + "average" + ], + "default": "first" + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": 
null + } + }, + "additionalProperties": false + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": { + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "time_granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "type": "object", + "title": "MetricConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "created_at": { + "type": "number" + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "description", + "label", + "type", + "type_params" + ] + }, + "propertyNames": { + "type": "string" + } + }, + 
"groups": { + "type": "object", + "description": "The groups defined in the dbt project", + "additionalProperties": { + "type": "object", + "title": "Group", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "group" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "owner": { + "type": "object", + "title": "Owner", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "email": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "owner" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "selectors": { + "type": "object", + "description": "The selectors defined in selectors.yml", + "propertyNames": { + "type": "string" + } + }, + "disabled": { + "description": "A mapping of the disabled nodes in the target", + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "object", + "title": "Seed", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "seed" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "SeedConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "seed" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + 
"type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + }, + "delimiter": { + "type": "string", + "default": "," + }, + "quote_columns": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + 
"additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "root_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "MacroDependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "defer_relation": { + "anyOf": [ + { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { 
+ "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "Analysis", + "properties": { + "database": { + 
"anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "analysis" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": 
"object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + 
"anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "SingularTest", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "test" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "TestConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + 
"type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "dbt_test__audit" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "test" + }, + "severity": { + "type": "string", + "default": "ERROR", + "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + }, + "store_failures": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "store_failures_as": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "where": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "limit": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "fail_calc": { + "type": "string", + "default": "count(*)" + }, + "warn_if": { + "type": "string", + "default": "!= 0" + }, + "error_if": { + "type": "string", + "default": "!= 0" + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } 
+ }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "HookNode", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + 
"type": "string" + }, + "resource_type": { + "const": "operation" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": 
[ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + 
"default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "Model", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "model" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "ModelConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + 
"default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + }, + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + 
"default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": 
"object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "access": { + "enum": [ + "private", + "protected", + "public" + ], + "default": "protected" + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ModelLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + }, + "columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "latest_version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "deprecation_date": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "defer_relation": { + "anyOf": [ + { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + 
"type": "null" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { 
+ "default": null + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "primary_key": { + "type": "array", + "items": { + "type": "string" + } + }, + "time_spine": { + "anyOf": [ + { + "type": "object", + "title": "TimeSpine", + "properties": { + "standard_granularity_column": { + "type": "string" + }, + "custom_granularities": { + "type": "array", + "items": { + "type": "object", + "title": "CustomGranularity", + "properties": { + "name": { + "type": "string" + }, + "column_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "standard_granularity_column" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "SqlOperation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "sql_operation" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + 
"anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": 
"string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + 
"type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "GenericTest", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "test" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "TestConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "dbt_test__audit" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "test" + }, + "severity": { + "type": "string", + "default": "ERROR", + "pattern": "^([Ww][Aa][Rr][Nn]|[Ee][Rr][Rr][Oo][Rr])$" + }, + "store_failures": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "store_failures_as": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "where": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "limit": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "fail_calc": { + "type": "string", + "default": "count(*)" + }, + "warn_if": { + "type": "string", + "default": "!= 0" + }, + "error_if": { + "type": "string", + "default": "!= 0" + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ 
+ "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + 
"compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "column_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "file_key_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "attached_node": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "test_metadata": { + "type": "object", + "title": "TestMetadata", + "properties": { + "name": { + "type": "string", + "default": "test" + }, + "kwargs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "namespace": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum" + ] + }, + { + "type": "object", + "title": "Snapshot", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "snapshot" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "alias": { + "type": "string" + }, + "checksum": { + "type": "object", + "title": "FileHash", + "properties": { + "name": { + "type": "string" + }, + "checksum": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "name", + "checksum" + ] + }, + "config": { + "type": "object", + "title": "SnapshotConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + 
"default": null + }, + "materialized": { + "type": "string", + "default": "snapshot" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + }, + "strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "target_schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "target_database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "updated_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "check_cols": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "snapshot_meta_column_names": { + "type": "object", + "title": "SnapshotMetaColumnNames", + "properties": { + "dbt_valid_to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "dbt_valid_from": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "dbt_scd_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": 
null + }, + "dbt_updated_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + } + }, + "additionalProperties": true + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "build_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "created_at": { + "type": "number" + }, + "config_call_dict": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "raw_code": { + "type": "string", + "default": "" + }, + "language": { + "type": "string", + "default": "sql" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { 
+ "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "compiled_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "compiled": { + "type": "boolean", + "default": false + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "extra_ctes_injected": { + "type": "boolean", + "default": false + }, + "extra_ctes": { + "type": "array", + "items": { + "type": "object", + "title": "InjectedCTE", + "properties": { + "id": { + "type": "string" + }, + "sql": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "id", + "sql" + ] + } + }, + "_pre_injected_sql": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "contract": { + "type": "object", + "title": "Contract", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "defer_relation": { + "anyOf": [ + { + "type": "object", + "title": "DeferRelation", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "alias": { + "type": "string" + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "name": { + "type": "string" + }, + "description": { + "type": "string" + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "anyOf": [ + { + "type": "object", + "title": "NodeConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + } + ] + }, + "meta": { + 
"type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "materialized": { + "type": "string", + "default": "view" + }, + "incremental_strategy": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "batch_size": { + "default": null + }, + "lookback": { + "default": 0 + }, + "persist_docs": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "post-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "pre-hook": { + "type": "array", + "items": { + "type": "object", + "title": "Hook", + "properties": { + "sql": { + "type": "string" + }, + "transaction": { + "type": "boolean", + "default": true + }, + "index": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "sql" + ] + } + }, + "quoting": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "column_types": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "full_refresh": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "unique_key": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "null" + } + ], + "default": null + }, + "on_schema_change": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "ignore" + }, + "on_configuration_change": { + "enum": [ + "apply", + "continue", + "fail" + ] + }, + "grants": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "packages": { + "type": "array", + "items": { + "type": "string" + } + }, + "docs": { + "type": "object", + "title": "Docs", + "properties": { + "show": { + "type": "boolean", + "default": true + }, + "node_color": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "contract": { + "type": "object", + "title": "ContractConfig", + "properties": { + "enforced": { + "type": "boolean", + "default": false + }, + "alias_types": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": false + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "alias", + "relation_name", + "resource_type", + "name", + "description", + "compiled_code", + "meta", + "tags", + "config" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "alias", + "checksum", + "config" + ] + }, + { + "type": "object", + "title": "SourceDefinition", + "properties": { + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "schema": { + "type": "string" + }, + "name": { + "type": "string" + }, + "resource_type": { + "const": "source" + }, + 
"package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "source_name": { + "type": "string" + }, + "source_description": { + "type": "string" + }, + "loader": { + "type": "string" + }, + "identifier": { + "type": "string" + }, + "quoting": { + "type": "object", + "title": "Quoting", + "properties": { + "database": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "identifier": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "column": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "loaded_at_field": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "freshness": { + "anyOf": [ + { + "type": "object", + "title": "FreshnessThreshold", + "properties": { + "warn_after": { + "anyOf": [ + { + "type": "object", + "title": "Time", + "properties": { + "count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "period": { + "anyOf": [ + { + "enum": [ + "minute", + "hour", + "day" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ] + }, + "error_after": { + "anyOf": [ + { + "type": "object", + "title": "Time", + "properties": { + "count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "period": { + "anyOf": [ + { + "enum": [ + "minute", + "hour", + "day" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ] + }, + "filter": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "external": { + "anyOf": [ + { + "type": "object", + "title": "ExternalTable", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "location": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "file_format": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "row_format": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "tbl_properties": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "partitions": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "array", + "items": { + "type": "object", + "title": "ExternalPartition", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "name": { + "type": "string", + "default": "" + }, + "description": { + "type": "string", + "default": "" + }, + "data_type": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + { + "type": "null" + } + ], + 
"default": null + }, + "description": { + "type": "string", + "default": "" + }, + "columns": { + "type": "object", + "additionalProperties": { + "type": "object", + "title": "ColumnInfo", + "properties": { + "name": { + "type": "string" + }, + "description": { + "type": "string", + "default": "" + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "data_type": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "constraints": { + "type": "array", + "items": { + "type": "object", + "title": "ColumnLevelConstraint", + "properties": { + "type": { + "enum": [ + "check", + "not_null", + "unique", + "primary_key", + "foreign_key", + "custom" + ] + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expression": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "warn_unenforced": { + "type": "boolean", + "default": true + }, + "warn_unsupported": { + "type": "boolean", + "default": true + }, + "to": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "to_columns": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "type" + ] + } + }, + "quote": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "default": null + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true, + "required": [ + "name" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "source_meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "type": "object", + "title": "SourceConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "event_time": { + "default": null + } + }, + "additionalProperties": true + }, + "patch_path": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "created_at": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "database", + "schema", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "source_name", + "source_description", + "loader", + "identifier" + ] + }, + { + "type": "object", + "title": "Exposure", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "exposure" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "type": { + "enum": [ 
+ "dashboard", + "notebook", + "analysis", + "ml", + "application" + ] + }, + "owner": { + "type": "object", + "title": "Owner", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "email": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": true + }, + "description": { + "type": "string", + "default": "" + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "maturity": { + "anyOf": [ + { + "enum": [ + "low", + "medium", + "high" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "type": "object", + "title": "ExposureConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + } + }, + "additionalProperties": true + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "created_at": { + "type": "number" + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "type", + "owner" + ] + }, + { + "type": "object", + "title": "Metric", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "metric" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string" + }, + "label": { + "type": "string" + }, + "type": { + "enum": [ + "simple", + "ratio", + "cumulative", + "derived", + "conversion" + ] + }, + "type_params": { + "type": "object", + "title": "MetricTypeParams", + "properties": { + "measure": { + "anyOf": [ + { + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": 
"object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "input_measures": { + "type": "array", + "items": { + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "numerator": { + "anyOf": [ + { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "denominator": { + "anyOf": [ + { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": 
"WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "grain_to_date": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "metrics": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "object", + "title": "MetricInput", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "offset_to_grain": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", 
+ "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + { + "type": "null" + } + ], + "default": null + }, + "conversion_type_params": { + "anyOf": [ + { + "type": "object", + "title": "ConversionTypeParams", + "properties": { + "base_measure": { + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "conversion_measure": { + "type": "object", + "title": "MetricInputMeasure", + "properties": { + "name": { + "type": "string" + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "join_to_timespine": { + "type": "boolean", + "default": false + }, + "fill_nulls_with": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "entity": { + "type": "string" + }, + "calculation": { + "enum": [ + "conversions", + "conversion_rate" + ], + "default": "conversion_rate" + }, + "window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "constant_properties": { + "anyOf": [ + { + "type": "array", + "items": { + "type": "object", + "title": "ConstantPropertyInput", + "properties": { + "base_property": { + "type": "string" + }, + "conversion_property": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "base_property", + "conversion_property" + ] + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "base_measure", + "conversion_measure", + "entity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "cumulative_type_params": { 
+ "anyOf": [ + { + "type": "object", + "title": "CumulativeTypeParams", + "properties": { + "window": { + "anyOf": [ + { + "type": "object", + "title": "MetricTimeWindow", + "properties": { + "count": { + "type": "integer" + }, + "granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + } + }, + "additionalProperties": false, + "required": [ + "count", + "granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "grain_to_date": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "period_agg": { + "enum": [ + "first", + "last", + "average" + ], + "default": "first" + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "filter": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": { + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "time_granularity": { + "anyOf": [ + { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "type": "array", + "items": { + "type": "string" + } + }, + "config": { + "type": "object", + "title": "MetricConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "sources": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "refs": { + 
"type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "metrics": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "created_at": { + "type": "number" + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "description", + "label", + "type", + "type_params" + ] + }, + { + "type": "object", + "title": "SavedQuery", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "saved_query" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "query_params": { + "type": "object", + "title": "QueryParams", + "properties": { + "metrics": { + "type": "array", + "items": { + "type": "string" + } + }, + "group_by": { + "type": "array", + "items": { + "type": "string" + } + }, + "where": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "where_filters" + ] + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "metrics", + "group_by", + "where" + ] + }, + "exports": { + "type": "array", + "items": { + "type": "object", + "title": "Export", + "properties": { + "name": { + "type": "string" + }, + "config": { + "type": "object", + "title": "ExportConfig", + "properties": { + "export_as": { + "enum": [ + "table", + "view" + ] + }, + "schema_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "export_as" + ] + }, + "unrendered_config": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "name", + "config" + ] + } + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": 
{ + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "config": { + "type": "object", + "title": "SavedQueryConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "export_as": { + "anyOf": [ + { + "enum": [ + "table", + "view" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "cache": { + "type": "object", + "title": "SavedQueryCache", + "properties": { + "enabled": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false + } + }, + "additionalProperties": true + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "created_at": { + "type": "number" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "query_params", + "exports" + ] + }, + { + "type": "object", + "title": "SemanticModel", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "model": { + "type": "string" + }, + "node_relation": { + "anyOf": [ + { + "type": "object", + "title": "NodeRelation", + "properties": { + "alias": { + "type": "string" + }, + "schema_name": { + "type": "string" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "" + } + }, + "additionalProperties": false, + 
"required": [ + "alias", + "schema_name" + ] + }, + { + "type": "null" + } + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "defaults": { + "anyOf": [ + { + "type": "object", + "title": "Defaults", + "properties": { + "agg_time_dimension": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "entities": { + "type": "array", + "items": { + "type": "object", + "title": "Entity", + "properties": { + "name": { + "type": "string" + }, + "type": { + "enum": [ + "foreign", + "natural", + "primary", + "unique" + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "type" + ] + } + }, + "measures": { + "type": "array", + "items": { + "type": "object", + "title": "Measure", + "properties": { + "name": { + "type": "string" + }, + "agg": { + "enum": [ + "sum", + "min", + "max", + "count_distinct", + "sum_boolean", + "average", + "percentile", + "median", + "count" + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "create_metric": { + "type": "boolean", + "default": false + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "agg_params": { + "anyOf": [ + { + "type": "object", + "title": "MeasureAggregationParameters", + "properties": { + "percentile": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "use_discrete_percentile": { + "type": "boolean", + "default": false + }, + "use_approximate_percentile": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "non_additive_dimension": { + "anyOf": [ + { + "type": "object", + "title": "NonAdditiveDimension", + "properties": { + "name": { + "type": "string" + }, + "window_choice": { + "enum": [ + "sum", + "min", + "max", + "count_distinct", + "sum_boolean", + "average", + "percentile", + "median", + "count" + ] + }, + "window_groupings": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "name", + "window_choice", + "window_groupings" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "agg_time_dimension": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "agg" + ] + } + }, + "dimensions": { + "type": "array", + "items": { + "type": "object", + "title": "Dimension", + "properties": { + "name": { + "type": "string" + }, + "type": { + "enum": [ + "categorical", + "time" + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + 
"type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "is_partition": { + "type": "boolean", + "default": false + }, + "type_params": { + "anyOf": [ + { + "type": "object", + "title": "DimensionTypeParams", + "properties": { + "time_granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + "validity_params": { + "anyOf": [ + { + "type": "object", + "title": "DimensionValidityParams", + "properties": { + "is_start": { + "type": "boolean", + "default": false + }, + "is_end": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "time_granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": { + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "type" + ] + } + }, + "metadata": { + "anyOf": [ + { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": { + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "created_at": { + "type": "number" + }, + "config": { + "type": "object", + "title": "SemanticModelConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { 
+ "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "primary_entity": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "model", + "node_relation" + ] + }, + { + "type": "object", + "title": "UnitTestDefinition", + "properties": { + "model": { + "type": "string" + }, + "given": { + "type": "array", + "items": { + "type": "object", + "title": "UnitTestInputFixture", + "properties": { + "input": { + "type": "string" + }, + "rows": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + { + "type": "null" + } + ], + "default": null + }, + "format": { + "enum": [ + "csv", + "dict", + "sql" + ], + "default": "dict" + }, + "fixture": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "input" + ] + } + }, + "expect": { + "type": "object", + "title": "UnitTestOutputFixture", + "properties": { + "rows": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + { + "type": "null" + } + ], + "default": null + }, + "format": { + "enum": [ + "csv", + "dict", + "sql" + ], + "default": "dict" + }, + "fixture": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "name": { + "type": "string" + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "overrides": { + "anyOf": [ + { + "type": "object", + "title": "UnitTestOverrides", + "properties": { + "macros": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "vars": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "env_vars": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "config": { + "type": "object", + "title": "UnitTestConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "anyOf": [ + { + "type": 
"string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "created_at": { + "type": "number" + }, + "versions": { + "anyOf": [ + { + "type": "object", + "title": "UnitTestNodeVersions", + "properties": { + "include": { + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "null" + } + ], + "default": null + }, + "exclude": { + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "model", + "given", + "expect", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn" + ] + } + ] + } + }, + "propertyNames": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "parent_map": { + "description": "A mapping from\u00a0child nodes to their dependencies", + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "child_map": { + "description": "A mapping from parent nodes to their dependents", + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "group_map": { + "description": "A mapping from group names to their nodes", + "anyOf": [ + { + "type": "object", + "additionalProperties": { + "type": "array", + "items": { + "type": "string" + } + }, + "propertyNames": { + "type": "string" + } + }, + { + "type": "null" + } + ] + }, + "saved_queries": { + "type": "object", + "description": "The saved queries defined in the dbt project", + "additionalProperties": { + "type": "object", + "title": "SavedQuery", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "const": "saved_query" + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "query_params": { + "type": "object", + "title": "QueryParams", + "properties": { + "metrics": { + "type": "array", + "items": { + "type": "string" + } + }, + "group_by": { + "type": "array", + "items": { + "type": "string" + } + }, + "where": { + "anyOf": [ + { + "type": "object", + "title": "WhereFilterIntersection", + "properties": { + "where_filters": { + "type": "array", + "items": { + "type": "object", + "title": "WhereFilter", + "properties": { + "where_sql_template": { + "type": "string" + } + }, + "additionalProperties": false, + "required": [ + "where_sql_template" + ] + } + } + }, + "additionalProperties": false, + "required": [ + 
"where_filters" + ] + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "metrics", + "group_by", + "where" + ] + }, + "exports": { + "type": "array", + "items": { + "type": "object", + "title": "Export", + "properties": { + "name": { + "type": "string" + }, + "config": { + "type": "object", + "title": "ExportConfig", + "properties": { + "export_as": { + "enum": [ + "table", + "view" + ] + }, + "schema_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "alias": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "export_as" + ] + }, + "unrendered_config": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "name", + "config" + ] + } + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": { + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "config": { + "type": "object", + "title": "SavedQueryConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "export_as": { + "anyOf": [ + { + "enum": [ + "table", + "view" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "cache": { + "type": "object", + "title": "SavedQueryCache", + "properties": { + "enabled": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false + } + }, + "additionalProperties": true + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "created_at": { + "type": "number" + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": 
"string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "query_params", + "exports" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "semantic_models": { + "type": "object", + "description": "The semantic models defined in the dbt project", + "additionalProperties": { + "type": "object", + "title": "SemanticModel", + "properties": { + "name": { + "type": "string" + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "model": { + "type": "string" + }, + "node_relation": { + "anyOf": [ + { + "type": "object", + "title": "NodeRelation", + "properties": { + "alias": { + "type": "string" + }, + "schema_name": { + "type": "string" + }, + "database": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": "" + } + }, + "additionalProperties": false, + "required": [ + "alias", + "schema_name" + ] + }, + { + "type": "null" + } + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "defaults": { + "anyOf": [ + { + "type": "object", + "title": "Defaults", + "properties": { + "agg_time_dimension": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "entities": { + "type": "array", + "items": { + "type": "object", + "title": "Entity", + "properties": { + "name": { + "type": "string" + }, + "type": { + "enum": [ + "foreign", + "natural", + "primary", + "unique" + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "role": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "type" + ] + } + }, + "measures": { + "type": "array", + "items": { + "type": "object", + "title": "Measure", + "properties": { + "name": { + "type": "string" + }, + "agg": { + "enum": [ + "sum", + "min", + "max", + "count_distinct", + "sum_boolean", + "average", + "percentile", + "median", + "count" + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { 
+ "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "create_metric": { + "type": "boolean", + "default": false + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "agg_params": { + "anyOf": [ + { + "type": "object", + "title": "MeasureAggregationParameters", + "properties": { + "percentile": { + "anyOf": [ + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + }, + "use_discrete_percentile": { + "type": "boolean", + "default": false + }, + "use_approximate_percentile": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "non_additive_dimension": { + "anyOf": [ + { + "type": "object", + "title": "NonAdditiveDimension", + "properties": { + "name": { + "type": "string" + }, + "window_choice": { + "enum": [ + "sum", + "min", + "max", + "count_distinct", + "sum_boolean", + "average", + "percentile", + "median", + "count" + ] + }, + "window_groupings": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "name", + "window_choice", + "window_groupings" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "agg_time_dimension": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "agg" + ] + } + }, + "dimensions": { + "type": "array", + "items": { + "type": "object", + "title": "Dimension", + "properties": { + "name": { + "type": "string" + }, + "type": { + "enum": [ + "categorical", + "time" + ] + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "label": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "is_partition": { + "type": "boolean", + "default": false + }, + "type_params": { + "anyOf": [ + { + "type": "object", + "title": "DimensionTypeParams", + "properties": { + "time_granularity": { + "enum": [ + "nanosecond", + "microsecond", + "millisecond", + "second", + "minute", + "hour", + "day", + "week", + "month", + "quarter", + "year" + ] + }, + "validity_params": { + "anyOf": [ + { + "type": "object", + "title": "DimensionValidityParams", + "properties": { + "is_start": { + "type": "boolean", + "default": false + }, + "is_end": { + "type": "boolean", + "default": false + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "time_granularity" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "expr": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "metadata": { + "anyOf": [ + { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": { + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + 
"additionalProperties": false, + "required": [ + "name", + "type" + ] + } + }, + "metadata": { + "anyOf": [ + { + "type": "object", + "title": "SourceFileMetadata", + "properties": { + "repo_file_path": { + "type": "string" + }, + "file_slice": { + "type": "object", + "title": "FileSlice", + "properties": { + "filename": { + "type": "string" + }, + "content": { + "type": "string" + }, + "start_line_number": { + "type": "integer" + }, + "end_line_number": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "filename", + "content", + "start_line_number", + "end_line_number" + ] + } + }, + "additionalProperties": false, + "required": [ + "repo_file_path", + "file_slice" + ] + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "refs": { + "type": "array", + "items": { + "type": "object", + "title": "RefArgs", + "properties": { + "name": { + "type": "string" + }, + "package": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "created_at": { + "type": "number" + }, + "config": { + "type": "object", + "title": "SemanticModelConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "enabled": { + "type": "boolean", + "default": true + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "unrendered_config": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "primary_entity": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "group": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn", + "model", + "node_relation" + ] + }, + "propertyNames": { + "type": "string" + } + }, + "unit_tests": { + "type": "object", + "description": "The unit tests defined in the project", + "additionalProperties": { + "type": "object", + "title": "UnitTestDefinition", + "properties": { + "model": { + "type": "string" + }, + "given": { + "type": "array", + "items": { + "type": "object", + "title": "UnitTestInputFixture", + "properties": { + "input": { + "type": "string" + }, + "rows": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + { + "type": "null" + } + ], + "default": null + }, + "format": { + "enum": [ + "csv", + "dict", + "sql" + ], + "default": "dict" + }, + "fixture": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "input" + ] + } + }, + "expect": { + "type": "object", + "title": "UnitTestOutputFixture", + "properties": { + "rows": { + "anyOf": [ + { + "type": 
"string" + }, + { + "type": "array", + "items": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + { + "type": "null" + } + ], + "default": null + }, + "format": { + "enum": [ + "csv", + "dict", + "sql" + ], + "default": "dict" + }, + "fixture": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "name": { + "type": "string" + }, + "resource_type": { + "enum": [ + "model", + "analysis", + "test", + "snapshot", + "operation", + "seed", + "rpc", + "sql_operation", + "doc", + "source", + "macro", + "exposure", + "metric", + "group", + "saved_query", + "semantic_model", + "unit_test", + "fixture" + ] + }, + "package_name": { + "type": "string" + }, + "path": { + "type": "string" + }, + "original_file_path": { + "type": "string" + }, + "unique_id": { + "type": "string" + }, + "fqn": { + "type": "array", + "items": { + "type": "string" + } + }, + "description": { + "type": "string", + "default": "" + }, + "overrides": { + "anyOf": [ + { + "type": "object", + "title": "UnitTestOverrides", + "properties": { + "macros": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "vars": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "env_vars": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "depends_on": { + "type": "object", + "title": "DependsOn", + "properties": { + "macros": { + "type": "array", + "items": { + "type": "string" + } + }, + "nodes": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false + }, + "config": { + "type": "object", + "title": "UnitTestConfig", + "properties": { + "_extra": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "tags": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + }, + "meta": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": true + }, + "checksum": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "created_at": { + "type": "number" + }, + "versions": { + "anyOf": [ + { + "type": "object", + "title": "UnitTestNodeVersions", + "properties": { + "include": { + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "null" + } + ], + "default": null + }, + "exclude": { + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ], + "default": null + }, + "version": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "model", + "given", + "expect", + "name", + "resource_type", + "package_name", + "path", + "original_file_path", + "unique_id", + "fqn" + ] + }, + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "metadata", + "nodes", + "sources", + "macros", + "docs", + "exposures", + "metrics", + "groups", + 
"selectors", + "disabled", + "parent_map", + "child_map", + "group_map", + "saved_queries", + "semantic_models", + "unit_tests" + ], + "$id": "https://schemas.getdbt.com/dbt/manifest/v12.json" +} \ No newline at end of file diff --git a/schemas/dbt/manifest/v5.json b/schemas/dbt/manifest/v5.json index 6c73ab8876b..6b048d7c231 100644 --- a/schemas/dbt/manifest/v5.json +++ b/schemas/dbt/manifest/v5.json @@ -5981,4 +5981,4 @@ }, "$schema": "http://json-schema.org/draft-07/schema#", "$id": "https://schemas.getdbt.com/dbt/manifest/v5.json" -} +} \ No newline at end of file diff --git a/schemas/dbt/manifest/v6.json b/schemas/dbt/manifest/v6.json index 79b627332cf..d835acbab92 100644 --- a/schemas/dbt/manifest/v6.json +++ b/schemas/dbt/manifest/v6.json @@ -6206,4 +6206,4 @@ }, "$schema": "http://json-schema.org/draft-07/schema#", "$id": "https://schemas.getdbt.com/dbt/manifest/v6.json" -} +} \ No newline at end of file diff --git a/schemas/dbt/manifest/v7.json b/schemas/dbt/manifest/v7.json index efb3ff99c89..03884400405 100644 --- a/schemas/dbt/manifest/v7.json +++ b/schemas/dbt/manifest/v7.json @@ -6572,4 +6572,4 @@ }, "$schema": "http://json-schema.org/draft-07/schema#", "$id": "https://schemas.getdbt.com/dbt/manifest/v7.json" -} +} \ No newline at end of file diff --git a/schemas/dbt/manifest/v8.json b/schemas/dbt/manifest/v8.json index d7192ab84c0..ff71790e5f0 100644 --- a/schemas/dbt/manifest/v8.json +++ b/schemas/dbt/manifest/v8.json @@ -4431,4 +4431,4 @@ }, "$schema": "http://json-schema.org/draft-07/schema#", "$id": "https://schemas.getdbt.com/dbt/manifest/v8.json" -} +} \ No newline at end of file diff --git a/schemas/dbt/manifest/v9.json b/schemas/dbt/manifest/v9.json index 28a477367a7..48ede936b47 100644 --- a/schemas/dbt/manifest/v9.json +++ b/schemas/dbt/manifest/v9.json @@ -4962,4 +4962,4 @@ }, "$schema": "http://json-schema.org/draft-07/schema#", "$id": "https://schemas.getdbt.com/dbt/manifest/v9.json" -} +} \ No newline at end of file diff --git a/schemas/dbt/run-results/v4.json b/schemas/dbt/run-results/v4.json index a1948721752..029886a3a45 100644 --- a/schemas/dbt/run-results/v4.json +++ b/schemas/dbt/run-results/v4.json @@ -1,86 +1,89 @@ { - "type": "object", - "required": [ - "metadata", - "results", - "elapsed_time" - ], - "properties": { - "metadata": { - "$ref": "#/definitions/BaseArtifactMetadata" - }, - "results": { - "type": "array", - "items": { - "$ref": "#/definitions/RunResultOutput" - } - }, - "elapsed_time": { - "type": "number" - }, - "args": { - "type": "object", - "default": {} - } - }, - "additionalProperties": false, - "description": "RunResultsArtifact(metadata: dbt.contracts.util.BaseArtifactMetadata, results: Sequence[dbt.contracts.results.RunResultOutput], elapsed_time: float, args: Dict[str, Any] = )", - "definitions": { + "$ref": "#/$defs/RunResultsArtifact", + "$defs": { "BaseArtifactMetadata": { "type": "object", - "required": [ - "dbt_schema_version" - ], + "title": "BaseArtifactMetadata", "properties": { "dbt_schema_version": { "type": "string" }, "dbt_version": { "type": "string", - "default": "1.5.0a1" + "default": "1.7.0b1" }, "generated_at": { - "type": "string", - "format": "date-time", - "default": "2023-02-09T23:46:55.264544Z" + "type": "string" }, "invocation_id": { - "oneOf": [ + "anyOf": [ { "type": "string" }, { "type": "null" } - ], - "default": "e6a9b266-203d-4fec-93af-fb8f55423a6b" + ] }, "env": { "type": "object", "additionalProperties": { "type": "string" }, - "default": {} + "propertyNames": { + "type": "string" + } } }, 
"additionalProperties": false, - "description": "BaseArtifactMetadata(dbt_schema_version: str, dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = , invocation_id: Optional[str] = , env: Dict[str, str] = )" + "required": [ + "dbt_schema_version" + ] }, - "RunResultOutput": { + "TimingInfo": { "type": "object", + "title": "TimingInfo", + "properties": { + "name": { + "type": "string" + }, + "started_at": { + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "default": null + }, + "completed_at": { + "anyOf": [ + { + "type": "string", + "format": "date-time" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, "required": [ - "status", - "timing", - "thread_id", - "execution_time", - "adapter_response", - "unique_id" - ], + "name" + ] + }, + "RunResultOutput": { + "type": "object", + "title": "RunResultOutput", "properties": { "status": { - "oneOf": [ + "anyOf": [ { - "type": "string", "enum": [ "success", "error", @@ -88,7 +91,6 @@ ] }, { - "type": "string", "enum": [ "pass", "error", @@ -98,7 +100,6 @@ ] }, { - "type": "string", "enum": [ "pass", "warn", @@ -111,7 +112,7 @@ "timing": { "type": "array", "items": { - "$ref": "#/definitions/TimingInfo" + "$ref": "#/$defs/TimingInfo" } }, "thread_id": { @@ -121,10 +122,13 @@ "type": "number" }, "adapter_response": { - "type": "object" + "type": "object", + "propertyNames": { + "type": "string" + } }, "message": { - "oneOf": [ + "anyOf": [ { "type": "string" }, @@ -134,7 +138,7 @@ ] }, "failures": { - "oneOf": [ + "anyOf": [ { "type": "integer" }, @@ -148,255 +152,47 @@ } }, "additionalProperties": false, - "description": "RunResultOutput(status: Union[dbt.contracts.results.RunStatus, dbt.contracts.results.TestStatus, dbt.contracts.results.FreshnessStatus], timing: List[dbt.contracts.results.TimingInfo], thread_id: str, execution_time: float, adapter_response: Dict[str, Any], message: Optional[str], failures: Optional[int], unique_id: str)" - }, - "TimingInfo": { - "type": "object", "required": [ - "name" - ], - "properties": { - "name": { - "type": "string" - }, - "started_at": { - "oneOf": [ - { - "type": "string", - "format": "date-time" - }, - { - "type": "null" - } - ] - }, - "completed_at": { - "oneOf": [ - { - "type": "string", - "format": "date-time" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "description": "TimingInfo(name: str, started_at: Optional[datetime.datetime] = None, completed_at: Optional[datetime.datetime] = None)" - }, - "FreshnessMetadata": { - "type": "object", - "required": [], - "properties": { - "dbt_schema_version": { - "type": "string", - "default": "https://schemas.getdbt.com/dbt/sources/v3.json" - }, - "dbt_version": { - "type": "string", - "default": "1.5.0a1" - }, - "generated_at": { - "type": "string", - "format": "date-time", - "default": "2023-02-09T23:46:55.263337Z" - }, - "invocation_id": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "default": "e6a9b266-203d-4fec-93af-fb8f55423a6b" - }, - "env": { - "type": "object", - "additionalProperties": { - "type": "string" - }, - "default": {} - } - }, - "additionalProperties": false, - "description": "FreshnessMetadata(dbt_schema_version: str = , dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = , invocation_id: Optional[str] = , env: Dict[str, str] = )" - }, - "SourceFreshnessRuntimeError": { - "type": "object", - "required": [ - "unique_id", - "status" - ], - "properties": { - "unique_id": 
{ - "type": "string" - }, - "error": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "null" - } - ] - }, - "status": { - "type": "string", - "enum": [ - "runtime error" - ] - } - }, - "additionalProperties": false, - "description": "SourceFreshnessRuntimeError(unique_id: str, error: Union[str, int, NoneType], status: dbt.contracts.results.FreshnessErrorEnum)" - }, - "SourceFreshnessOutput": { - "type": "object", - "required": [ - "unique_id", - "max_loaded_at", - "snapshotted_at", - "max_loaded_at_time_ago_in_s", "status", - "criteria", - "adapter_response", "timing", "thread_id", - "execution_time" - ], + "execution_time", + "adapter_response", + "message", + "failures", + "unique_id" + ] + }, + "RunResultsArtifact": { + "type": "object", + "title": "RunResultsArtifact", "properties": { - "unique_id": { - "type": "string" - }, - "max_loaded_at": { - "type": "string", - "format": "date-time" - }, - "snapshotted_at": { - "type": "string", - "format": "date-time" - }, - "max_loaded_at_time_ago_in_s": { - "type": "number" - }, - "status": { - "type": "string", - "enum": [ - "pass", - "warn", - "error", - "runtime error" - ] + "metadata": { + "$ref": "#/$defs/BaseArtifactMetadata" }, - "criteria": { - "$ref": "#/definitions/FreshnessThreshold" - }, - "adapter_response": { - "type": "object" - }, - "timing": { + "results": { "type": "array", "items": { - "$ref": "#/definitions/TimingInfo" + "$ref": "#/$defs/RunResultOutput" } }, - "thread_id": { - "type": "string" - }, - "execution_time": { + "elapsed_time": { "type": "number" - } - }, - "additionalProperties": false, - "description": "SourceFreshnessOutput(unique_id: str, max_loaded_at: datetime.datetime, snapshotted_at: datetime.datetime, max_loaded_at_time_ago_in_s: float, status: dbt.contracts.results.FreshnessStatus, criteria: dbt.contracts.graph.unparsed.FreshnessThreshold, adapter_response: Dict[str, Any], timing: List[dbt.contracts.results.TimingInfo], thread_id: str, execution_time: float)" - }, - "FreshnessThreshold": { - "type": "object", - "required": [], - "properties": { - "warn_after": { - "oneOf": [ - { - "$ref": "#/definitions/Time" - }, - { - "type": "null" - } - ], - "default": { - "count": null, - "period": null - } }, - "error_after": { - "oneOf": [ - { - "$ref": "#/definitions/Time" - }, - { - "type": "null" - } - ], - "default": { - "count": null, - "period": null + "args": { + "type": "object", + "propertyNames": { + "type": "string" } - }, - "filter": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "description": "FreshnessThreshold(warn_after: Optional[dbt.contracts.graph.unparsed.Time] = , error_after: Optional[dbt.contracts.graph.unparsed.Time] = , filter: Optional[str] = None)" - }, - "Time": { - "type": "object", - "required": [], - "properties": { - "count": { - "oneOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ] - }, - "period": { - "oneOf": [ - { - "type": "string", - "enum": [ - "minute", - "hour", - "day" - ] - }, - { - "type": "null" - } - ] } }, "additionalProperties": false, - "description": "Time(count: Optional[int] = None, period: Optional[dbt.contracts.graph.unparsed.TimePeriod] = None)" + "required": [ + "metadata", + "results", + "elapsed_time" + ] } }, - "$schema": "http://json-schema.org/draft-07/schema#", "$id": "https://schemas.getdbt.com/dbt/run-results/v4.json" } diff --git a/schemas/dbt/run-results/v5.json b/schemas/dbt/run-results/v5.json new file mode 100644 index 
00000000000..4e400e5f18a --- /dev/null +++ b/schemas/dbt/run-results/v5.json @@ -0,0 +1,229 @@ +{ + "$ref": "#/$defs/RunResultsArtifact", + "$defs": { + "BaseArtifactMetadata": { + "type": "object", + "title": "BaseArtifactMetadata", + "properties": { + "dbt_schema_version": { + "type": "string" + }, + "dbt_version": { + "type": "string", + "default": "1.7.0b1" + }, + "generated_at": { + "type": "string" + }, + "invocation_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "dbt_schema_version" + ] + }, + "TimingInfo": { + "type": "object", + "title": "TimingInfo", + "properties": { + "name": { + "type": "string" + }, + "started_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "completed_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + }, + "RunResultOutput": { + "type": "object", + "title": "RunResultOutput", + "properties": { + "status": { + "anyOf": [ + { + "enum": [ + "success", + "error", + "skipped" + ] + }, + { + "enum": [ + "pass", + "error", + "fail", + "warn", + "skipped" + ] + }, + { + "enum": [ + "pass", + "warn", + "error", + "runtime error" + ] + } + ] + }, + "timing": { + "type": "array", + "items": { + "$ref": "#/$defs/TimingInfo" + } + }, + "thread_id": { + "type": "string" + }, + "execution_time": { + "type": "number" + }, + "adapter_response": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "message": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "failures": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ] + }, + "unique_id": { + "type": "string" + }, + "compiled": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "status", + "timing", + "thread_id", + "execution_time", + "adapter_response", + "message", + "failures", + "unique_id", + "compiled", + "compiled_code", + "relation_name" + ] + }, + "RunResultsArtifact": { + "type": "object", + "title": "RunResultsArtifact", + "properties": { + "metadata": { + "$ref": "#/$defs/BaseArtifactMetadata" + }, + "results": { + "type": "array", + "items": { + "$ref": "#/$defs/RunResultOutput" + } + }, + "elapsed_time": { + "type": "number" + }, + "args": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "metadata", + "results", + "elapsed_time" + ] + } + }, + "$id": "https://schemas.getdbt.com/dbt/run-results/v5.json" +} diff --git a/schemas/dbt/run-results/v6.json b/schemas/dbt/run-results/v6.json new file mode 100644 index 00000000000..86b79e1206c --- /dev/null +++ b/schemas/dbt/run-results/v6.json @@ -0,0 +1,216 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "title": "RunResultsArtifact", + "properties": { + "metadata": { + "type": "object", + "title": "BaseArtifactMetadata", + "properties": { + "dbt_schema_version": { + "type": "string" + }, + "dbt_version": { + "type": "string", + 
"default": "1.9.0a1" + }, + "generated_at": { + "type": "string" + }, + "invocation_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "env": { + "type": "object", + "additionalProperties": { + "type": "string" + }, + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "dbt_schema_version" + ] + }, + "results": { + "type": "array", + "items": { + "type": "object", + "title": "RunResultOutput", + "properties": { + "status": { + "anyOf": [ + { + "enum": [ + "success", + "error", + "skipped" + ] + }, + { + "enum": [ + "pass", + "error", + "fail", + "warn", + "skipped" + ] + }, + { + "enum": [ + "pass", + "warn", + "error", + "runtime error" + ] + } + ] + }, + "timing": { + "type": "array", + "items": { + "type": "object", + "title": "TimingInfo", + "properties": { + "name": { + "type": "string" + }, + "started_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "completed_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "thread_id": { + "type": "string" + }, + "execution_time": { + "type": "number" + }, + "adapter_response": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "message": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "failures": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ] + }, + "unique_id": { + "type": "string" + }, + "compiled": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ] + }, + "compiled_code": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + }, + "relation_name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "status", + "timing", + "thread_id", + "execution_time", + "adapter_response", + "message", + "failures", + "unique_id", + "compiled", + "compiled_code", + "relation_name" + ] + } + }, + "elapsed_time": { + "type": "number" + }, + "args": { + "type": "object", + "propertyNames": { + "type": "string" + } + } + }, + "additionalProperties": false, + "required": [ + "metadata", + "results", + "elapsed_time" + ], + "$id": "https://schemas.getdbt.com/dbt/run-results/v6.json" +} diff --git a/schemas/dbt/sources/v3.json b/schemas/dbt/sources/v3.json index e36e44b90f7..5ade4a90be0 100644 --- a/schemas/dbt/sources/v3.json +++ b/schemas/dbt/sources/v3.json @@ -1,281 +1,280 @@ { + "$schema": "https://json-schema.org/draft/2020-12/schema", "type": "object", - "required": [ - "metadata", - "results", - "elapsed_time" - ], + "title": "FreshnessExecutionResultArtifact", "properties": { "metadata": { - "$ref": "#/definitions/FreshnessMetadata" - }, - "results": { - "type": "array", - "items": { - "oneOf": [ - { - "$ref": "#/definitions/SourceFreshnessRuntimeError" - }, - { - "$ref": "#/definitions/SourceFreshnessOutput" - } - ] - } - }, - "elapsed_time": { - "type": "number" - } - }, - "additionalProperties": false, - "description": "FreshnessExecutionResultArtifact(metadata: dbt.contracts.results.FreshnessMetadata, results: Sequence[Union[dbt.contracts.results.SourceFreshnessRuntimeError, dbt.contracts.results.SourceFreshnessOutput]], elapsed_time: float)", - "definitions": { - "FreshnessMetadata": { "type": "object", - "required": [], + "title": "FreshnessMetadata", "properties": { "dbt_schema_version": { - 
"type": "string", - "default": "https://schemas.getdbt.com/dbt/sources/v3.json" + "type": "string" }, "dbt_version": { "type": "string", - "default": "1.5.0a1" + "default": "1.9.0a1" }, "generated_at": { - "type": "string", - "format": "date-time", - "default": "2023-02-18T14:20:00.362449Z" + "type": "string" }, "invocation_id": { - "oneOf": [ + "anyOf": [ { "type": "string" }, { "type": "null" } - ], - "default": "b1e277bf-2a2c-4d49-9d37-8d1a72cf26c7" + ] }, "env": { "type": "object", "additionalProperties": { "type": "string" }, - "default": {} - } - }, - "additionalProperties": false, - "description": "FreshnessMetadata(dbt_schema_version: str = , dbt_version: str = '1.5.0a1', generated_at: datetime.datetime = , invocation_id: Optional[str] = , env: Dict[str, str] = )" - }, - "SourceFreshnessRuntimeError": { - "type": "object", - "required": [ - "unique_id", - "status" - ], - "properties": { - "unique_id": { - "type": "string" - }, - "error": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "integer" - }, - { - "type": "null" - } - ] - }, - "status": { - "type": "string", - "enum": [ - "runtime error" - ] - } - }, - "additionalProperties": false, - "description": "SourceFreshnessRuntimeError(unique_id: str, error: Union[str, int, NoneType], status: dbt.contracts.results.FreshnessErrorEnum)" - }, - "SourceFreshnessOutput": { - "type": "object", - "required": [ - "unique_id", - "max_loaded_at", - "snapshotted_at", - "max_loaded_at_time_ago_in_s", - "status", - "criteria", - "adapter_response", - "timing", - "thread_id", - "execution_time" - ], - "properties": { - "unique_id": { - "type": "string" - }, - "max_loaded_at": { - "type": "string", - "format": "date-time" - }, - "snapshotted_at": { - "type": "string", - "format": "date-time" - }, - "max_loaded_at_time_ago_in_s": { - "type": "number" - }, - "status": { - "type": "string", - "enum": [ - "pass", - "warn", - "error", - "runtime error" - ] - }, - "criteria": { - "$ref": "#/definitions/FreshnessThreshold" - }, - "adapter_response": { - "type": "object" - }, - "timing": { - "type": "array", - "items": { - "$ref": "#/definitions/TimingInfo" + "propertyNames": { + "type": "string" } - }, - "thread_id": { - "type": "string" - }, - "execution_time": { - "type": "number" } }, - "additionalProperties": false, - "description": "SourceFreshnessOutput(unique_id: str, max_loaded_at: datetime.datetime, snapshotted_at: datetime.datetime, max_loaded_at_time_ago_in_s: float, status: dbt.contracts.results.FreshnessStatus, criteria: dbt.contracts.graph.unparsed.FreshnessThreshold, adapter_response: Dict[str, Any], timing: List[dbt.contracts.results.TimingInfo], thread_id: str, execution_time: float)" + "additionalProperties": false }, - "FreshnessThreshold": { - "type": "object", - "required": [], - "properties": { - "warn_after": { - "oneOf": [ - { - "$ref": "#/definitions/Time" + "results": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "object", + "title": "SourceFreshnessRuntimeError", + "properties": { + "unique_id": { + "type": "string" + }, + "error": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "null" + } + ] + }, + "status": { + "enum": [ + "runtime error" + ] + } }, - { - "type": "null" - } - ], - "default": { - "count": null, - "period": null - } - }, - "error_after": { - "oneOf": [ - { - "$ref": "#/definitions/Time" + "additionalProperties": false, + "required": [ + "unique_id", + "error", + "status" + ] + }, + { + "type": "object", + "title": "SourceFreshnessOutput", + 
"properties": { + "unique_id": { + "type": "string" + }, + "max_loaded_at": { + "type": "string" + }, + "snapshotted_at": { + "type": "string" + }, + "max_loaded_at_time_ago_in_s": { + "type": "number" + }, + "status": { + "enum": [ + "pass", + "warn", + "error", + "runtime error" + ] + }, + "criteria": { + "type": "object", + "title": "FreshnessThreshold", + "properties": { + "warn_after": { + "anyOf": [ + { + "type": "object", + "title": "Time", + "properties": { + "count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "period": { + "anyOf": [ + { + "enum": [ + "minute", + "hour", + "day" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ] + }, + "error_after": { + "anyOf": [ + { + "type": "object", + "title": "Time", + "properties": { + "count": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "default": null + }, + "period": { + "anyOf": [ + { + "enum": [ + "minute", + "hour", + "day" + ] + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + { + "type": "null" + } + ] + }, + "filter": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false + }, + "adapter_response": { + "type": "object", + "propertyNames": { + "type": "string" + } + }, + "timing": { + "type": "array", + "items": { + "type": "object", + "title": "TimingInfo", + "properties": { + "name": { + "type": "string" + }, + "started_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + }, + "completed_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "default": null + } + }, + "additionalProperties": false, + "required": [ + "name" + ] + } + }, + "thread_id": { + "type": "string" + }, + "execution_time": { + "type": "number" + } }, - { - "type": "null" - } - ], - "default": { - "count": null, - "period": null + "additionalProperties": false, + "required": [ + "unique_id", + "max_loaded_at", + "snapshotted_at", + "max_loaded_at_time_ago_in_s", + "status", + "criteria", + "adapter_response", + "timing", + "thread_id", + "execution_time" + ] } - }, - "filter": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "description": "FreshnessThreshold(warn_after: Optional[dbt.contracts.graph.unparsed.Time] = , error_after: Optional[dbt.contracts.graph.unparsed.Time] = , filter: Optional[str] = None)" - }, - "Time": { - "type": "object", - "required": [], - "properties": { - "count": { - "oneOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ] - }, - "period": { - "oneOf": [ - { - "type": "string", - "enum": [ - "minute", - "hour", - "day" - ] - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "description": "Time(count: Optional[int] = None, period: Optional[dbt.contracts.graph.unparsed.TimePeriod] = None)" + ] + } }, - "TimingInfo": { - "type": "object", - "required": [ - "name" - ], - "properties": { - "name": { - "type": "string" - }, - "started_at": { - "oneOf": [ - { - "type": "string", - "format": "date-time" - }, - { - "type": "null" - } - ] - }, - "completed_at": { - "oneOf": [ - { - "type": "string", - "format": "date-time" - }, - { - "type": "null" - } - ] - } - }, - "additionalProperties": false, - "description": "TimingInfo(name: str, started_at: Optional[datetime.datetime] = 
None, completed_at: Optional[datetime.datetime] = None)" + "elapsed_time": { + "type": "number" } }, - "$schema": "http://json-schema.org/draft-07/schema#", + "additionalProperties": false, + "required": [ + "metadata", + "results", + "elapsed_time" + ], "$id": "https://schemas.getdbt.com/dbt/sources/v3.json" } diff --git a/scripts/build-dist.sh b/scripts/build-dist.sh index de41a1bfd8a..5dd0fa17f22 100755 --- a/scripts/build-dist.sh +++ b/scripts/build-dist.sh @@ -14,13 +14,11 @@ rm -rf "$DBT_PATH"/dist rm -rf "$DBT_PATH"/build mkdir -p "$DBT_PATH"/dist -for SUBPATH in core plugins/postgres tests/adapter -do - rm -rf "$DBT_PATH"/"$SUBPATH"/dist - rm -rf "$DBT_PATH"/"$SUBPATH"/build - cd "$DBT_PATH"/"$SUBPATH" - $PYTHON_BIN setup.py sdist bdist_wheel - cp -r "$DBT_PATH"/"$SUBPATH"/dist/* "$DBT_PATH"/dist/ -done +rm -rf "$DBT_PATH"/core/dist +rm -rf "$DBT_PATH"core/build +cd "$DBT_PATH"/core +$PYTHON_BIN setup.py sdist bdist_wheel +cp -r "$DBT_PATH"/"core"/dist/* "$DBT_PATH"/dist/ + set +x diff --git a/scripts/collect-artifact-schema.py b/scripts/collect-artifact-schema.py index da2c6448830..5bedbfbbd6b 100755 --- a/scripts/collect-artifact-schema.py +++ b/scripts/collect-artifact-schema.py @@ -6,13 +6,11 @@ from typing import Type, Dict, Any from dbt.contracts.graph.manifest import WritableManifest -from dbt.contracts.results import ( - CatalogArtifact, - RunResultsArtifact, - FreshnessExecutionResultArtifact, -) -from dbt.contracts.util import VersionedSchema -from dbt.clients.system import write_file +from dbt.artifacts.schemas.catalog import CatalogArtifact +from dbt.artifacts.schemas.run import RunResultsArtifact +from dbt.artifacts.schemas.freshness import FreshnessExecutionResultArtifact +from dbt.artifacts.schemas.base import VersionedSchema +from dbt_common.clients.system import write_file @dataclass diff --git a/scripts/collect-dbt-contexts.py b/scripts/collect-dbt-contexts.py index 035c5ac5183..9e93494c9b1 100644 --- a/scripts/collect-dbt-contexts.py +++ b/scripts/collect-dbt-contexts.py @@ -4,7 +4,7 @@ import json from dataclasses import dataclass from typing import List, Optional, Iterable, Union, Dict, Any -from dbt.dataclass_schema import dbtClassMixin +from dbt_common.dataclass_schema import dbtClassMixin from dbt.context.base import BaseContext diff --git a/scripts/migrate-adapters.py b/scripts/migrate-adapters.py new file mode 100644 index 00000000000..ead59bd271e --- /dev/null +++ b/scripts/migrate-adapters.py @@ -0,0 +1,60 @@ +import argparse +import ast +from collections import namedtuple +from pathlib import Path + +Import = namedtuple("Import", ["module", "name", "alias"]) + + +def get_imports(path): + with open(path) as fh: + root = ast.parse(fh.read(), path) + + for node in ast.iter_child_nodes(root): + if isinstance(node, ast.Import): + module = [] + elif isinstance(node, ast.ImportFrom): + module = node.module.split(".") + else: + continue + + for n in node.names: + yield Import(module, n.name.split("."), n.asname) + + +parser = argparse.ArgumentParser("migrate_adapters") +parser.add_argument("path", help="The path to run the migration tool over.", type=str) +args = parser.parse_args() + +path = Path(args.path) +pathlist = path.rglob("*.py") + +total_dbt_imports = 0 +invalid_dbt_imports = 0 +path_to_invalid_imports = {} +for path in pathlist: + path_to_invalid_imports[path] = [] + for imported_module in get_imports(str(path)): + if imported_module.module and imported_module.module[0] == "dbt": + total_dbt_imports += 1 + if imported_module.module[1] not in 
("common", "adapters"): + invalid_dbt_imports += 1 + path_to_invalid_imports[path].append( + f"{'.'.join(imported_module.module)}::{imported_module.name[0]}" + ) + +migrated_imports = total_dbt_imports - invalid_dbt_imports +migrated_imports_progress = round((migrated_imports / total_dbt_imports) * 100, 2) + +for path, invalid_imports in path_to_invalid_imports.items(): + if invalid_imports: + print() + print(f"\033[92m{path}:\033[0m") + for invalid_import in invalid_imports: + print(f" - {invalid_import}") + +print() +print( + f"migration progress: {migrated_imports_progress}% of dbt imports are valid (from adapters or common)" +) +print(f"remaining core imports: {invalid_dbt_imports}") diff --git a/scripts/update_dev_packages.sh b/scripts/update_dev_packages.sh new file mode 100755 index 00000000000..3f140f04b89 --- /dev/null +++ b/scripts/update_dev_packages.sh @@ -0,0 +1,14 @@ +#!/bin/bash -e +set -e + +repo=$1 +ref=$2 +target_req_file="dev-requirements.txt" + +req_sed_pattern="s|${repo}.git@main|${repo}.git@${ref}|g" +if [[ "$OSTYPE" == darwin* ]]; then + # mac ships with a different version of sed that requires a delimiter arg + sed -i "" "$req_sed_pattern" $target_req_file +else + sed -i "$req_sed_pattern" $target_req_file +fi diff --git a/test/setup_db.sh b/test/setup_db.sh index de59bf0fac6..2d877a9e8df 100755 --- a/test/setup_db.sh +++ b/test/setup_db.sh @@ -39,6 +39,7 @@ for i in {1..10}; do done; createdb dbt +psql -c "SELECT version();" psql -c "CREATE ROLE root WITH PASSWORD 'password';" psql -c "ALTER ROLE root WITH LOGIN;" psql -c "GRANT CREATE, CONNECT ON DATABASE dbt TO root WITH GRANT OPTION;" diff --git a/tests/adapter/README.md b/tests/adapter/README.md deleted file mode 100644 index 954fa9432fd..00000000000 --- a/tests/adapter/README.md +++ /dev/null @@ -1,41 +0,0 @@ -
- dbt logo
- -# dbt-tests-adapter - -For context and guidance on using this package, please read: ["Testing a new adapter"](https://docs.getdbt.com/docs/contributing/testing-a-new-adapter) - -## What is it? - -This package includes reusable test cases that reinforce behaviors common to all or many adapter plugins. There are two categories of tests: - -1. **Basic tests** that every adapter plugin is expected to pass. These are defined in `tests.adapter.basic`. Given differences across data platforms, these may require slight modification or reimplementation. Significantly overriding or disabling these tests should be with good reason, since each represents basic functionality expected by dbt users. For example, if your adapter does not support incremental models, you should disable the test, [by marking it with `skip` or `xfail`](https://docs.pytest.org/en/latest/how-to/skipping.html), as well as noting that limitation in any documentation, READMEs, and usage guides that accompany your adapter. - -2. **Optional tests**, for second-order functionality that is common across plugins, but not required for basic use. Your plugin can opt into these test cases by inheriting existing ones, or reimplementing them with adjustments. For now, this category includes all tests located outside the `basic` subdirectory. More tests will be added as we convert older tests defined on dbt-core and mature plugins to use the standard framework. - -## How to use it? - -Each test case in this repo is packaged as a class, prefixed `Base`. To enable a test case to run with your adapter plugin, you should inherit the base class into a new class, prefixed `Test`. That test class will be discovered and run by `pytest`. It can also makes modifications if needed. - -```python -class TestSimpleMaterializations(BaseSimpleMaterializations): - pass -``` - -## Distribution - -To install: - -```sh -pip install dbt-tests-adapter -``` - -This package is versioned in lockstep with `dbt-core`, and [the same versioning guidelines](https://docs.getdbt.com/docs/core-versions) apply: -- New "basic" test cases MAY be added in minor versions ONLY. They may not be included in patch releases. -- Breaking changes to existing test cases MAY be included and communicated as part of minor version upgrades ONLY. They MAY NOT be included in patch releases. We will aim to avoid these whenever possible. -- New "optional" test cases, and non-breaking fixes to existing test cases, MAY be added in minor or patch versions. - -Assuming you adapter plugin is pinned to a specific minor version of `dbt-core` (e.g. `~=1.1.0`), you can use the same pin for `dbt-tests-adapter`. - -**Note:** This is packaged as a plugin using a python namespace package. It cannot have an `__init__.py` file in the part of the hierarchy to which it needs to be attached. 
diff --git a/tests/adapter/dbt/tests/adapter/__version__.py b/tests/adapter/dbt/tests/adapter/__version__.py deleted file mode 100644 index 874bd74c8ac..00000000000 --- a/tests/adapter/dbt/tests/adapter/__version__.py +++ /dev/null @@ -1 +0,0 @@ -version = "1.7.0a1" diff --git a/tests/adapter/dbt/tests/adapter/grants/base_grants.py b/tests/adapter/dbt/tests/adapter/grants/base_grants.py deleted file mode 100644 index 82f5b9fe664..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/base_grants.py +++ /dev/null @@ -1,58 +0,0 @@ -import pytest -import os -from dbt.tests.util import ( - relation_from_name, - get_connection, -) -from dbt.context.base import BaseContext # diff_of_two_dicts only - -TEST_USER_ENV_VARS = ["DBT_TEST_USER_1", "DBT_TEST_USER_2", "DBT_TEST_USER_3"] - - -def replace_all(text, dic): - for i, j in dic.items(): - text = text.replace(i, j) - return text - - -class BaseGrants: - def privilege_grantee_name_overrides(self): - # these privilege and grantee names are valid on most databases, but not all! - # looking at you, BigQuery - # optionally use this to map from "select" --> "other_select_name", "insert" --> ... - return { - "select": "select", - "insert": "insert", - "fake_privilege": "fake_privilege", - "invalid_user": "invalid_user", - } - - def interpolate_name_overrides(self, yaml_text): - return replace_all(yaml_text, self.privilege_grantee_name_overrides()) - - @pytest.fixture(scope="class", autouse=True) - def get_test_users(self, project): - test_users = [] - for env_var in TEST_USER_ENV_VARS: - user_name = os.getenv(env_var) - if user_name: - test_users.append(user_name) - return test_users - - def get_grants_on_relation(self, project, relation_name): - relation = relation_from_name(project.adapter, relation_name) - adapter = project.adapter - with get_connection(adapter): - kwargs = {"relation": relation} - show_grant_sql = adapter.execute_macro("get_show_grant_sql", kwargs=kwargs) - _, grant_table = adapter.execute(show_grant_sql, fetch=True) - actual_grants = adapter.standardize_grants_dict(grant_table) - return actual_grants - - def assert_expected_grants_match_actual(self, project, relation_name, expected_grants): - actual_grants = self.get_grants_on_relation(project, relation_name) - # need a case-insensitive comparison - # so just a simple "assert expected == actual_grants" won't work - diff_a = BaseContext.diff_of_two_dicts(actual_grants, expected_grants) - diff_b = BaseContext.diff_of_two_dicts(expected_grants, actual_grants) - assert diff_a == diff_b == {} diff --git a/tests/adapter/dbt/tests/adapter/grants/test_incremental_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_incremental_grants.py deleted file mode 100644 index 2f28eac02ab..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_incremental_grants.py +++ /dev/null @@ -1,102 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt, - run_dbt_and_capture, - get_manifest, - write_file, - relation_from_name, - get_connection, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -my_incremental_model_sql = """ - select 1 as fun -""" - -incremental_model_schema_yml = """ -version: 2 -models: - - name: my_incremental_model - config: - materialized: incremental - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_incremental_model_schema_yml = """ -version: 2 -models: - - name: my_incremental_model - config: - materialized: incremental - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - - -class BaseIncrementalGrants(BaseGrants): - 
@pytest.fixture(scope="class") - def models(self): - updated_schema = self.interpolate_name_overrides(incremental_model_schema_yml) - return { - "my_incremental_model.sql": my_incremental_model_sql, - "schema.yml": updated_schema, - } - - def test_incremental_grants(self, project, get_test_users): - # we want the test to fail, not silently skip - test_users = get_test_users - select_privilege_name = self.privilege_grantee_name_overrides()["select"] - assert len(test_users) == 3 - - # Incremental materialization, single select grant - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model_id = "model.test.my_incremental_model" - model = manifest.nodes[model_id] - assert model.config.materialized == "incremental" - expected = {select_privilege_name: [test_users[0]]} - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - # Incremental materialization, run again without changes - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - assert "revoke " not in log_output - assert "grant " not in log_output # with space to disambiguate from 'show grants' - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - # Incremental materialization, change select grant user - updated_yaml = self.interpolate_name_overrides(user2_incremental_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - assert "revoke " in log_output - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_id] - assert model.config.materialized == "incremental" - expected = {select_privilege_name: [test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - # Incremental materialization, same config, now with --full-refresh - run_dbt(["--debug", "run", "--full-refresh"]) - assert len(results) == 1 - # whether grants or revokes happened will vary by adapter - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - # Now drop the schema (with the table in it) - adapter = project.adapter - relation = relation_from_name(adapter, "my_incremental_model") - with get_connection(adapter): - adapter.drop_schema(relation) - - # Incremental materialization, same config, rebuild now that table is missing - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - assert "grant " in log_output - assert "revoke " not in log_output - self.assert_expected_grants_match_actual(project, "my_incremental_model", expected) - - -class TestIncrementalGrants(BaseIncrementalGrants): - pass diff --git a/tests/adapter/dbt/tests/adapter/grants/test_invalid_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_invalid_grants.py deleted file mode 100644 index b16cedaac84..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_invalid_grants.py +++ /dev/null @@ -1,68 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt_and_capture, - write_file, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -my_invalid_model_sql = """ - select 1 as fun -""" - -invalid_user_table_model_schema_yml = """ -version: 2 -models: - - name: my_invalid_model - config: - materialized: table - grants: - select: ['invalid_user'] -""" - -invalid_privilege_table_model_schema_yml = """ -version: 2 
-models: - - name: my_invalid_model - config: - materialized: table - grants: - fake_privilege: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - - -class BaseInvalidGrants(BaseGrants): - # The purpose of this test is to understand the user experience when providing - # an invalid 'grants' configuration. dbt will *not* try to intercept or interpret - # the database's own error at runtime -- it will just return those error messages. - # Hopefully they're helpful! - - @pytest.fixture(scope="class") - def models(self): - return { - "my_invalid_model.sql": my_invalid_model_sql, - } - - # Adapters will need to reimplement these methods with the specific - # language of their database - def grantee_does_not_exist_error(self): - return "does not exist" - - def privilege_does_not_exist_error(self): - return "unrecognized privilege" - - def test_invalid_grants(self, project, get_test_users, logs_dir): - # failure when grant to a user/role that doesn't exist - yaml_file = self.interpolate_name_overrides(invalid_user_table_model_schema_yml) - write_file(yaml_file, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"], expect_pass=False) - assert self.grantee_does_not_exist_error() in log_output - - # failure when grant to a privilege that doesn't exist - yaml_file = self.interpolate_name_overrides(invalid_privilege_table_model_schema_yml) - write_file(yaml_file, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"], expect_pass=False) - assert self.privilege_does_not_exist_error() in log_output - - -class TestInvalidGrants(BaseInvalidGrants): - pass diff --git a/tests/adapter/dbt/tests/adapter/grants/test_model_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_model_grants.py deleted file mode 100644 index db2fe379f5b..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_model_grants.py +++ /dev/null @@ -1,156 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt_and_capture, - get_manifest, - write_file, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -my_model_sql = """ - select 1 as fun -""" - -model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - -table_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - materialized: table - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_table_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - materialized: table - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - -multiple_users_table_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - materialized: table - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}", "{{ env_var('DBT_TEST_USER_2') }}"] -""" - -multiple_privileges_table_model_schema_yml = """ -version: 2 -models: - - name: my_model - config: - materialized: table - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] - insert: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - - -class BaseModelGrants(BaseGrants): - @pytest.fixture(scope="class") - def models(self): - updated_schema = self.interpolate_name_overrides(model_schema_yml) - return { - "my_model.sql": my_model_sql, - "schema.yml": updated_schema, - } - - def test_view_table_grants(self, project, get_test_users): - # we want 
the test to fail, not silently skip - test_users = get_test_users - select_privilege_name = self.privilege_grantee_name_overrides()["select"] - insert_privilege_name = self.privilege_grantee_name_overrides()["insert"] - assert len(test_users) == 3 - - # View materialization, single select grant - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - model = manifest.nodes[model_id] - expected = {select_privilege_name: [test_users[0]]} - assert model.config.grants == expected - assert model.config.materialized == "view" - self.assert_expected_grants_match_actual(project, "my_model", expected) - - # View materialization, change select grant user - updated_yaml = self.interpolate_name_overrides(user2_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - - expected = {select_privilege_name: [get_test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) - - # Table materialization, single select grant - updated_yaml = self.interpolate_name_overrides(table_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model_id = "model.test.my_model" - model = manifest.nodes[model_id] - assert model.config.materialized == "table" - expected = {select_privilege_name: [test_users[0]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) - - # Table materialization, change select grant user - updated_yaml = self.interpolate_name_overrides(user2_table_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_id] - assert model.config.materialized == "table" - expected = {select_privilege_name: [test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) - - # Table materialization, multiple grantees - updated_yaml = self.interpolate_name_overrides(multiple_users_table_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_id] - assert model.config.materialized == "table" - expected = {select_privilege_name: [test_users[0], test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) - - # Table materialization, multiple privileges - updated_yaml = self.interpolate_name_overrides(multiple_privileges_table_model_schema_yml) - write_file(updated_yaml, project.project_root, "models", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "run"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - model = manifest.nodes[model_id] - assert model.config.materialized == "table" - expected = {select_privilege_name: [test_users[0]], insert_privilege_name: [test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_model", expected) - - -class TestModelGrants(BaseModelGrants): - pass diff --git 
a/tests/adapter/dbt/tests/adapter/grants/test_seed_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_seed_grants.py deleted file mode 100644 index aff20c65cad..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_seed_grants.py +++ /dev/null @@ -1,143 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt, - run_dbt_and_capture, - get_manifest, - write_file, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -seeds__my_seed_csv = """ -id,name,some_date -1,Easton,1981-05-20T06:46:51 -2,Lillian,1978-09-03T18:10:33 -""".lstrip() - -schema_base_yml = """ -version: 2 -seeds: - - name: my_seed - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_schema_base_yml = """ -version: 2 -seeds: - - name: my_seed - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - -ignore_grants_yml = """ -version: 2 -seeds: - - name: my_seed - config: - grants: {} -""" - -zero_grants_yml = """ -version: 2 -seeds: - - name: my_seed - config: - grants: - select: [] -""" - - -class BaseSeedGrants(BaseGrants): - def seeds_support_partial_refresh(self): - return True - - @pytest.fixture(scope="class") - def seeds(self): - updated_schema = self.interpolate_name_overrides(schema_base_yml) - return { - "my_seed.csv": seeds__my_seed_csv, - "schema.yml": updated_schema, - } - - def test_seed_grants(self, project, get_test_users): - test_users = get_test_users - select_privilege_name = self.privilege_grantee_name_overrides()["select"] - - # seed command - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - seed_id = "seed.test.my_seed" - seed = manifest.nodes[seed_id] - expected = {select_privilege_name: [test_users[0]]} - assert seed.config.grants == expected - assert "grant " in log_output - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # run it again, with no config changes - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - if self.seeds_support_partial_refresh(): - # grants carried over -- nothing should have changed - assert "revoke " not in log_output - assert "grant " not in log_output - else: - # seeds are always full-refreshed on this adapter, so we need to re-grant - assert "grant " in log_output - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # change the grantee, assert it updates - updated_yaml = self.interpolate_name_overrides(user2_schema_base_yml) - write_file(updated_yaml, project.project_root, "seeds", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - expected = {select_privilege_name: [test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # run it again, with --full-refresh, grants should be the same - run_dbt(["seed", "--full-refresh"]) - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # change config to 'grants: {}' -- should be completely ignored - updated_yaml = self.interpolate_name_overrides(ignore_grants_yml) - write_file(updated_yaml, project.project_root, "seeds", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - assert "revoke " not in log_output - assert "grant " not in log_output - manifest = get_manifest(project.project_root) - seed_id = "seed.test.my_seed" - seed = manifest.nodes[seed_id] - expected_config = {} - expected_actual = 
{select_privilege_name: [test_users[1]]} - assert seed.config.grants == expected_config - if self.seeds_support_partial_refresh(): - # ACTUAL grants will NOT match expected grants - self.assert_expected_grants_match_actual(project, "my_seed", expected_actual) - else: - # there should be ZERO grants on the seed - self.assert_expected_grants_match_actual(project, "my_seed", expected_config) - - # now run with ZERO grants -- all grants should be removed - # whether explicitly (revoke) or implicitly (recreated without any grants added on) - updated_yaml = self.interpolate_name_overrides(zero_grants_yml) - write_file(updated_yaml, project.project_root, "seeds", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - if self.seeds_support_partial_refresh(): - assert "revoke " in log_output - expected = {} - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - # run it again -- dbt shouldn't try to grant or revoke anything - (results, log_output) = run_dbt_and_capture(["--debug", "seed"]) - assert len(results) == 1 - assert "revoke " not in log_output - assert "grant " not in log_output - self.assert_expected_grants_match_actual(project, "my_seed", expected) - - -class TestSeedGrants(BaseSeedGrants): - pass diff --git a/tests/adapter/dbt/tests/adapter/grants/test_snapshot_grants.py b/tests/adapter/dbt/tests/adapter/grants/test_snapshot_grants.py deleted file mode 100644 index 6bf69b3bb94..00000000000 --- a/tests/adapter/dbt/tests/adapter/grants/test_snapshot_grants.py +++ /dev/null @@ -1,78 +0,0 @@ -import pytest -from dbt.tests.util import ( - run_dbt, - run_dbt_and_capture, - get_manifest, - write_file, -) -from dbt.tests.adapter.grants.base_grants import BaseGrants - -my_snapshot_sql = """ -{% snapshot my_snapshot %} - {{ config( - check_cols='all', unique_key='id', strategy='check', - target_database=database, target_schema=schema - ) }} - select 1 as id, cast('blue' as {{ type_string() }}) as color -{% endsnapshot %} -""".strip() - -snapshot_schema_yml = """ -version: 2 -snapshots: - - name: my_snapshot - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_1') }}"] -""" - -user2_snapshot_schema_yml = """ -version: 2 -snapshots: - - name: my_snapshot - config: - grants: - select: ["{{ env_var('DBT_TEST_USER_2') }}"] -""" - - -class BaseSnapshotGrants(BaseGrants): - @pytest.fixture(scope="class") - def snapshots(self): - return { - "my_snapshot.sql": my_snapshot_sql, - "schema.yml": self.interpolate_name_overrides(snapshot_schema_yml), - } - - def test_snapshot_grants(self, project, get_test_users): - test_users = get_test_users - select_privilege_name = self.privilege_grantee_name_overrides()["select"] - - # run the snapshot - results = run_dbt(["snapshot"]) - assert len(results) == 1 - manifest = get_manifest(project.project_root) - snapshot_id = "snapshot.test.my_snapshot" - snapshot = manifest.nodes[snapshot_id] - expected = {select_privilege_name: [test_users[0]]} - assert snapshot.config.grants == expected - self.assert_expected_grants_match_actual(project, "my_snapshot", expected) - - # run it again, nothing should have changed - (results, log_output) = run_dbt_and_capture(["--debug", "snapshot"]) - assert len(results) == 1 - assert "revoke " not in log_output - assert "grant " not in log_output - self.assert_expected_grants_match_actual(project, "my_snapshot", expected) - - # change the grantee, assert it updates - updated_yaml = self.interpolate_name_overrides(user2_snapshot_schema_yml) - 
write_file(updated_yaml, project.project_root, "snapshots", "schema.yml") - (results, log_output) = run_dbt_and_capture(["--debug", "snapshot"]) - assert len(results) == 1 - expected = {select_privilege_name: [test_users[1]]} - self.assert_expected_grants_match_actual(project, "my_snapshot", expected) - - -class TestSnapshotGrants(BaseSnapshotGrants): - pass diff --git a/tests/adapter/setup.py b/tests/adapter/setup.py deleted file mode 100644 index 9f700e6531d..00000000000 --- a/tests/adapter/setup.py +++ /dev/null @@ -1,57 +0,0 @@ -#!/usr/bin/env python -import os -import sys - -if sys.version_info < (3, 8): - print("Error: dbt does not support this version of Python.") - print("Please upgrade to Python 3.8 or higher.") - sys.exit(1) - - -from setuptools import setup - -try: - from setuptools import find_namespace_packages -except ImportError: - # the user has a downlevel version of setuptools. - print("Error: dbt requires setuptools v40.1.0 or higher.") - print('Please upgrade setuptools with "pip install --upgrade setuptools" ' "and try again") - sys.exit(1) - - -package_name = "dbt-tests-adapter" -package_version = "1.7.0a1" -description = """The dbt adapter tests for adapter plugins""" - -this_directory = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(this_directory, "README.md")) as f: - long_description = f.read() - -setup( - name=package_name, - version=package_version, - description=description, - long_description=long_description, - long_description_content_type="text/markdown", - author="dbt Labs", - author_email="info@dbtlabs.com", - url="https://github.com/dbt-labs/dbt-core/tree/main/tests/adapter", - packages=find_namespace_packages(include=["dbt", "dbt.*"]), - install_requires=[ - "dbt-core=={}".format(package_version), - "pytest>=7.0.0", - ], - zip_safe=False, - classifiers=[ - "Development Status :: 5 - Production/Stable", - "License :: OSI Approved :: Apache Software License", - "Operating System :: Microsoft :: Windows", - "Operating System :: MacOS :: MacOS X", - "Operating System :: POSIX :: Linux", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - ], - python_requires=">=3.8", -) diff --git a/tests/fixtures/dbt_integration_project.py b/tests/fixtures/dbt_integration_project.py index 688bc8a4a6f..bef760a3add 100644 --- a/tests/fixtures/dbt_integration_project.py +++ b/tests/fixtures/dbt_integration_project.py @@ -39,7 +39,7 @@ - name: table_model columns: - name: id - tests: + data_tests: - unique """ diff --git a/tests/fixtures/jaffle_shop.py b/tests/fixtures/jaffle_shop.py index 74fdb6dce54..9b366ed2d5a 100644 --- a/tests/fixtures/jaffle_shop.py +++ b/tests/fixtures/jaffle_shop.py @@ -1,5 +1,7 @@ -import pytest import os + +import pytest + from dbt.tests.util import read_file # models/customers.sql @@ -179,7 +181,7 @@ columns: - name: customer_id description: This is a unique identifier for a customer - tests: + data_tests: - unique - not_null @@ -206,14 +208,14 @@ columns: - name: order_id - tests: + data_tests: - unique - not_null description: This is a unique identifier for an order - name: customer_id description: Foreign key to the customers table - tests: + data_tests: - not_null - relationships: to: ref('customers') @@ -224,33 +226,33 @@ - name: status description: '{{ doc("orders_status") }}' - tests: + data_tests: - accepted_values: values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] - name: 
amount description: Total amount (AUD) of the order - tests: + data_tests: - not_null - name: credit_card_amount description: Amount of the order (AUD) paid for by credit card - tests: + data_tests: - not_null - name: coupon_amount description: Amount of the order (AUD) paid for by coupon - tests: + data_tests: - not_null - name: bank_transfer_amount description: Amount of the order (AUD) paid for by bank transfer - tests: + data_tests: - not_null - name: gift_card_amount description: Amount of the order (AUD) paid for by gift card - tests: + data_tests: - not_null """ @@ -262,29 +264,29 @@ - name: stg_customers columns: - name: customer_id - tests: + data_tests: - unique - not_null - name: stg_orders columns: - name: order_id - tests: + data_tests: - unique - not_null - name: status - tests: + data_tests: - accepted_values: values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] - name: stg_payments columns: - name: payment_id - tests: + data_tests: - unique - not_null - name: payment_method - tests: + data_tests: - accepted_values: values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] """ diff --git a/tests/functional/access/test_access.py b/tests/functional/access/test_access.py index 424616970f7..5ef10c0e100 100644 --- a/tests/functional/access/test_access.py +++ b/tests/functional/access/test_access.py @@ -1,10 +1,10 @@ import pytest +from dbt.exceptions import DbtReferenceError, InvalidAccessTypeError +from dbt.node_types import AccessType from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 -from dbt.tests.util import run_dbt, get_manifest, write_file, rm_file -from dbt.node_types import AccessType -from dbt.exceptions import InvalidAccessTypeError, DbtReferenceError my_model_sql = "select 1 as fun" @@ -104,9 +104,6 @@ group: analytics - name: another_model description: "yet another model" - - name: ref_my_model - description: "a model that refs my_model" - group: marts - name: ref_my_model description: "a model that refs my_model" group: analytics @@ -116,6 +113,26 @@ group: analytics """ +v6_schema_yml = """ +models: + - name: my_model + description: "my model" + config: + access: private + group: analytics + - name: another_model + description: "yet another model" + - name: ref_my_model + description: "a model that refs my_model" + config: + group: analytics + - name: people_model + description: "some people" + config: + access: public + group: analytics +""" + people_model_sql = """ select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at union all @@ -313,10 +330,22 @@ def test_access_attribute(self, project): # Should succeed manifest = run_dbt(["parse"]) assert len(manifest.nodes) == 5 - manifest = get_manifest(project.project_root) metric_id = "metric.test.number_of_people" assert manifest.metrics[metric_id].group == "analytics" + # Use access and group in config + write_file(v5_schema_yml, project.project_root, "models", "schema.yml") + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 5 + assert manifest.nodes["model.test.my_model"].access == AccessType.Private + assert manifest.nodes["model.test.my_model"].group == "analytics" + assert manifest.nodes["model.test.ref_my_model"].access == AccessType.Protected + assert manifest.nodes["model.test.ref_my_model"].group == "analytics" 
+ assert manifest.nodes["model.test.people_model"].access == AccessType.Public + assert manifest.nodes["model.test.people_model"].group == "analytics" + assert manifest.nodes["model.test.another_model"].access == AccessType.Protected + assert manifest.nodes["model.test.another_model"].group is None + class TestUnrestrictedPackageAccess: @pytest.fixture(scope="class", autouse=True) @@ -398,3 +427,46 @@ def test_restricted_private_ref(self, project): with pytest.raises(DbtReferenceError): run_dbt(["parse"]) + + +dbt_project_yml = """ +models: + test: + subdir_one: + +group: analytics + +access: private + subdir_two: + +group: marts + +access: public +""" + + +class TestAccessDbtProjectConfig: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": my_model_sql, + "subdir_one": { + "model_two.sql": my_model_sql, + }, + "subdir_two": { + "model_three.sql": my_model_sql, + }, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return dbt_project_yml + + def test_dbt_project_access_config(self, project): + write_file(groups_yml, project.project_root, "models", "groups.yml") + manifest = run_dbt(["parse"]) + model_one = manifest.nodes["model.test.model_one"] + model_two = manifest.nodes["model.test.model_two"] + model_three = manifest.nodes["model.test.model_three"] + assert model_one.group is None + assert model_one.access == AccessType.Protected + assert model_two.group == "analytics" + assert model_two.access == AccessType.Private + assert model_three.group == "marts" + assert model_three.access == AccessType.Public diff --git a/tests/functional/adapter/__init__.py b/tests/functional/adapter/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/adapter/aliases/__init__.py b/tests/functional/adapter/aliases/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/aliases/fixtures.py b/tests/functional/adapter/aliases/fixtures.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/aliases/fixtures.py rename to tests/functional/adapter/aliases/fixtures.py index 948d6554b6c..1bdb88628ec 100644 --- a/tests/adapter/dbt/tests/adapter/aliases/fixtures.py +++ b/tests/functional/adapter/aliases/fixtures.py @@ -31,22 +31,22 @@ version: 2 models: - name: foo_alias - tests: + data_tests: - expect_value: field: tablename value: foo - name: ref_foo_alias - tests: + data_tests: - expect_value: field: tablename value: ref_foo_alias - name: alias_in_project - tests: + data_tests: - expect_value: field: tablename value: project_alias - name: alias_in_project_with_override - tests: + data_tests: - expect_value: field: tablename value: override_alias @@ -128,12 +128,12 @@ version: 2 models: - name: model_a - tests: + data_tests: - expect_value: field: tablename value: duped_alias - name: model_b - tests: + data_tests: - expect_value: field: tablename value: duped_alias @@ -161,17 +161,17 @@ version: 2 models: - name: model_a - tests: + data_tests: - expect_value: field: tablename value: duped_alias - name: model_b - tests: + data_tests: - expect_value: field: tablename value: duped_alias - name: model_c - tests: + data_tests: - expect_value: field: tablename value: duped_alias diff --git a/tests/adapter/dbt/tests/adapter/aliases/test_aliases.py b/tests/functional/adapter/aliases/test_aliases.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/aliases/test_aliases.py rename to tests/functional/adapter/aliases/test_aliases.py index 
d9ff6b5b28f..3e35518f06c 100644 --- a/tests/adapter/dbt/tests/adapter/aliases/test_aliases.py +++ b/tests/functional/adapter/aliases/test_aliases.py @@ -1,22 +1,23 @@ import pytest + from dbt.tests.util import run_dbt -from dbt.tests.adapter.aliases.fixtures import ( +from tests.functional.adapter.aliases.fixtures import ( MACROS__CAST_SQL, MACROS__EXPECT_VALUE_SQL, - MODELS__SCHEMA_YML, - MODELS__FOO_ALIAS_SQL, MODELS__ALIAS_IN_PROJECT_SQL, MODELS__ALIAS_IN_PROJECT_WITH_OVERRIDE_SQL, + MODELS__FOO_ALIAS_SQL, MODELS__REF_FOO_ALIAS_SQL, + MODELS__SCHEMA_YML, MODELS_DUPE__MODEL_A_SQL, MODELS_DUPE__MODEL_B_SQL, - MODELS_DUPE_CUSTOM_SCHEMA__SCHEMA_YML, + MODELS_DUPE_CUSTOM_DATABASE__MODEL_A_SQL, + MODELS_DUPE_CUSTOM_DATABASE__MODEL_B_SQL, + MODELS_DUPE_CUSTOM_DATABASE__SCHEMA_YML, MODELS_DUPE_CUSTOM_SCHEMA__MODEL_A_SQL, MODELS_DUPE_CUSTOM_SCHEMA__MODEL_B_SQL, MODELS_DUPE_CUSTOM_SCHEMA__MODEL_C_SQL, - MODELS_DUPE_CUSTOM_DATABASE__SCHEMA_YML, - MODELS_DUPE_CUSTOM_DATABASE__MODEL_A_SQL, - MODELS_DUPE_CUSTOM_DATABASE__MODEL_B_SQL, + MODELS_DUPE_CUSTOM_SCHEMA__SCHEMA_YML, ) diff --git a/tests/adapter/dbt/tests/adapter/basic/__init__.py b/tests/functional/adapter/basic/__init__.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/__init__.py rename to tests/functional/adapter/basic/__init__.py diff --git a/tests/adapter/dbt/tests/adapter/basic/expected_catalog.py b/tests/functional/adapter/basic/expected_catalog.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/expected_catalog.py rename to tests/functional/adapter/basic/expected_catalog.py diff --git a/tests/adapter/dbt/tests/adapter/basic/files.py b/tests/functional/adapter/basic/files.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/basic/files.py rename to tests/functional/adapter/basic/files.py index b035766d641..751b01a0b8a 100644 --- a/tests/adapter/dbt/tests/adapter/basic/files.py +++ b/tests/functional/adapter/basic/files.py @@ -59,7 +59,7 @@ - name: base columns: - name: id - tests: + data_tests: - not_null """ @@ -69,7 +69,7 @@ - name: view_model columns: - name: id - tests: + data_tests: - not_null """ @@ -79,7 +79,7 @@ - name: table_model columns: - name: id - tests: + data_tests: - not_null """ diff --git a/tests/adapter/dbt/tests/adapter/basic/test_adapter_methods.py b/tests/functional/adapter/basic/test_adapter_methods.py similarity index 85% rename from tests/adapter/dbt/tests/adapter/basic/test_adapter_methods.py rename to tests/functional/adapter/basic/test_adapter_methods.py index 635ed28d9dd..62f70b4f0f1 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_adapter_methods.py +++ b/tests/functional/adapter/basic/test_adapter_methods.py @@ -1,8 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.fixtures.project import write_project_files - +from dbt.tests.util import check_relations_equal, run_dbt tests__get_columns_in_relation_sql = """ {% set columns = adapter.get_columns_in_relation(ref('model')) %} @@ -74,22 +72,23 @@ def models(self): "model.sql": models__model_sql, } - @pytest.fixture(scope="class") - def project_files( - self, - project_root, - tests, - models, - ): - write_project_files(project_root, "tests", tests) - write_project_files(project_root, "models", models) - @pytest.fixture(scope="class") def project_config_update(self): return { "name": "adapter_methods", } + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = 
project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + # snowflake need all tables in CAP name @pytest.fixture(scope="class") def equal_tables(self): diff --git a/tests/adapter/dbt/tests/adapter/basic/test_base.py b/tests/functional/adapter/basic/test_base.py similarity index 89% rename from tests/adapter/dbt/tests/adapter/basic/test_base.py rename to tests/functional/adapter/basic/test_base.py index d3926b9f6a9..98edd9926cc 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_base.py +++ b/tests/functional/adapter/basic/test_base.py @@ -1,17 +1,18 @@ import pytest + from dbt.tests.util import ( - run_dbt, - check_result_nodes_by_name, - relation_from_name, check_relation_types, check_relations_equal, + check_result_nodes_by_name, + relation_from_name, + run_dbt, ) -from dbt.tests.adapter.basic.files import ( - seeds_base_csv, - base_view_sql, - base_table_sql, +from tests.functional.adapter.basic.files import ( base_materialized_var_sql, + base_table_sql, + base_view_sql, schema_base_yml, + seeds_base_csv, ) @@ -37,6 +38,17 @@ def project_config_update(self): "name": "base", } + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + def test_base(self, project): # seed command diff --git a/tests/adapter/dbt/tests/adapter/basic/test_docs_generate.py b/tests/functional/adapter/basic/test_docs_generate.py similarity index 93% rename from tests/adapter/dbt/tests/adapter/basic/test_docs_generate.py rename to tests/functional/adapter/basic/test_docs_generate.py index fb10be3736a..c6c070bf2a3 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_docs_generate.py +++ b/tests/functional/adapter/basic/test_docs_generate.py @@ -1,14 +1,15 @@ -import pytest import os from datetime import datetime -import dbt -from dbt.tests.util import run_dbt, rm_file, get_artifact, check_datetime_between +import pytest + +import dbt from dbt.tests.fixtures.project import write_project_files -from dbt.tests.adapter.basic.expected_catalog import ( +from dbt.tests.util import check_datetime_between, get_artifact, rm_file, run_dbt +from tests.functional.adapter.basic.expected_catalog import ( base_expected_catalog, - no_stats, expected_references_catalog, + no_stats, ) models__schema_yml = """ @@ -22,7 +23,7 @@ columns: - name: id description: The user ID number - tests: + data_tests: - unique - not_null - name: first_name @@ -33,7 +34,7 @@ description: The user's IP address - name: updated_at description: The last time this user's email was updated - tests: + data_tests: - test.nothing - name: second_model @@ -428,6 +429,18 @@ def expected_catalog(self, project, profile_user): model_stats=no_stats(), ) + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + alternate_schema = f"{project.test_schema}_test" + relation = project.adapter.Relation.create( + database=project.database, schema=alternate_schema + ) + project.adapter.drop_schema(relation) + + pass + # Test "--no-compile" flag works and produces no manifest.json def test_run_and_generate_no_compile(self, project, expected_catalog): start_time = run_and_generate(project, ["--no-compile"]) @@ -440,9 +453,9 @@ def test_run_and_generate(self, project, expected_catalog): 
verify_catalog(project, expected_catalog, start_time) # Check that assets have been copied to the target directory for use in the docs html page - assert os.path.exists(os.path.join(".", "target", "assets")) - assert os.path.exists(os.path.join(".", "target", "assets", "lorem-ipsum.txt")) - assert not os.path.exists(os.path.join(".", "target", "non-existent-assets")) + assert os.path.exists(os.path.join("", "target", "assets")) + assert os.path.exists(os.path.join("", "target", "assets", "lorem-ipsum.txt")) + assert not os.path.exists(os.path.join("", "target", "non-existent-assets")) class TestDocsGenerate(BaseDocsGenerate): diff --git a/tests/adapter/dbt/tests/adapter/basic/test_empty.py b/tests/functional/adapter/basic/test_empty.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/basic/test_empty.py rename to tests/functional/adapter/basic/test_empty.py index 8bfe928686f..49941ae6f67 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_empty.py +++ b/tests/functional/adapter/basic/test_empty.py @@ -1,6 +1,7 @@ -from dbt.tests.util import run_dbt import os +from dbt.tests.util import run_dbt + class BaseEmpty: def test_empty(self, project): diff --git a/tests/adapter/dbt/tests/adapter/basic/test_ephemeral.py b/tests/functional/adapter/basic/test_ephemeral.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/basic/test_ephemeral.py rename to tests/functional/adapter/basic/test_ephemeral.py index 311d43651a9..9b330b896cd 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_ephemeral.py +++ b/tests/functional/adapter/basic/test_ephemeral.py @@ -1,18 +1,20 @@ -import pytest import os + +import pytest + from dbt.tests.util import ( - run_dbt, - get_manifest, check_relations_equal, check_result_nodes_by_name, + get_manifest, relation_from_name, + run_dbt, ) -from dbt.tests.adapter.basic.files import ( - seeds_base_csv, +from tests.functional.adapter.basic.files import ( base_ephemeral_sql, - ephemeral_view_sql, ephemeral_table_sql, + ephemeral_view_sql, schema_base_yml, + seeds_base_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/basic/test_generic_tests.py b/tests/functional/adapter/basic/test_generic_tests.py similarity index 77% rename from tests/adapter/dbt/tests/adapter/basic/test_generic_tests.py rename to tests/functional/adapter/basic/test_generic_tests.py index 8f1a3e5f0e2..12c564609ab 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_generic_tests.py +++ b/tests/functional/adapter/basic/test_generic_tests.py @@ -1,13 +1,14 @@ import pytest + from dbt.tests.util import run_dbt -from dbt.tests.adapter.basic.files import ( - seeds_base_csv, - generic_test_seed_yml, - base_view_sql, +from tests.functional.adapter.basic.files import ( base_table_sql, - schema_base_yml, - generic_test_view_yml, + base_view_sql, + generic_test_seed_yml, generic_test_table_yml, + generic_test_view_yml, + schema_base_yml, + seeds_base_csv, ) @@ -33,6 +34,17 @@ def models(self): "schema_table.yml": generic_test_table_yml, } + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + def test_generic_tests(self, project): # seed command results = run_dbt(["seed"]) diff --git a/tests/adapter/dbt/tests/adapter/basic/test_incremental.py b/tests/functional/adapter/basic/test_incremental.py similarity index 83% rename from 
tests/adapter/dbt/tests/adapter/basic/test_incremental.py rename to tests/functional/adapter/basic/test_incremental.py index 6852bbda7d6..11eadddbb3f 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_incremental.py +++ b/tests/functional/adapter/basic/test_incremental.py @@ -1,12 +1,13 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal, relation_from_name -from dbt.contracts.results import RunStatus -from dbt.tests.adapter.basic.files import ( - seeds_base_csv, - seeds_added_csv, - schema_base_yml, - incremental_sql, + +from dbt.artifacts.schemas.results import RunStatus +from dbt.tests.util import check_relations_equal, relation_from_name, run_dbt +from tests.functional.adapter.basic.files import ( incremental_not_schema_change_sql, + incremental_sql, + schema_base_yml, + seeds_added_csv, + seeds_base_csv, ) @@ -23,6 +24,17 @@ def models(self): def seeds(self): return {"base.csv": seeds_base_csv, "added.csv": seeds_added_csv} + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + def test_incremental(self, project): # seed command results = run_dbt(["seed"]) diff --git a/tests/adapter/dbt/tests/adapter/basic/test_singular_tests.py b/tests/functional/adapter/basic/test_singular_tests.py similarity index 69% rename from tests/adapter/dbt/tests/adapter/basic/test_singular_tests.py rename to tests/functional/adapter/basic/test_singular_tests.py index 4cbf70094a3..5f72adc0588 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_singular_tests.py +++ b/tests/functional/adapter/basic/test_singular_tests.py @@ -1,9 +1,7 @@ import pytest -from dbt.tests.adapter.basic.files import ( - test_passing_sql, - test_failing_sql, -) + from dbt.tests.util import check_result_nodes_by_name, run_dbt +from tests.functional.adapter.basic.files import test_failing_sql, test_passing_sql class BaseSingularTests: @@ -18,6 +16,17 @@ def tests(self): def project_config_update(self): return {"name": "singular_tests"} + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + def test_singular_tests(self, project): # test command results = run_dbt(["test"], expect_pass=False) diff --git a/tests/adapter/dbt/tests/adapter/basic/test_singular_tests_ephemeral.py b/tests/functional/adapter/basic/test_singular_tests_ephemeral.py similarity index 80% rename from tests/adapter/dbt/tests/adapter/basic/test_singular_tests_ephemeral.py rename to tests/functional/adapter/basic/test_singular_tests_ephemeral.py index caaa629a7a2..43f693ab69c 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_singular_tests_ephemeral.py +++ b/tests/functional/adapter/basic/test_singular_tests_ephemeral.py @@ -1,12 +1,12 @@ import pytest -from dbt.tests.util import run_dbt, check_result_nodes_by_name -from dbt.tests.adapter.basic.files import ( - seeds_base_csv, +from dbt.tests.util import check_result_nodes_by_name, run_dbt +from tests.functional.adapter.basic.files import ( ephemeral_with_cte_sql, - test_ephemeral_passing_sql, - test_ephemeral_failing_sql, schema_base_yml, + seeds_base_csv, + test_ephemeral_failing_sql, + test_ephemeral_passing_sql, ) @@ -39,6 +39,17 @@ def 
project_config_update(self): "name": "singular_tests_ephemeral", } + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + def test_singular_tests_ephemeral(self, project): # check results from seed command results = run_dbt(["seed"]) diff --git a/tests/adapter/dbt/tests/adapter/basic/test_snapshot_check_cols.py b/tests/functional/adapter/basic/test_snapshot_check_cols.py similarity index 89% rename from tests/adapter/dbt/tests/adapter/basic/test_snapshot_check_cols.py rename to tests/functional/adapter/basic/test_snapshot_check_cols.py index a315ee16e30..18c2e9f5fd5 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_snapshot_check_cols.py +++ b/tests/functional/adapter/basic/test_snapshot_check_cols.py @@ -1,11 +1,12 @@ import pytest -from dbt.tests.util import run_dbt, update_rows, relation_from_name -from dbt.tests.adapter.basic.files import ( - seeds_base_csv, - seeds_added_csv, + +from dbt.tests.util import relation_from_name, run_dbt, update_rows +from tests.functional.adapter.basic.files import ( cc_all_snapshot_sql, cc_date_snapshot_sql, cc_name_snapshot_sql, + seeds_added_csv, + seeds_base_csv, ) @@ -35,6 +36,17 @@ def snapshots(self): "cc_name_snapshot.sql": cc_name_snapshot_sql, } + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + def test_snapshot_check_cols(self, project): # seed command results = run_dbt(["seed"]) diff --git a/tests/adapter/dbt/tests/adapter/basic/test_snapshot_timestamp.py b/tests/functional/adapter/basic/test_snapshot_timestamp.py similarity index 85% rename from tests/adapter/dbt/tests/adapter/basic/test_snapshot_timestamp.py rename to tests/functional/adapter/basic/test_snapshot_timestamp.py index b53332cdd30..7211ce28663 100644 --- a/tests/adapter/dbt/tests/adapter/basic/test_snapshot_timestamp.py +++ b/tests/functional/adapter/basic/test_snapshot_timestamp.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.util import run_dbt, relation_from_name, update_rows -from dbt.tests.adapter.basic.files import ( + +from dbt.tests.util import relation_from_name, run_dbt, update_rows +from tests.functional.adapter.basic.files import ( + seeds_added_csv, seeds_base_csv, seeds_newcolumns_csv, - seeds_added_csv, ts_snapshot_sql, ) @@ -33,6 +34,17 @@ def snapshots(self): def project_config_update(self): return {"name": "snapshot_strategy_timestamp"} + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + def test_snapshot_timestamp(self, project): # seed command results = run_dbt(["seed"]) diff --git a/tests/adapter/dbt/tests/adapter/basic/test_table_materialization.py b/tests/functional/adapter/basic/test_table_materialization.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/basic/test_table_materialization.py rename to tests/functional/adapter/basic/test_table_materialization.py index 279152d6985..ec07e4f2c0b 100644 --- 
a/tests/adapter/dbt/tests/adapter/basic/test_table_materialization.py +++ b/tests/functional/adapter/basic/test_table_materialization.py @@ -1,7 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal - +from dbt.tests.util import check_relations_equal, run_dbt seeds__seed_csv = """id,first_name,last_name,email,gender,ip_address 1,Jack,Hunter,jhunter0@pbs.org,Male,59.80.20.168 diff --git a/tests/adapter/dbt/tests/adapter/basic/test_validate_connection.py b/tests/functional/adapter/basic/test_validate_connection.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/basic/test_validate_connection.py rename to tests/functional/adapter/basic/test_validate_connection.py diff --git a/tests/functional/adapter/caching/__init__.py b/tests/functional/adapter/caching/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/caching/test_caching.py b/tests/functional/adapter/caching/test_caching.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/caching/test_caching.py rename to tests/functional/adapter/caching/test_caching.py diff --git a/tests/functional/adapter/catalog/__init__.py b/tests/functional/adapter/catalog/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/adapter/catalog/files.py b/tests/functional/adapter/catalog/files.py new file mode 100644 index 00000000000..9c19522e7f9 --- /dev/null +++ b/tests/functional/adapter/catalog/files.py @@ -0,0 +1,33 @@ +MY_SEED = """ +id,value,record_valid_date +1,100,2023-01-01 00:00:00 +2,200,2023-01-02 00:00:00 +3,300,2023-01-02 00:00:00 +""".strip() + + +MY_TABLE = """ +{{ config( + materialized='table', +) }} +select * +from {{ ref('my_seed') }} +""" + + +MY_VIEW = """ +{{ config( + materialized='view', +) }} +select * +from {{ ref('my_seed') }} +""" + + +MY_MATERIALIZED_VIEW = """ +{{ config( + materialized='materialized_view', +) }} +select * +from {{ ref('my_seed') }} +""" diff --git a/tests/functional/adapter/catalog/relation_types.py b/tests/functional/adapter/catalog/relation_types.py new file mode 100644 index 00000000000..a73972b534d --- /dev/null +++ b/tests/functional/adapter/catalog/relation_types.py @@ -0,0 +1,84 @@ +import pytest + +from dbt.artifacts.schemas.catalog import CatalogArtifact +from dbt.tests.util import run_dbt +from tests.functional.adapter.catalog import files + + +class CatalogRelationTypes: + """ + Many adapters can use this test as-is. However, if your adapter contains different + relation types or uses different strings to describe the node (e.g. 'table' instead of 'BASE TABLE'), + then you'll need to configure this test. + + To configure this test, you'll most likely need to update either `models` + and/or `test_relation_types_populate_correctly`. For example, `dbt-snowflake` + supports dynamic tables and does not support materialized views. 
Its implementation + might look like this: + + class TestCatalogRelationTypes: + @pytest.fixture(scope="class", autouse=True) + def models(self): + yield { + "my_table.sql": files.MY_TABLE, + "my_view.sql": files.MY_VIEW, + "my_dynamic_table.sql": files.MY_DYNAMIC_TABLE, + } + + @pytest.mark.parametrize( + "node_name,relation_type", + [ + ("seed.test.my_seed", "BASE TABLE"), + ("model.test.my_table", "BASE TABLE"), + ("model.test.my_view", "VIEW"), + ("model.test.my_dynamic_table", "DYNAMIC TABLE"), + ], + ) + def test_relation_types_populate_correctly( + self, docs: CatalogArtifact, node_name: str, relation_type: str + ): + super().test_relation_types_populate_correctly( + docs, node_name, relation_type + ) + + Note that we're able to configure the test case using pytest parameterization + and call back to the original test. That way any updates to the test are incorporated + into your adapter. + """ + + @pytest.fixture(scope="class", autouse=True) + def seeds(self): + return {"my_seed.csv": files.MY_SEED} + + @pytest.fixture(scope="class", autouse=True) + def models(self): + yield { + "my_table.sql": files.MY_TABLE, + "my_view.sql": files.MY_VIEW, + "my_materialized_view.sql": files.MY_MATERIALIZED_VIEW, + } + + @pytest.fixture(scope="class", autouse=True) + def docs(self, project): + run_dbt(["seed"]) + run_dbt(["run"]) + yield run_dbt(["docs", "generate"]) + + @pytest.mark.parametrize( + "node_name,relation_type", + [ + ("seed.test.my_seed", "BASE TABLE"), + ("model.test.my_table", "BASE TABLE"), + ("model.test.my_view", "VIEW"), + ("model.test.my_materialized_view", "MATERIALIZED VIEW"), + ], + ) + def test_relation_types_populate_correctly( + self, docs: CatalogArtifact, node_name: str, relation_type: str + ): + """ + This test addresses: https://github.com/dbt-labs/dbt-core/issues/8864 + """ + assert node_name in docs.nodes + node = docs.nodes[node_name] + assert node.metadata.type == relation_type diff --git a/tests/functional/adapter/column_types/__init__.py b/tests/functional/adapter/column_types/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/column_types/fixtures.py b/tests/functional/adapter/column_types/fixtures.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/column_types/fixtures.py rename to tests/functional/adapter/column_types/fixtures.py index 97a61c2b6f5..64494cee2c7 100644 --- a/tests/adapter/dbt/tests/adapter/column_types/fixtures.py +++ b/tests/functional/adapter/column_types/fixtures.py @@ -99,7 +99,7 @@ version: 2 models: - name: model - tests: + data_tests: - is_type: column_map: smallint_col: ['integer', 'number'] diff --git a/tests/adapter/dbt/tests/adapter/column_types/test_column_types.py b/tests/functional/adapter/column_types/test_column_types.py similarity index 83% rename from tests/adapter/dbt/tests/adapter/column_types/test_column_types.py rename to tests/functional/adapter/column_types/test_column_types.py index cc213d36a4b..7e028b33c5b 100644 --- a/tests/adapter/dbt/tests/adapter/column_types/test_column_types.py +++ b/tests/functional/adapter/column_types/test_column_types.py @@ -1,6 +1,11 @@ import pytest + from dbt.tests.util import run_dbt -from dbt.tests.adapter.column_types.fixtures import macro_test_is_type_sql, model_sql, schema_yml +from tests.functional.adapter.column_types.fixtures import ( + macro_test_is_type_sql, + model_sql, + schema_yml, +) class BaseColumnTypes: diff --git a/tests/functional/adapter/concurrency/__init__.py 
b/tests/functional/adapter/concurrency/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/concurrency/test_concurrency.py b/tests/functional/adapter/concurrency/test_concurrency.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/concurrency/test_concurrency.py rename to tests/functional/adapter/concurrency/test_concurrency.py index 898deafa587..65932f95ea7 100644 --- a/tests/adapter/dbt/tests/adapter/concurrency/test_concurrency.py +++ b/tests/functional/adapter/concurrency/test_concurrency.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import ( check_relations_equal, check_table_does_not_exist, @@ -8,7 +9,6 @@ write_file, ) - models__invalid_sql = """ {{ config( diff --git a/tests/functional/adapter/constraints/__init__.py b/tests/functional/adapter/constraints/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/constraints/fixtures.py b/tests/functional/adapter/constraints/fixtures.py similarity index 89% rename from tests/adapter/dbt/tests/adapter/constraints/fixtures.py rename to tests/functional/adapter/constraints/fixtures.py index c8c98b26bd5..cfbd5379683 100644 --- a/tests/adapter/dbt/tests/adapter/constraints/fixtures.py +++ b/tests/functional/adapter/constraints/fixtures.py @@ -279,7 +279,7 @@ expression: (id > 0) - type: check expression: id >= 1 - tests: + data_tests: - unique - name: color data_type: text @@ -298,7 +298,7 @@ - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text @@ -317,7 +317,7 @@ - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text @@ -336,7 +336,7 @@ - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text @@ -365,7 +365,7 @@ - type: foreign_key expression: {schema}.foreign_key_model (id) - type: unique - tests: + data_tests: - unique - name: color data_type: text @@ -384,7 +384,7 @@ - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text @@ -403,7 +403,7 @@ - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text @@ -422,7 +422,7 @@ - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text @@ -466,7 +466,7 @@ description: hello constraints: - type: not_null - tests: + data_tests: - unique - name: color data_type: text @@ -517,7 +517,7 @@ description: hello constraints: - type: not_null - tests: + data_tests: - unique - name: from # reserved word quote: true @@ -539,3 +539,47 @@ - name: column_name data_type: text """ + +create_table_macro_sql = """ +{% macro create_table_macro() %} +create table if not exists numbers (n int not null primary key) +{% endmacro %} +""" + +incremental_foreign_key_schema_yml = """ +version: 2 + +models: + - name: raw_numbers + config: + contract: + enforced: true + materialized: table + columns: + - name: n + data_type: integer + constraints: + - type: primary_key + - type: not_null + - name: stg_numbers + config: + contract: + enforced: true + materialized: incremental + on_schema_change: append_new_columns + unique_key: n + columns: + - name: n + data_type: integer + constraints: + - type: foreign_key + expression: {schema}.raw_numbers (n) +""" + +incremental_foreign_key_model_raw_numbers_sql = """ +select 1 as n +""" + 
+incremental_foreign_key_model_stg_numbers_sql = """ +select * from {{ ref('raw_numbers') }} +""" diff --git a/tests/adapter/dbt/tests/adapter/constraints/test_constraints.py b/tests/functional/adapter/constraints/test_constraints.py similarity index 93% rename from tests/adapter/dbt/tests/adapter/constraints/test_constraints.py rename to tests/functional/adapter/constraints/test_constraints.py index 188c3793032..601c88f05a3 100644 --- a/tests/adapter/dbt/tests/adapter/constraints/test_constraints.py +++ b/tests/functional/adapter/constraints/test_constraints.py @@ -1,39 +1,43 @@ -import pytest import re +import pytest + from dbt.tests.util import ( - run_dbt, get_manifest, - run_dbt_and_capture, - write_file, read_file, relation_from_name, + run_dbt, + run_dbt_and_capture, + write_file, ) - -from dbt.tests.adapter.constraints.fixtures import ( - my_model_sql, +from tests.functional.adapter.constraints.fixtures import ( + constrained_model_schema_yml, + create_table_macro_sql, + foreign_key_model_sql, + incremental_foreign_key_model_raw_numbers_sql, + incremental_foreign_key_model_stg_numbers_sql, + incremental_foreign_key_schema_yml, + model_contract_header_schema_yml, + model_data_type_schema_yml, + model_fk_constraint_schema_yml, + model_quoted_column_schema_yml, + model_schema_yml, my_incremental_model_sql, - my_model_wrong_order_sql, - my_model_wrong_name_sql, + my_model_contract_sql_header_sql, my_model_data_type_sql, - model_data_type_schema_yml, - my_model_view_wrong_order_sql, - my_model_view_wrong_name_sql, - my_model_incremental_wrong_order_sql, + my_model_incremental_contract_sql_header_sql, + my_model_incremental_with_nulls_sql, my_model_incremental_wrong_name_sql, + my_model_incremental_wrong_order_depends_on_fk_sql, + my_model_incremental_wrong_order_sql, + my_model_sql, + my_model_view_wrong_name_sql, + my_model_view_wrong_order_sql, my_model_with_nulls_sql, - my_model_incremental_with_nulls_sql, my_model_with_quoted_column_name_sql, - model_schema_yml, - model_fk_constraint_schema_yml, - constrained_model_schema_yml, - model_quoted_column_schema_yml, - foreign_key_model_sql, + my_model_wrong_name_sql, my_model_wrong_order_depends_on_fk_sql, - my_model_incremental_wrong_order_depends_on_fk_sql, - my_model_contract_sql_header_sql, - my_model_incremental_contract_sql_header_sql, - model_contract_header_schema_yml, + my_model_wrong_order_sql, ) @@ -100,7 +104,7 @@ def test__constraints_wrong_column_names(self, project, string_type, int_type): def test__constraints_wrong_column_data_types( self, project, string_type, int_type, schema_string_type, schema_int_type, data_types ): - for (sql_column_value, schema_data_type, error_data_type) in data_types: + for sql_column_value, schema_data_type, error_data_type in data_types: # Write parametrized data_type to sql file write_file( my_model_data_type_sql.format(sql_value=sql_column_value), @@ -142,7 +146,7 @@ def test__constraints_wrong_column_data_types( assert all([(exp in log_output or exp.upper() in log_output) for exp in expected]) def test__constraints_correct_column_data_types(self, project, data_types): - for (sql_column_value, schema_data_type, _) in data_types: + for sql_column_value, schema_data_type, _ in data_types: # Write parametrized data_type to sql file write_file( my_model_data_type_sql.format(sql_value=sql_column_value), @@ -530,3 +534,31 @@ def expected_sql(self): class TestConstraintQuotedColumn(BaseConstraintQuotedColumn): pass + + +class TestIncrementalForeignKeyConstraint: + @pytest.fixture(scope="class") + 
def macros(self): + return { + "create_table.sql": create_table_macro_sql, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": incremental_foreign_key_schema_yml, + "raw_numbers.sql": incremental_foreign_key_model_raw_numbers_sql, + "stg_numbers.sql": incremental_foreign_key_model_stg_numbers_sql, + } + + def test_incremental_foreign_key_constraint(self, project): + unformatted_constraint_schema_yml = read_file("models", "schema.yml") + write_file( + unformatted_constraint_schema_yml.format(schema=project.test_schema), + "models", + "schema.yml", + ) + + run_dbt(["run", "--select", "raw_numbers"]) + run_dbt(["run", "--select", "stg_numbers"]) + run_dbt(["run", "--select", "stg_numbers"]) diff --git a/tests/functional/adapter/dbt_clone/__init__.py b/tests/functional/adapter/dbt_clone/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/dbt_clone/fixtures.py b/tests/functional/adapter/dbt_clone/fixtures.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/dbt_clone/fixtures.py rename to tests/functional/adapter/dbt_clone/fixtures.py index aa1d9bf80b2..cb23d2322a0 100644 --- a/tests/adapter/dbt/tests/adapter/dbt_clone/fixtures.py +++ b/tests/functional/adapter/dbt_clone/fixtures.py @@ -40,7 +40,7 @@ - name: view_model columns: - name: id - tests: + data_tests: - unique: severity: error - not_null diff --git a/tests/adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py b/tests/functional/adapter/dbt_clone/test_dbt_clone.py similarity index 91% rename from tests/adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py rename to tests/functional/adapter/dbt_clone/test_dbt_clone.py index a7d1c6cd400..9e5cb40a926 100644 --- a/tests/adapter/dbt/tests/adapter/dbt_clone/test_dbt_clone.py +++ b/tests/functional/adapter/dbt_clone/test_dbt_clone.py @@ -1,22 +1,24 @@ -import pytest import os import shutil -from copy import deepcopy from collections import Counter +from copy import deepcopy + +import pytest + from dbt.exceptions import DbtRuntimeError -from dbt.tests.util import run_dbt -from dbt.tests.adapter.dbt_clone.fixtures import ( - seed_csv, - table_model_sql, - view_model_sql, +from dbt.tests.util import run_dbt, run_dbt_and_capture +from tests.functional.adapter.dbt_clone.fixtures import ( + custom_can_clone_tables_false_macros_sql, ephemeral_model_sql, exposures_yml, - schema_yml, - snapshot_sql, get_schema_name_sql, - macros_sql, infinite_macros_sql, - custom_can_clone_tables_false_macros_sql, + macros_sql, + schema_yml, + seed_csv, + snapshot_sql, + table_model_sql, + view_model_sql, ) @@ -83,17 +85,14 @@ def copy_state(self, project_root): def run_and_save_state(self, project_root, with_snapshot=False): results = run_dbt(["seed"]) assert len(results) == 1 - assert not any(r.node.deferred for r in results) results = run_dbt(["run"]) assert len(results) == 2 - assert not any(r.node.deferred for r in results) results = run_dbt(["test"]) assert len(results) == 2 if with_snapshot: results = run_dbt(["snapshot"]) assert len(results) == 1 - assert not any(r.node.deferred for r in results) # copy files self.copy_state(project_root) @@ -215,3 +214,19 @@ def clean_up(self, project): project.adapter.drop_schema(relation) pass + + +class TestCloneSameTargetAndState(BaseClone): + def test_clone_same_target_and_state(self, project, unique_schema, other_schema): + project.create_test_schema(other_schema) + self.run_and_save_state(project.project_root) + + clone_args = [ + "clone", + "--defer", + "--state", 
+ "target", + ] + + results, output = run_dbt_and_capture(clone_args, expect_pass=False) + assert "Warning: The state and target directories are the same: 'target'" in output diff --git a/tests/functional/adapter/dbt_debug/__init__.py b/tests/functional/adapter/dbt_debug/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/dbt_debug/test_dbt_debug.py b/tests/functional/adapter/dbt_debug/test_dbt_debug.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/dbt_debug/test_dbt_debug.py rename to tests/functional/adapter/dbt_debug/test_dbt_debug.py index 3ad39e9c2ab..206c7598258 100644 --- a/tests/adapter/dbt/tests/adapter/dbt_debug/test_dbt_debug.py +++ b/tests/functional/adapter/dbt_debug/test_dbt_debug.py @@ -1,6 +1,7 @@ -import pytest import os import re + +import pytest import yaml from dbt.cli.exceptions import DbtUsageException diff --git a/tests/functional/adapter/dbt_show/__init__.py b/tests/functional/adapter/dbt_show/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/adapter/dbt_show/fixtures.py b/tests/functional/adapter/dbt_show/fixtures.py new file mode 100644 index 00000000000..c5480ba16d2 --- /dev/null +++ b/tests/functional/adapter/dbt_show/fixtures.py @@ -0,0 +1,36 @@ +models__sql_header = """ +{% call set_sql_header(config) %} +with _variables as ( + select 1 as my_variable +) +{%- endcall %} +select my_variable from _variables +""" + +models__ephemeral_model = """ +{{ config(materialized = 'ephemeral') }} +select + coalesce(sample_num, 0) + 10 as col_deci +from {{ ref('sample_model') }} +""" + +models__second_ephemeral_model = """ +{{ config(materialized = 'ephemeral') }} +select + col_deci + 100 as col_hundo +from {{ ref('ephemeral_model') }} +""" + +models__sample_model = """ +select * from {{ ref('sample_seed') }} +""" + +seeds__sample_seed = """sample_num,sample_bool +1,true +2,false +3,true +4,false +5,true +6,false +7,true +""" diff --git a/tests/functional/adapter/dbt_show/test_dbt_show.py b/tests/functional/adapter/dbt_show/test_dbt_show.py new file mode 100644 index 00000000000..08a494e031a --- /dev/null +++ b/tests/functional/adapter/dbt_show/test_dbt_show.py @@ -0,0 +1,61 @@ +import pytest + +from dbt.tests.util import run_dbt +from tests.functional.adapter.dbt_show.fixtures import ( + models__ephemeral_model, + models__sample_model, + models__second_ephemeral_model, + models__sql_header, + seeds__sample_seed, +) + + +# -- Below we define base classes for tests you import based on if your adapter supports dbt show or not -- +class BaseShowLimit: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": models__sample_model, + "ephemeral_model.sql": models__ephemeral_model, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"sample_seed.csv": seeds__sample_seed} + + @pytest.mark.parametrize( + "args,expected", + [ + ([], 5), # default limit + (["--limit", 3], 3), # fetch 3 rows + (["--limit", -1], 7), # fetch all rows + ], + ) + def test_limit(self, project, args, expected): + run_dbt(["build"]) + dbt_args = ["show", "--inline", models__second_ephemeral_model, *args] + results = run_dbt(dbt_args) + assert len(results.results[0].agate_table) == expected + # ensure limit was injected in compiled_code when limit specified in command args + limit = results.args.get("limit") + if limit > 0: + assert f"limit {limit}" in results.results[0].node.compiled_code + + +class BaseShowSqlHeader: + 
@pytest.fixture(scope="class") + def models(self): + return { + "sql_header.sql": models__sql_header, + } + + def test_sql_header(self, project): + run_dbt(["show", "--select", "sql_header", "--vars", "timezone: Asia/Kolkata"]) + + +class TestPostgresShowSqlHeader(BaseShowSqlHeader): + pass + + +class TestPostgresShowLimit(BaseShowLimit): + pass diff --git a/tests/functional/adapter/ephemeral/__init__.py b/tests/functional/adapter/ephemeral/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/ephemeral/test_ephemeral.py b/tests/functional/adapter/ephemeral/test_ephemeral.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/ephemeral/test_ephemeral.py rename to tests/functional/adapter/ephemeral/test_ephemeral.py index 22491f3b45d..665a12399a2 100644 --- a/tests/adapter/dbt/tests/adapter/ephemeral/test_ephemeral.py +++ b/tests/functional/adapter/ephemeral/test_ephemeral.py @@ -1,8 +1,9 @@ -import pytest -import re import os -from dbt.tests.util import run_dbt, check_relations_equal +import re + +import pytest +from dbt.tests.util import check_relations_equal, run_dbt models__dependent_sql = """ diff --git a/tests/functional/adapter/hooks/__init__.py b/tests/functional/adapter/hooks/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/hooks/data/seed_model.sql b/tests/functional/adapter/hooks/data/seed_model.sql similarity index 100% rename from tests/adapter/dbt/tests/adapter/hooks/data/seed_model.sql rename to tests/functional/adapter/hooks/data/seed_model.sql diff --git a/tests/adapter/dbt/tests/adapter/hooks/data/seed_run.sql b/tests/functional/adapter/hooks/data/seed_run.sql similarity index 100% rename from tests/adapter/dbt/tests/adapter/hooks/data/seed_run.sql rename to tests/functional/adapter/hooks/data/seed_run.sql diff --git a/tests/adapter/dbt/tests/adapter/hooks/fixtures.py b/tests/functional/adapter/hooks/fixtures.py similarity index 92% rename from tests/adapter/dbt/tests/adapter/hooks/fixtures.py rename to tests/functional/adapter/hooks/fixtures.py index 1fd3962614e..a7c99a02bc3 100644 --- a/tests/adapter/dbt/tests/adapter/hooks/fixtures.py +++ b/tests/functional/adapter/hooks/fixtures.py @@ -27,9 +27,9 @@ """ macros__before_and_after = """ -{% macro custom_run_hook(state, target, run_started_at, invocation_id) %} +{% macro custom_run_hook(state, target, run_started_at, invocation_id, table_name="on_run_hook") %} - insert into {{ target.schema }}.on_run_hook ( + insert into {{ target.schema }}.{{ table_name }} ( test_state, target_dbname, target_host, @@ -341,7 +341,7 @@ - name: example_seed columns: - name: new_col - tests: + data_tests: - not_null """ @@ -351,10 +351,30 @@ - name: example_snapshot columns: - name: new_col - tests: + data_tests: - not_null """ +properties__model_hooks = """ +version: 2 +models: + - name: hooks + config: + pre_hook: "{{ custom_run_hook('start', target, run_started_at, invocation_id, table_name='on_model_hook') }}" + post_hook: "{{ custom_run_hook('end', target, run_started_at, invocation_id, table_name='on_model_hook') }}" +""" + +properties__model_hooks_list = """ +version: 2 +models: + - name: hooks + config: + pre_hook: + - "{{ custom_run_hook('start', target, run_started_at, invocation_id, table_name='on_model_hook') }}" + post_hook: + - "{{ custom_run_hook('end', target, run_started_at, invocation_id, table_name='on_model_hook') }}" +""" + seeds__example_seed_csv = """a,b,c 1,2,3 4,5,6 diff --git 
a/tests/adapter/dbt/tests/adapter/hooks/test_model_hooks.py b/tests/functional/adapter/hooks/test_model_hooks.py similarity index 93% rename from tests/adapter/dbt/tests/adapter/hooks/test_model_hooks.py rename to tests/functional/adapter/hooks/test_model_hooks.py index 73995be3b33..ff397993f3e 100644 --- a/tests/adapter/dbt/tests/adapter/hooks/test_model_hooks.py +++ b/tests/functional/adapter/hooks/test_model_hooks.py @@ -1,15 +1,12 @@ -import pytest - from pathlib import Path -from dbt.exceptions import CompilationError, ParsingError - -from dbt.tests.util import ( - run_dbt, - write_file, -) +import pytest -from dbt.tests.adapter.hooks.fixtures import ( +from dbt.exceptions import ParsingError +from dbt.tests.util import run_dbt, write_file +from dbt_common.exceptions import CompilationError +from tests.functional.adapter.hooks.fixtures import ( + macros__before_and_after, models__hooked, models__hooks, models__hooks_configured, @@ -17,6 +14,8 @@ models__hooks_kwargs, models__post, models__pre, + properties__model_hooks, + properties__model_hooks_list, properties__seed_models, properties__test_snapshot_models, seeds__example_seed_csv, @@ -261,6 +260,27 @@ def test_hooks_on_seeds(self, project): assert len(res) == 1, "Expected exactly one item" +class TestPrePostModelHooksWithMacros(BaseTestPrePost): + @pytest.fixture(scope="class") + def macros(self): + return {"before-and-after.sql": macros__before_and_after} + + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": properties__model_hooks, "hooks.sql": models__hooks} + + def test_pre_and_post_run_hooks(self, project, dbt_profile_target): + run_dbt() + self.check_hooks("start", project, dbt_profile_target.get("host", None)) + self.check_hooks("end", project, dbt_profile_target.get("host", None)) + + +class TestPrePostModelHooksListWithMacros(TestPrePostModelHooksWithMacros): + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": properties__model_hooks_list, "hooks.sql": models__hooks} + + class TestHooksRefsOnSeeds: """ This should not succeed, and raise an explicit error diff --git a/tests/adapter/dbt/tests/adapter/hooks/test_run_hooks.py b/tests/functional/adapter/hooks/test_run_hooks.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/hooks/test_run_hooks.py rename to tests/functional/adapter/hooks/test_run_hooks.py index e508d152450..f8bec5c6aeb 100644 --- a/tests/adapter/dbt/tests/adapter/hooks/test_run_hooks.py +++ b/tests/functional/adapter/hooks/test_run_hooks.py @@ -1,20 +1,16 @@ import os -import pytest - from pathlib import Path -from dbt.tests.adapter.hooks.fixtures import ( - macros__hook, +import pytest + +from dbt.tests.util import check_table_does_not_exist, run_dbt +from tests.functional.adapter.hooks.fixtures import ( macros__before_and_after, - models__hooks, - seeds__example_seed_csv, + macros__hook, macros_missing_column, + models__hooks, models__missing_column, -) - -from dbt.tests.util import ( - check_table_does_not_exist, - run_dbt, + seeds__example_seed_csv, ) diff --git a/tests/functional/adapter/incremental/__init__.py b/tests/functional/adapter/incremental/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/incremental/fixtures.py b/tests/functional/adapter/incremental/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/incremental/fixtures.py rename to tests/functional/adapter/incremental/fixtures.py diff --git 
a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_merge_exclude_columns.py b/tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/incremental/test_incremental_merge_exclude_columns.py rename to tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py index c283ca9f36c..79e9100adcf 100644 --- a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_merge_exclude_columns.py +++ b/tests/functional/adapter/incremental/test_incremental_merge_exclude_columns.py @@ -1,7 +1,8 @@ -import pytest -from dbt.tests.util import run_dbt, check_relations_equal from collections import namedtuple +import pytest + +from dbt.tests.util import check_relations_equal, run_dbt models__merge_exclude_columns_sql = """ {{ config( diff --git a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_on_schema_change.py b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/incremental/test_incremental_on_schema_change.py rename to tests/functional/adapter/incremental/test_incremental_on_schema_change.py index 4fbefbe7651..8182e35dd38 100644 --- a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_on_schema_change.py +++ b/tests/functional/adapter/incremental/test_incremental_on_schema_change.py @@ -1,23 +1,19 @@ import pytest -from dbt.tests.util import ( - check_relations_equal, - run_dbt, -) - -from dbt.tests.adapter.incremental.fixtures import ( - _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, +from dbt.tests.util import check_relations_equal, run_dbt +from tests.functional.adapter.incremental.fixtures import ( + _MODELS__A, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + _MODELS__INCREMENTAL_FAIL, _MODELS__INCREMENTAL_IGNORE, - _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, _MODELS__INCREMENTAL_IGNORE_TARGET, - _MODELS__INCREMENTAL_FAIL, _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, - _MODELS__A, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, ) diff --git a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_predicates.py b/tests/functional/adapter/incremental/test_incremental_predicates.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/incremental/test_incremental_predicates.py rename to tests/functional/adapter/incremental/test_incremental_predicates.py index 2060e9eb6d4..f6eaf67fb17 100644 --- a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_predicates.py +++ b/tests/functional/adapter/incremental/test_incremental_predicates.py @@ -1,7 +1,8 @@ -import pytest -from dbt.tests.util import run_dbt, check_relations_equal from collections import namedtuple +import pytest + +from dbt.tests.util import check_relations_equal, run_dbt models__delete_insert_incremental_predicates_sql = """ {{ config( diff --git a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_unique_id.py b/tests/functional/adapter/incremental/test_incremental_unique_id.py similarity index 97% rename from 
tests/adapter/dbt/tests/adapter/incremental/test_incremental_unique_id.py rename to tests/functional/adapter/incremental/test_incremental_unique_id.py index 0900479735b..194d35db56f 100644 --- a/tests/adapter/dbt/tests/adapter/incremental/test_incremental_unique_id.py +++ b/tests/functional/adapter/incremental/test_incremental_unique_id.py @@ -1,9 +1,11 @@ -import pytest -from dbt.tests.util import run_dbt, check_relations_equal -from dbt.contracts.results import RunStatus from collections import namedtuple from pathlib import Path +import pytest + +from dbt.artifacts.schemas.results import RunStatus +from dbt.tests.util import check_relations_equal, run_dbt + models__trinary_unique_key_list_sql = """ -- a multi-argument unique key list should see overwriting on rows in the model -- where all unique key fields apply @@ -344,6 +346,17 @@ def seeds(self): "add_new_rows.sql": seeds__add_new_rows_sql, } + @pytest.fixture(autouse=True) + def clean_up(self, project): + yield + with project.adapter.connection_named("__test"): + relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.adapter.drop_schema(relation) + + pass + def update_incremental_model(self, incremental_model): """update incremental model after the seed table has been updated""" model_result_set = run_dbt(["run", "--select", incremental_model]) diff --git a/tests/functional/adapter/materialized_view/__init__.py b/tests/functional/adapter/materialized_view/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/materialized_view/basic.py b/tests/functional/adapter/materialized_view/basic.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/materialized_view/basic.py rename to tests/functional/adapter/materialized_view/basic.py index ec90d503650..1d82977ae64 100644 --- a/tests/adapter/dbt/tests/adapter/materialized_view/basic.py +++ b/tests/functional/adapter/materialized_view/basic.py @@ -3,7 +3,7 @@ import pytest from dbt.adapters.base.relation import BaseRelation -from dbt.contracts.relation import RelationType +from dbt.adapters.contracts.relation import RelationType from dbt.tests.util import ( assert_message_in_logs, get_model_file, @@ -11,8 +11,7 @@ run_dbt_and_capture, set_model_file, ) - -from dbt.tests.adapter.materialized_view.files import ( +from tests.functional.adapter.materialized_view.files import ( MY_MATERIALIZED_VIEW, MY_SEED, MY_TABLE, diff --git a/tests/adapter/dbt/tests/adapter/materialized_view/changes.py b/tests/functional/adapter/materialized_view/changes.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/materialized_view/changes.py rename to tests/functional/adapter/materialized_view/changes.py index 5fc933fbe0d..7005a5e0b26 100644 --- a/tests/adapter/dbt/tests/adapter/materialized_view/changes.py +++ b/tests/functional/adapter/materialized_view/changes.py @@ -3,8 +3,7 @@ import pytest from dbt.adapters.base.relation import BaseRelation -from dbt.contracts.graph.model_config import OnConfigurationChangeOption -from dbt.contracts.relation import RelationType +from dbt.adapters.contracts.relation import RelationType from dbt.tests.util import ( assert_message_in_logs, get_model_file, @@ -12,8 +11,8 @@ run_dbt_and_capture, set_model_file, ) - -from dbt.tests.adapter.materialized_view.files import ( +from dbt_common.contracts.config.materialization import OnConfigurationChangeOption +from tests.functional.adapter.materialized_view.files import ( MY_MATERIALIZED_VIEW, 
MY_SEED, ) diff --git a/tests/adapter/dbt/tests/adapter/materialized_view/files.py b/tests/functional/adapter/materialized_view/files.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/materialized_view/files.py rename to tests/functional/adapter/materialized_view/files.py diff --git a/tests/functional/adapter/persist_docs/__init__.py b/tests/functional/adapter/persist_docs/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/persist_docs/fixtures.py b/tests/functional/adapter/persist_docs/fixtures.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/persist_docs/fixtures.py rename to tests/functional/adapter/persist_docs/fixtures.py diff --git a/tests/adapter/dbt/tests/adapter/persist_docs/test_persist_docs.py b/tests/functional/adapter/persist_docs/test_persist_docs.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/persist_docs/test_persist_docs.py rename to tests/functional/adapter/persist_docs/test_persist_docs.py index 99c0ef746f9..3f4521e69e5 100644 --- a/tests/adapter/dbt/tests/adapter/persist_docs/test_persist_docs.py +++ b/tests/functional/adapter/persist_docs/test_persist_docs.py @@ -1,18 +1,18 @@ import json import os + import pytest from dbt.tests.util import run_dbt - -from dbt.tests.adapter.persist_docs.fixtures import ( +from tests.functional.adapter.persist_docs.fixtures import ( _DOCS__MY_FUN_DOCS, _MODELS__MISSING_COLUMN, _MODELS__MODEL_USING_QUOTE_UTIL, _MODELS__NO_DOCS_MODEL, _MODELS__TABLE, _MODELS__VIEW, - _PROPERTIES__QUOTE_MODEL, _PROPERITES__SCHEMA_MISSING_COL, + _PROPERTIES__QUOTE_MODEL, _PROPERTIES__SCHEMA_YML, _SEEDS__SEED, ) diff --git a/tests/functional/adapter/python_model/__init__.py b/tests/functional/adapter/python_model/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/python_model/test_python_model.py b/tests/functional/adapter/python_model/test_python_model.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/python_model/test_python_model.py rename to tests/functional/adapter/python_model/test_python_model.py index 259895abde9..bba537bb2fb 100644 --- a/tests/adapter/dbt/tests/adapter/python_model/test_python_model.py +++ b/tests/functional/adapter/python_model/test_python_model.py @@ -1,6 +1,8 @@ -import pytest import os + +import pytest import yaml + from dbt.tests.util import run_dbt basic_sql = """ diff --git a/tests/adapter/dbt/tests/adapter/python_model/test_spark.py b/tests/functional/adapter/python_model/test_spark.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/python_model/test_spark.py rename to tests/functional/adapter/python_model/test_spark.py index 7e105a957d3..2c5148e84fd 100644 --- a/tests/adapter/dbt/tests/adapter/python_model/test_spark.py +++ b/tests/functional/adapter/python_model/test_spark.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import run_dbt PANDAS_MODEL = """ diff --git a/tests/functional/adapter/query_comment/__init__.py b/tests/functional/adapter/query_comment/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/query_comment/fixtures.py b/tests/functional/adapter/query_comment/fixtures.py similarity index 95% rename from tests/adapter/dbt/tests/adapter/query_comment/fixtures.py rename to tests/functional/adapter/query_comment/fixtures.py index d8848dc089e..ccaf329209c 100644 --- a/tests/adapter/dbt/tests/adapter/query_comment/fixtures.py +++ 
b/tests/functional/adapter/query_comment/fixtures.py @@ -10,7 +10,6 @@ {%- set comment_dict = dict( app='dbt++', macro_version='0.1.0', - dbt_version=dbt_version, message='blah: '~ message) -%} {{ return(comment_dict) }} {%- endmacro -%} diff --git a/tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py b/tests/functional/adapter/query_comment/test_query_comment.py similarity index 95% rename from tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py rename to tests/functional/adapter/query_comment/test_query_comment.py index 16d51b35e75..32dba5a1695 100644 --- a/tests/adapter/dbt/tests/adapter/query_comment/test_query_comment.py +++ b/tests/functional/adapter/query_comment/test_query_comment.py @@ -1,9 +1,13 @@ -import pytest import json + +import pytest + from dbt.exceptions import DbtRuntimeError -from dbt.version import __version__ as dbt_version from dbt.tests.util import run_dbt_and_capture -from dbt.tests.adapter.query_comment.fixtures import MACROS__MACRO_SQL, MODELS__X_SQL +from tests.functional.adapter.query_comment.fixtures import ( + MACROS__MACRO_SQL, + MODELS__X_SQL, +) class BaseDefaultQueryComments: @@ -59,7 +63,6 @@ def test_matches_comment(self, project) -> bool: logs = self.run_get_json() expected_dct = { "app": "dbt++", - "dbt_version": dbt_version, "macro_version": "0.1.0", "message": f"blah: {project.adapter.config.target_name}", } diff --git a/tests/functional/adapter/relations/__init__.py b/tests/functional/adapter/relations/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/relations/test_changing_relation_type.py b/tests/functional/adapter/relations/test_changing_relation_type.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/relations/test_changing_relation_type.py rename to tests/functional/adapter/relations/test_changing_relation_type.py index 2eeb5aea64d..a9b719f7d92 100644 --- a/tests/adapter/dbt/tests/adapter/relations/test_changing_relation_type.py +++ b/tests/functional/adapter/relations/test_changing_relation_type.py @@ -1,9 +1,9 @@ from typing import List, Optional + import pytest from dbt.tests.util import run_dbt - _DEFAULT_CHANGE_RELATION_TYPE_MODEL = """ {{ config(materialized=var('materialized')) }} diff --git a/tests/functional/adapter/relations/test_dropping_schema_named.py b/tests/functional/adapter/relations/test_dropping_schema_named.py new file mode 100644 index 00000000000..d626ba46992 --- /dev/null +++ b/tests/functional/adapter/relations/test_dropping_schema_named.py @@ -0,0 +1,35 @@ +import pytest + +from dbt.tests.util import get_connection, run_dbt + + +class BaseDropSchemaNamed: + @pytest.fixture(scope="class") + def models(self): + return { + "model_a.sql": "select 1 as id", + } + + def test_dropped_schema_named_drops_expected_schema(self, project): + + results = run_dbt(["run"]) + assert len(results) == 1 + + run_dbt( + [ + "run-operation", + "drop_schema_named", + "--args", + f"{{schema_name: {project.test_schema} }}", + ] + ) + + adapter = project.adapter + with get_connection(adapter): + schemas = adapter.list_schemas(project.database) + + assert project.test_schema not in schemas + + +class TestDropSchemaNamed(BaseDropSchemaNamed): + pass diff --git a/tests/functional/adapter/simple_copy/__init__.py b/tests/functional/adapter/simple_copy/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/simple_copy/fixtures.py b/tests/functional/adapter/simple_copy/fixtures.py 
similarity index 99% rename from tests/adapter/dbt/tests/adapter/simple_copy/fixtures.py rename to tests/functional/adapter/simple_copy/fixtures.py index b683bb1111c..8576f7d40c2 100644 --- a/tests/adapter/dbt/tests/adapter/simple_copy/fixtures.py +++ b/tests/functional/adapter/simple_copy/fixtures.py @@ -4,7 +4,7 @@ - name: disabled columns: - name: id - tests: + data_tests: - unique """ diff --git a/tests/adapter/dbt/tests/adapter/simple_copy/test_copy_uppercase.py b/tests/functional/adapter/simple_copy/test_copy_uppercase.py similarity index 94% rename from tests/adapter/dbt/tests/adapter/simple_copy/test_copy_uppercase.py rename to tests/functional/adapter/simple_copy/test_copy_uppercase.py index 92716e619d7..931f485924f 100644 --- a/tests/adapter/dbt/tests/adapter/simple_copy/test_copy_uppercase.py +++ b/tests/functional/adapter/simple_copy/test_copy_uppercase.py @@ -1,18 +1,18 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.adapter.simple_copy.fixtures import ( - _PROPERTIES__SCHEMA_YML, - _SEEDS__SEED_INITIAL, +from dbt.tests.util import check_relations_equal, run_dbt +from tests.functional.adapter.simple_copy.fixtures import ( _MODELS__ADVANCED_INCREMENTAL, _MODELS__COMPOUND_SORT, _MODELS__DISABLED, _MODELS__EMPTY, - _MODELS_GET_AND_REF_UPPERCASE, _MODELS__INCREMENTAL, _MODELS__INTERLEAVED_SORT, _MODELS__MATERIALIZED, _MODELS__VIEW_MODEL, + _MODELS_GET_AND_REF_UPPERCASE, + _PROPERTIES__SCHEMA_YML, + _SEEDS__SEED_INITIAL, ) diff --git a/tests/adapter/dbt/tests/adapter/simple_copy/test_simple_copy.py b/tests/functional/adapter/simple_copy/test_simple_copy.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/simple_copy/test_simple_copy.py rename to tests/functional/adapter/simple_copy/test_simple_copy.py index 0e436417873..c86e7f4b01a 100644 --- a/tests/adapter/dbt/tests/adapter/simple_copy/test_simple_copy.py +++ b/tests/functional/adapter/simple_copy/test_simple_copy.py @@ -1,16 +1,12 @@ # mix in biguery # mix in snowflake -import pytest - from pathlib import Path -from dbt.tests.util import run_dbt, rm_file, write_file, check_relations_equal +import pytest -from dbt.tests.adapter.simple_copy.fixtures import ( - _PROPERTIES__SCHEMA_YML, - _SEEDS__SEED_INITIAL, - _SEEDS__SEED_UPDATE, +from dbt.tests.util import check_relations_equal, rm_file, run_dbt, write_file +from tests.functional.adapter.simple_copy.fixtures import ( _MODELS__ADVANCED_INCREMENTAL, _MODELS__COMPOUND_SORT, _MODELS__DISABLED, @@ -20,6 +16,9 @@ _MODELS__INTERLEAVED_SORT, _MODELS__MATERIALIZED, _MODELS__VIEW_MODEL, + _PROPERTIES__SCHEMA_YML, + _SEEDS__SEED_INITIAL, + _SEEDS__SEED_UPDATE, ) diff --git a/tests/functional/adapter/simple_seed/__init__.py b/tests/functional/adapter/simple_seed/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/simple_seed/fixtures.py b/tests/functional/adapter/simple_seed/fixtures.py similarity index 86% rename from tests/adapter/dbt/tests/adapter/simple_seed/fixtures.py rename to tests/functional/adapter/simple_seed/fixtures.py index d32caa5b7d6..a551fbc74c9 100644 --- a/tests/adapter/dbt/tests/adapter/simple_seed/fixtures.py +++ b/tests/functional/adapter/simple_seed/fixtures.py @@ -34,6 +34,10 @@ models__downstream_from_seed_actual = """ select * from {{ ref('seed_actual') }} +""" +models__downstream_from_seed_pipe_separated = """ +select * from {{ ref('seed_pipe_separated') }} + """ models__from_basic_seed = """ select * from {{ this.schema }}.seed_expected @@ -50,46 
+54,46 @@ - name: seed_enabled columns: - name: birthday - tests: + data_tests: - column_type: type: date - name: seed_id - tests: + data_tests: - column_type: type: text - name: seed_tricky columns: - name: seed_id - tests: + data_tests: - column_type: type: integer - name: seed_id_str - tests: + data_tests: - column_type: type: text - name: a_bool - tests: + data_tests: - column_type: type: boolean - name: looks_like_a_bool - tests: + data_tests: - column_type: type: text - name: a_date - tests: + data_tests: - column_type: type: timestamp without time zone - name: looks_like_a_date - tests: + data_tests: - column_type: type: text - name: relative - tests: + data_tests: - column_type: type: text - name: weekday - tests: + data_tests: - column_type: type: text """ diff --git a/tests/adapter/dbt/tests/adapter/simple_seed/seed_bom.csv b/tests/functional/adapter/simple_seed/seed_bom.csv similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_seed/seed_bom.csv rename to tests/functional/adapter/simple_seed/seed_bom.csv diff --git a/tests/adapter/dbt/tests/adapter/simple_seed/seeds.py b/tests/functional/adapter/simple_seed/seeds.py similarity index 70% rename from tests/adapter/dbt/tests/adapter/simple_seed/seeds.py rename to tests/functional/adapter/simple_seed/seeds.py index 35ccb0bb7a6..b377d90df13 100644 --- a/tests/adapter/dbt/tests/adapter/simple_seed/seeds.py +++ b/tests/functional/adapter/simple_seed/seeds.py @@ -509,6 +509,511 @@ 500,Paula,pshawdv@networksolutions.com,123.27.47.249,2003-10-30 21:19:20 """.lstrip() +seeds__pipe_separated_csv = """ +seed_id|first_name|email|ip_address|birthday +1|Larry|lking0@miitbeian.gov.cn|69.135.206.194|2008-09-12 19:08:31 +2|Larry|lperkins1@toplist.cz|64.210.133.162|1978-05-09 04:15:14 +3|Anna|amontgomery2@miitbeian.gov.cn|168.104.64.114|2011-10-16 04:07:57 +4|Sandra|sgeorge3@livejournal.com|229.235.252.98|1973-07-19 10:52:43 +5|Fred|fwoods4@google.cn|78.229.170.124|2012-09-30 16:38:29 +6|Stephen|shanson5@livejournal.com|182.227.157.105|1995-11-07 21:40:50 +7|William|wmartinez6@upenn.edu|135.139.249.50|1982-09-05 03:11:59 +8|Jessica|jlong7@hao123.com|203.62.178.210|1991-10-16 11:03:15 +9|Douglas|dwhite8@tamu.edu|178.187.247.1|1979-10-01 09:49:48 +10|Lisa|lcoleman9@nydailynews.com|168.234.128.249|2011-05-26 07:45:49 +11|Ralph|rfieldsa@home.pl|55.152.163.149|1972-11-18 19:06:11 +12|Louise|lnicholsb@samsung.com|141.116.153.154|2014-11-25 20:56:14 +13|Clarence|cduncanc@sfgate.com|81.171.31.133|2011-11-17 07:02:36 +14|Daniel|dfranklind@omniture.com|8.204.211.37|1980-09-13 00:09:04 +15|Katherine|klanee@auda.org.au|176.96.134.59|1997-08-22 19:36:56 +16|Billy|bwardf@wikia.com|214.108.78.85|2003-10-19 02:14:47 +17|Annie|agarzag@ocn.ne.jp|190.108.42.70|1988-10-28 15:12:35 +18|Shirley|scolemanh@fastcompany.com|109.251.164.84|1988-08-24 10:50:57 +19|Roger|rfrazieri@scribd.com|38.145.218.108|1985-12-31 15:17:15 +20|Lillian|lstanleyj@goodreads.com|47.57.236.17|1970-06-08 02:09:05 +21|Aaron|arodriguezk@nps.gov|205.245.118.221|1985-10-11 23:07:49 +22|Patrick|pparkerl@techcrunch.com|19.8.100.182|2006-03-29 12:53:56 +23|Phillip|pmorenom@intel.com|41.38.254.103|2011-11-07 15:35:43 +24|Henry|hgarcian@newsvine.com|1.191.216.252|2008-08-28 08:30:44 +25|Irene|iturnero@opera.com|50.17.60.190|1994-04-01 07:15:02 +26|Andrew|adunnp@pen.io|123.52.253.176|2000-11-01 06:03:25 +27|David|dgutierrezq@wp.com|238.23.203.42|1988-01-25 07:29:18 +28|Henry|hsanchezr@cyberchimps.com|248.102.2.185|1983-01-01 13:36:37 
+29|Evelyn|epetersons@gizmodo.com|32.80.46.119|1979-07-16 17:24:12 +30|Tammy|tmitchellt@purevolume.com|249.246.167.88|2001-04-03 10:00:23 +31|Jacqueline|jlittleu@domainmarket.com|127.181.97.47|1986-02-11 21:35:50 +32|Earl|eortizv@opera.com|166.47.248.240|1996-07-06 08:16:27 +33|Juan|jgordonw@sciencedirect.com|71.77.2.200|1987-01-31 03:46:44 +34|Diane|dhowellx@nyu.edu|140.94.133.12|1994-06-11 02:30:05 +35|Randy|rkennedyy@microsoft.com|73.255.34.196|2005-05-26 20:28:39 +36|Janice|jriveraz@time.com|22.214.227.32|1990-02-09 04:16:52 +37|Laura|lperry10@diigo.com|159.148.145.73|2015-03-17 05:59:25 +38|Gary|gray11@statcounter.com|40.193.124.56|1970-01-27 10:04:51 +39|Jesse|jmcdonald12@typepad.com|31.7.86.103|2009-03-14 08:14:29 +40|Sandra|sgonzalez13@goodreads.com|223.80.168.239|1993-05-21 14:08:54 +41|Scott|smoore14@archive.org|38.238.46.83|1980-08-30 11:16:56 +42|Phillip|pevans15@cisco.com|158.234.59.34|2011-12-15 23:26:31 +43|Steven|sriley16@google.ca|90.247.57.68|2011-10-29 19:03:28 +44|Deborah|dbrown17@hexun.com|179.125.143.240|1995-04-10 14:36:07 +45|Lori|lross18@ow.ly|64.80.162.180|1980-12-27 16:49:15 +46|Sean|sjackson19@tumblr.com|240.116.183.69|1988-06-12 21:24:45 +47|Terry|tbarnes1a@163.com|118.38.213.137|1997-09-22 16:43:19 +48|Dorothy|dross1b@ebay.com|116.81.76.49|2005-02-28 13:33:24 +49|Samuel|swashington1c@house.gov|38.191.253.40|1989-01-19 21:15:48 +50|Ralph|rcarter1d@tinyurl.com|104.84.60.174|2007-08-11 10:21:49 +51|Wayne|whudson1e@princeton.edu|90.61.24.102|1983-07-03 16:58:12 +52|Rose|rjames1f@plala.or.jp|240.83.81.10|1995-06-08 11:46:23 +53|Louise|lcox1g@theglobeandmail.com|105.11.82.145|2016-09-19 14:45:51 +54|Kenneth|kjohnson1h@independent.co.uk|139.5.45.94|1976-08-17 11:26:19 +55|Donna|dbrown1i@amazon.co.uk|19.45.169.45|2006-05-27 16:51:40 +56|Johnny|jvasquez1j@trellian.com|118.202.238.23|1975-11-17 08:42:32 +57|Patrick|pramirez1k@tamu.edu|231.25.153.198|1997-08-06 11:51:09 +58|Helen|hlarson1l@prweb.com|8.40.21.39|1993-08-04 19:53:40 +59|Patricia|pspencer1m@gmpg.org|212.198.40.15|1977-08-03 16:37:27 +60|Joseph|jspencer1n@marriott.com|13.15.63.238|2005-07-23 20:22:06 +61|Phillip|pschmidt1o@blogtalkradio.com|177.98.201.190|1976-05-19 21:47:44 +62|Joan|jwebb1p@google.ru|105.229.170.71|1972-09-07 17:53:47 +63|Phyllis|pkennedy1q@imgur.com|35.145.8.244|2000-01-01 22:33:37 +64|Katherine|khunter1r@smh.com.au|248.168.205.32|1991-01-09 06:40:24 +65|Laura|lvasquez1s@wiley.com|128.129.115.152|1997-10-23 12:04:56 +66|Juan|jdunn1t@state.gov|44.228.124.51|2004-11-10 05:07:35 +67|Judith|jholmes1u@wiley.com|40.227.179.115|1977-08-02 17:01:45 +68|Beverly|bbaker1v@wufoo.com|208.34.84.59|2016-03-06 20:07:23 +69|Lawrence|lcarr1w@flickr.com|59.158.212.223|1988-09-13 06:07:21 +70|Gloria|gwilliams1x@mtv.com|245.231.88.33|1995-03-18 22:32:46 +71|Steven|ssims1y@cbslocal.com|104.50.58.255|2001-08-05 21:26:20 +72|Betty|bmills1z@arstechnica.com|103.177.214.220|1981-12-14 21:26:54 +73|Mildred|mfuller20@prnewswire.com|151.158.8.130|2000-04-19 10:13:55 +74|Donald|dday21@icq.com|9.178.102.255|1972-12-03 00:58:24 +75|Eric|ethomas22@addtoany.com|85.2.241.227|1992-11-01 05:59:30 +76|Joyce|jarmstrong23@sitemeter.com|169.224.20.36|1985-10-24 06:50:01 +77|Maria|mmartinez24@amazonaws.com|143.189.167.135|2005-10-05 05:17:42 +78|Harry|hburton25@youtube.com|156.47.176.237|1978-03-26 05:53:33 +79|Kevin|klawrence26@hao123.com|79.136.183.83|1994-10-12 04:38:52 +80|David|dhall27@prweb.com|133.149.172.153|1976-12-15 16:24:24 +81|Kathy|kperry28@twitter.com|229.242.72.228|1979-03-04 02:58:56 
+82|Adam|aprice29@elegantthemes.com|13.145.21.10|1982-11-07 11:46:59 +83|Brandon|bgriffin2a@va.gov|73.249.128.212|2013-10-30 05:30:36 +84|Henry|hnguyen2b@discovery.com|211.36.214.242|1985-01-09 06:37:27 +85|Eric|esanchez2c@edublogs.org|191.166.188.251|2004-05-01 23:21:42 +86|Jason|jlee2d@jimdo.com|193.92.16.182|1973-01-08 09:05:39 +87|Diana|drichards2e@istockphoto.com|19.130.175.245|1994-10-05 22:50:49 +88|Andrea|awelch2f@abc.net.au|94.155.233.96|2002-04-26 08:41:44 +89|Louis|lwagner2g@miitbeian.gov.cn|26.217.34.111|2003-08-25 07:56:39 +90|Jane|jsims2h@seesaa.net|43.4.220.135|1987-03-20 20:39:04 +91|Larry|lgrant2i@si.edu|97.126.79.34|2000-09-07 20:26:19 +92|Louis|ldean2j@prnewswire.com|37.148.40.127|2011-09-16 20:12:14 +93|Jennifer|jcampbell2k@xing.com|38.106.254.142|1988-07-15 05:06:49 +94|Wayne|wcunningham2l@google.com.hk|223.28.26.187|2009-12-15 06:16:54 +95|Lori|lstevens2m@icq.com|181.250.181.58|1984-10-28 03:29:19 +96|Judy|jsimpson2n@marriott.com|180.121.239.219|1986-02-07 15:18:10 +97|Phillip|phoward2o@usa.gov|255.247.0.175|2002-12-26 08:44:45 +98|Gloria|gwalker2p@usa.gov|156.140.7.128|1997-10-04 07:58:58 +99|Paul|pjohnson2q@umn.edu|183.59.198.197|1991-11-14 12:33:55 +100|Frank|fgreene2r@blogspot.com|150.143.68.121|2010-06-12 23:55:39 +101|Deborah|dknight2s@reverbnation.com|222.131.211.191|1970-07-08 08:54:23 +102|Sandra|sblack2t@tripadvisor.com|254.183.128.254|2000-04-12 02:39:36 +103|Edward|eburns2u@dailymotion.com|253.89.118.18|1993-10-10 10:54:01 +104|Anthony|ayoung2v@ustream.tv|118.4.193.176|1978-08-26 17:07:29 +105|Donald|dlawrence2w@wp.com|139.200.159.227|2007-07-21 20:56:20 +106|Matthew|mfreeman2x@google.fr|205.26.239.92|2014-12-05 17:05:39 +107|Sean|ssanders2y@trellian.com|143.89.82.108|1993-07-14 21:45:02 +108|Sharon|srobinson2z@soundcloud.com|66.234.247.54|1977-04-06 19:07:03 +109|Jennifer|jwatson30@t-online.de|196.102.127.7|1998-03-07 05:12:23 +110|Clarence|cbrooks31@si.edu|218.93.234.73|2002-11-06 17:22:25 +111|Jose|jflores32@goo.gl|185.105.244.231|1995-01-05 06:32:21 +112|George|glee33@adobe.com|173.82.249.196|2015-01-04 02:47:46 +113|Larry|lhill34@linkedin.com|66.5.206.195|2010-11-02 10:21:17 +114|Marie|mmeyer35@mysql.com|151.152.88.107|1990-05-22 20:52:51 +115|Clarence|cwebb36@skype.com|130.198.55.217|1972-10-27 07:38:54 +116|Sarah|scarter37@answers.com|80.89.18.153|1971-08-24 19:29:30 +117|Henry|hhughes38@webeden.co.uk|152.60.114.174|1973-01-27 09:00:42 +118|Teresa|thenry39@hao123.com|32.187.239.106|2015-11-06 01:48:44 +119|Billy|bgutierrez3a@sun.com|52.37.70.134|2002-03-19 03:20:19 +120|Anthony|agibson3b@github.io|154.251.232.213|1991-04-19 01:08:15 +121|Sandra|sromero3c@wikia.com|44.124.171.2|1998-09-06 20:30:34 +122|Paula|pandrews3d@blogs.com|153.142.118.226|2003-06-24 16:31:24 +123|Terry|tbaker3e@csmonitor.com|99.120.45.219|1970-12-09 23:57:21 +124|Lois|lwilson3f@reuters.com|147.44.171.83|1971-01-09 22:28:51 +125|Sara|smorgan3g@nature.com|197.67.192.230|1992-01-28 20:33:24 +126|Charles|ctorres3h@china.com.cn|156.115.216.2|1993-10-02 19:36:34 +127|Richard|ralexander3i@marriott.com|248.235.180.59|1999-02-03 18:40:55 +128|Christina|charper3j@cocolog-nifty.com|152.114.116.129|1978-09-13 00:37:32 +129|Steve|sadams3k@economist.com|112.248.91.98|2004-03-21 09:07:43 +130|Katherine|krobertson3l@ow.ly|37.220.107.28|1977-03-18 19:28:50 +131|Donna|dgibson3m@state.gov|222.218.76.221|1999-02-01 06:46:16 +132|Christina|cwest3n@mlb.com|152.114.6.160|1979-12-24 15:30:35 +133|Sandra|swillis3o@meetup.com|180.71.49.34|1984-09-27 08:05:54 
+134|Clarence|cedwards3p@smugmug.com|10.64.180.186|1979-04-16 16:52:10 +135|Ruby|rjames3q@wp.com|98.61.54.20|2007-01-13 14:25:52 +136|Sarah|smontgomery3r@tripod.com|91.45.164.172|2009-07-25 04:34:30 +137|Sarah|soliver3s@eventbrite.com|30.106.39.146|2012-05-09 22:12:33 +138|Deborah|dwheeler3t@biblegateway.com|59.105.213.173|1999-11-09 08:08:44 +139|Deborah|dray3u@i2i.jp|11.108.186.217|2014-02-04 03:15:19 +140|Paul|parmstrong3v@alexa.com|6.250.59.43|2009-12-21 10:08:53 +141|Aaron|abishop3w@opera.com|207.145.249.62|1996-04-25 23:20:23 +142|Henry|hsanders3x@google.ru|140.215.203.171|2012-01-29 11:52:32 +143|Anne|aanderson3y@1688.com|74.150.102.118|1982-04-03 13:46:17 +144|Victor|vmurphy3z@hugedomains.com|222.155.99.152|1987-11-03 19:58:41 +145|Evelyn|ereid40@pbs.org|249.122.33.117|1977-12-14 17:09:57 +146|Brian|bgonzalez41@wikia.com|246.254.235.141|1991-02-24 00:45:58 +147|Sandra|sgray42@squarespace.com|150.73.28.159|1972-07-28 17:26:32 +148|Alice|ajones43@a8.net|78.253.12.177|2002-12-05 16:57:46 +149|Jessica|jhanson44@mapquest.com|87.229.30.160|1994-01-30 11:40:04 +150|Louise|lbailey45@reuters.com|191.219.31.101|2011-09-07 21:11:45 +151|Christopher|cgonzalez46@printfriendly.com|83.137.213.239|1984-10-24 14:58:04 +152|Gregory|gcollins47@yandex.ru|28.176.10.115|1998-07-25 17:17:10 +153|Jane|jperkins48@usnews.com|46.53.164.159|1979-08-19 15:25:00 +154|Phyllis|plong49@yahoo.co.jp|208.140.88.2|1985-07-06 02:16:36 +155|Adam|acarter4a@scribd.com|78.48.148.204|2005-07-20 03:31:09 +156|Frank|fweaver4b@angelfire.com|199.180.255.224|2011-03-04 23:07:54 +157|Ronald|rmurphy4c@cloudflare.com|73.42.97.231|1991-01-11 10:39:41 +158|Richard|rmorris4d@e-recht24.de|91.9.97.223|2009-01-17 21:05:15 +159|Rose|rfoster4e@woothemes.com|203.169.53.16|1991-04-21 02:09:38 +160|George|ggarrett4f@uiuc.edu|186.61.5.167|1989-11-11 11:29:42 +161|Victor|vhamilton4g@biblegateway.com|121.229.138.38|2012-06-22 18:01:23 +162|Mark|mbennett4h@businessinsider.com|209.184.29.203|1980-04-16 15:26:34 +163|Martin|mwells4i@ifeng.com|97.223.55.105|2010-05-26 14:08:18 +164|Diana|dstone4j@google.ru|90.155.52.47|2013-02-11 00:14:54 +165|Walter|wferguson4k@blogger.com|30.63.212.44|1986-02-20 17:46:46 +166|Denise|dcoleman4l@vistaprint.com|10.209.153.77|1992-05-13 20:14:14 +167|Philip|pknight4m@xing.com|15.28.135.167|2000-09-11 18:41:13 +168|Russell|rcarr4n@youtube.com|113.55.165.50|2008-07-10 17:49:27 +169|Donna|dburke4o@dion.ne.jp|70.0.105.111|1992-02-10 17:24:58 +170|Anne|along4p@squidoo.com|36.154.58.107|2012-08-19 23:35:31 +171|Clarence|cbanks4q@webeden.co.uk|94.57.53.114|1972-03-11 21:46:44 +172|Betty|bbowman4r@cyberchimps.com|178.115.209.69|2013-01-13 21:34:51 +173|Andrew|ahudson4s@nytimes.com|84.32.252.144|1998-09-15 14:20:04 +174|Keith|kgordon4t@cam.ac.uk|189.237.211.102|2009-01-22 05:34:38 +175|Patrick|pwheeler4u@mysql.com|47.22.117.226|1984-09-05 22:33:15 +176|Jesse|jfoster4v@mapquest.com|229.95.131.46|1990-01-20 12:19:15 +177|Arthur|afisher4w@jugem.jp|107.255.244.98|1983-10-13 11:08:46 +178|Nicole|nryan4x@wsj.com|243.211.33.221|1974-05-30 23:19:14 +179|Bruce|bjohnson4y@sfgate.com|17.41.200.101|1992-09-23 02:02:19 +180|Terry|tcox4z@reference.com|20.189.120.106|1982-02-13 12:43:14 +181|Ashley|astanley50@kickstarter.com|86.3.56.98|1976-05-09 01:27:16 +182|Michael|mrivera51@about.me|72.118.249.0|1971-11-11 17:28:37 +183|Steven|sgonzalez52@mozilla.org|169.112.247.47|2002-08-24 14:59:25 +184|Kathleen|kfuller53@bloglovin.com|80.93.59.30|2002-03-11 13:41:29 +185|Nicole|nhenderson54@usda.gov|39.253.60.30|1995-04-24 05:55:07 
+186|Ralph|rharper55@purevolume.com|167.147.142.189|1980-02-10 18:35:45 +187|Heather|hcunningham56@photobucket.com|96.222.196.229|2007-06-15 05:37:50 +188|Nancy|nlittle57@cbc.ca|241.53.255.175|2007-07-12 23:42:48 +189|Juan|jramirez58@pinterest.com|190.128.84.27|1978-11-07 23:37:37 +190|Beverly|bfowler59@chronoengine.com|54.144.230.49|1979-03-31 23:27:28 +191|Shirley|sstevens5a@prlog.org|200.97.231.248|2011-12-06 07:08:50 +192|Annie|areyes5b@squidoo.com|223.32.182.101|2011-05-28 02:42:09 +193|Jack|jkelley5c@tiny.cc|47.34.118.150|1981-12-05 17:31:40 +194|Keith|krobinson5d@1und1.de|170.210.209.31|1999-03-09 11:05:43 +195|Joseph|jmiller5e@google.com.au|136.74.212.139|1984-10-08 13:18:20 +196|Annie|aday5f@blogspot.com|71.99.186.69|1986-02-18 12:27:34 +197|Nancy|nperez5g@liveinternet.ru|28.160.6.107|1983-10-20 17:51:20 +198|Tammy|tward5h@ucoz.ru|141.43.164.70|1980-03-31 04:45:29 +199|Doris|dryan5i@ted.com|239.117.202.188|1985-07-03 03:17:53 +200|Rose|rmendoza5j@photobucket.com|150.200.206.79|1973-04-21 21:36:40 +201|Cynthia|cbutler5k@hubpages.com|80.153.174.161|2001-01-20 01:42:26 +202|Samuel|soliver5l@people.com.cn|86.127.246.140|1970-09-02 02:19:00 +203|Carl|csanchez5m@mysql.com|50.149.237.107|1993-12-01 07:02:09 +204|Kathryn|kowens5n@geocities.jp|145.166.205.201|2004-07-06 18:39:33 +205|Nicholas|nnichols5o@parallels.com|190.240.66.170|2014-11-11 18:52:19 +206|Keith|kwillis5p@youtube.com|181.43.206.100|1998-06-13 06:30:51 +207|Justin|jwebb5q@intel.com|211.54.245.74|2000-11-04 16:58:26 +208|Gary|ghicks5r@wikipedia.org|196.154.213.104|1992-12-01 19:48:28 +209|Martin|mpowell5s@flickr.com|153.67.12.241|1983-06-30 06:24:32 +210|Brenda|bkelley5t@xinhuanet.com|113.100.5.172|2005-01-08 20:50:22 +211|Edward|eray5u@a8.net|205.187.246.65|2011-09-26 08:04:44 +212|Steven|slawson5v@senate.gov|238.150.250.36|1978-11-22 02:48:09 +213|Robert|rthompson5w@furl.net|70.7.89.236|2001-09-12 08:52:07 +214|Jack|jporter5x@diigo.com|220.172.29.99|1976-07-26 14:29:21 +215|Lisa|ljenkins5y@oakley.com|150.151.170.180|2010-03-20 19:21:16 +216|Theresa|tbell5z@mayoclinic.com|247.25.53.173|2001-03-11 05:36:40 +217|Jimmy|jstephens60@weather.com|145.101.93.235|1983-04-12 09:35:30 +218|Louis|lhunt61@amazon.co.jp|78.137.6.253|1997-08-29 19:34:34 +219|Lawrence|lgilbert62@ted.com|243.132.8.78|2015-04-08 22:06:56 +220|David|dgardner63@4shared.com|204.40.46.136|1971-07-09 03:29:11 +221|Charles|ckennedy64@gmpg.org|211.83.233.2|2011-02-26 11:55:04 +222|Lillian|lbanks65@msu.edu|124.233.12.80|2010-05-16 20:29:02 +223|Ernest|enguyen66@baidu.com|82.45.128.148|1996-07-04 10:07:04 +224|Ryan|rrussell67@cloudflare.com|202.53.240.223|1983-08-05 12:36:29 +225|Donald|ddavis68@ustream.tv|47.39.218.137|1989-05-27 02:30:56 +226|Joe|jscott69@blogspot.com|140.23.131.75|1973-03-16 12:21:31 +227|Anne|amarshall6a@google.ca|113.162.200.197|1988-12-09 03:38:29 +228|Willie|wturner6b@constantcontact.com|85.83.182.249|1991-10-06 01:51:10 +229|Nicole|nwilson6c@sogou.com|30.223.51.135|1977-05-29 19:54:56 +230|Janet|jwheeler6d@stumbleupon.com|153.194.27.144|2011-03-13 12:48:47 +231|Lois|lcarr6e@statcounter.com|0.41.36.53|1993-02-06 04:52:01 +232|Shirley|scruz6f@tmall.com|37.156.39.223|2007-02-18 17:47:01 +233|Patrick|pford6g@reverbnation.com|36.198.200.89|1977-03-06 15:47:24 +234|Lisa|lhudson6h@usatoday.com|134.213.58.137|2014-10-28 01:56:56 +235|Pamela|pmartinez6i@opensource.org|5.151.127.202|1987-11-30 16:44:47 +236|Larry|lperez6j@infoseek.co.jp|235.122.96.148|1979-01-18 06:33:45 +237|Pamela|pramirez6k@census.gov|138.233.34.163|2012-01-29 10:35:20 
+238|Daniel|dcarr6l@php.net|146.21.152.242|1984-11-17 08:22:59 +239|Patrick|psmith6m@indiegogo.com|136.222.199.36|2001-05-30 22:16:44 +240|Raymond|rhenderson6n@hc360.com|116.31.112.38|2000-01-05 20:35:41 +241|Teresa|treynolds6o@miitbeian.gov.cn|198.126.205.220|1996-11-08 01:27:31 +242|Johnny|jmason6p@flickr.com|192.8.232.114|2013-05-14 05:35:50 +243|Angela|akelly6q@guardian.co.uk|234.116.60.197|1977-08-20 02:05:17 +244|Douglas|dcole6r@cmu.edu|128.135.212.69|2016-10-26 17:40:36 +245|Frances|fcampbell6s@twitpic.com|94.22.243.235|1987-04-26 07:07:13 +246|Donna|dgreen6t@chron.com|227.116.46.107|2011-07-25 12:59:54 +247|Benjamin|bfranklin6u@redcross.org|89.141.142.89|1974-05-03 20:28:18 +248|Randy|rpalmer6v@rambler.ru|70.173.63.178|2011-12-20 17:40:18 +249|Melissa|mmurray6w@bbb.org|114.234.118.137|1991-02-26 12:45:44 +250|Jean|jlittle6x@epa.gov|141.21.163.254|1991-08-16 04:57:09 +251|Daniel|dolson6y@nature.com|125.75.104.97|2010-04-23 06:25:54 +252|Kathryn|kwells6z@eventbrite.com|225.104.28.249|2015-01-31 02:21:50 +253|Theresa|tgonzalez70@ox.ac.uk|91.93.156.26|1971-12-11 10:31:31 +254|Beverly|broberts71@bluehost.com|244.40.158.89|2013-09-21 13:02:31 +255|Pamela|pmurray72@netscape.com|218.54.95.216|1985-04-16 00:34:00 +256|Timothy|trichardson73@amazonaws.com|235.49.24.229|2000-11-11 09:48:28 +257|Mildred|mpalmer74@is.gd|234.125.95.132|1992-05-25 02:25:02 +258|Jessica|jcampbell75@google.it|55.98.30.140|2014-08-26 00:26:34 +259|Beverly|bthomas76@cpanel.net|48.78.228.176|1970-08-18 10:40:05 +260|Eugene|eward77@cargocollective.com|139.226.204.2|1996-12-04 23:17:00 +261|Andrea|aallen78@webnode.com|160.31.214.38|2009-07-06 07:22:37 +262|Justin|jruiz79@merriam-webster.com|150.149.246.122|2005-06-06 11:44:19 +263|Kenneth|kedwards7a@networksolutions.com|98.82.193.128|2001-07-03 02:00:10 +264|Rachel|rday7b@miibeian.gov.cn|114.15.247.221|1994-08-18 19:45:40 +265|Russell|rmiller7c@instagram.com|184.130.152.253|1977-11-06 01:58:12 +266|Bonnie|bhudson7d@cornell.edu|235.180.186.206|1990-12-03 22:45:24 +267|Raymond|rknight7e@yandex.ru|161.2.44.252|1995-08-25 04:31:19 +268|Bonnie|brussell7f@elpais.com|199.237.57.207|1991-03-29 08:32:06 +269|Marie|mhenderson7g@elpais.com|52.203.131.144|2004-06-04 21:50:28 +270|Alan|acarr7h@trellian.com|147.51.205.72|2005-03-03 10:51:31 +271|Barbara|bturner7i@hugedomains.com|103.160.110.226|2004-08-04 13:42:40 +272|Christina|cdaniels7j@census.gov|0.238.61.251|1972-10-18 12:47:33 +273|Jeremy|jgomez7k@reuters.com|111.26.65.56|2013-01-13 10:41:35 +274|Laura|lwood7l@icio.us|149.153.38.205|2011-06-25 09:33:59 +275|Matthew|mbowman7m@auda.org.au|182.138.206.172|1999-03-05 03:25:36 +276|Denise|dparker7n@icq.com|0.213.88.138|2011-11-04 09:43:06 +277|Phillip|pparker7o@discuz.net|219.242.165.240|1973-10-19 04:22:29 +278|Joan|jpierce7p@salon.com|63.31.213.202|1989-04-09 22:06:24 +279|Irene|ibaker7q@cbc.ca|102.33.235.114|1992-09-04 13:00:57 +280|Betty|bbowman7r@ted.com|170.91.249.242|2015-09-28 08:14:22 +281|Teresa|truiz7s@boston.com|82.108.158.207|1999-07-18 05:17:09 +282|Helen|hbrooks7t@slideshare.net|102.87.162.187|2003-01-06 15:45:29 +283|Karen|kgriffin7u@wunderground.com|43.82.44.184|2010-05-28 01:56:37 +284|Lisa|lfernandez7v@mtv.com|200.238.218.220|1993-04-03 20:33:51 +285|Jesse|jlawrence7w@timesonline.co.uk|95.122.105.78|1990-01-05 17:28:43 +286|Terry|tross7x@macromedia.com|29.112.114.133|2009-08-29 21:32:17 +287|Angela|abradley7y@icq.com|177.44.27.72|1989-10-04 21:46:06 +288|Maria|mhart7z@dailymotion.com|55.27.55.202|1975-01-21 01:22:57 
+289|Raymond|randrews80@pinterest.com|88.90.78.67|1992-03-16 21:37:40 +290|Kathy|krice81@bluehost.com|212.63.196.102|2000-12-14 03:06:44 +291|Cynthia|cramos82@nymag.com|107.89.190.6|2005-06-28 02:02:33 +292|Kimberly|kjones83@mysql.com|86.169.101.101|2007-06-13 22:56:49 +293|Timothy|thansen84@microsoft.com|108.100.254.90|2003-04-04 10:31:57 +294|Carol|cspencer85@berkeley.edu|75.118.144.187|1999-03-30 14:53:21 +295|Louis|lmedina86@latimes.com|141.147.163.24|1991-04-11 17:53:13 +296|Margaret|mcole87@google.fr|53.184.26.83|1991-12-19 01:54:10 +297|Mary|mgomez88@yellowpages.com|208.56.57.99|1976-05-21 18:05:08 +298|Amanda|aanderson89@geocities.com|147.73.15.252|1987-08-22 15:05:28 +299|Kathryn|kgarrett8a@nature.com|27.29.177.220|1976-07-15 04:25:04 +300|Dorothy|dmason8b@shareasale.com|106.210.99.193|1990-09-03 21:39:31 +301|Lois|lkennedy8c@amazon.de|194.169.29.187|2007-07-29 14:09:31 +302|Irene|iburton8d@washingtonpost.com|196.143.110.249|2013-09-05 11:32:46 +303|Betty|belliott8e@wired.com|183.105.222.199|1979-09-19 19:29:13 +304|Bobby|bmeyer8f@census.gov|36.13.161.145|2014-05-24 14:34:39 +305|Ann|amorrison8g@sfgate.com|72.154.54.137|1978-10-05 14:22:34 +306|Daniel|djackson8h@wunderground.com|144.95.32.34|1990-07-27 13:23:05 +307|Joe|jboyd8i@alibaba.com|187.105.86.178|2011-09-28 16:46:32 +308|Ralph|rdunn8j@fc2.com|3.19.87.255|1984-10-18 08:00:40 +309|Craig|ccarter8k@gizmodo.com|235.152.76.215|1998-07-04 12:15:21 +310|Paula|pdean8l@hhs.gov|161.100.173.197|1973-02-13 09:38:55 +311|Andrew|agarrett8m@behance.net|199.253.123.218|1991-02-14 13:36:32 +312|Janet|jhowell8n@alexa.com|39.189.139.79|2012-11-24 20:17:33 +313|Keith|khansen8o@godaddy.com|116.186.223.196|1987-08-23 21:22:05 +314|Nicholas|nedwards8p@state.gov|142.175.142.11|1977-03-28 18:27:27 +315|Jacqueline|jallen8q@oaic.gov.au|189.66.135.192|1994-10-26 11:44:26 +316|Frank|fgardner8r@mapy.cz|154.77.119.169|1983-01-29 19:19:51 +317|Eric|eharrison8s@google.cn|245.139.65.123|1984-02-04 09:54:36 +318|Gregory|gcooper8t@go.com|171.147.0.221|2004-06-14 05:22:08 +319|Jean|jfreeman8u@rakuten.co.jp|67.243.121.5|1977-01-07 18:23:43 +320|Juan|jlewis8v@shinystat.com|216.181.171.189|2001-08-23 17:32:43 +321|Randy|rwilliams8w@shinystat.com|105.152.146.28|1983-02-17 00:05:50 +322|Stephen|shart8x@sciencedirect.com|196.131.205.148|2004-02-15 10:12:03 +323|Annie|ahunter8y@example.com|63.36.34.103|2003-07-23 21:15:25 +324|Melissa|mflores8z@cbc.ca|151.230.217.90|1983-11-02 14:53:56 +325|Jane|jweaver90@about.me|0.167.235.217|1987-07-29 00:13:44 +326|Anthony|asmith91@oracle.com|97.87.48.41|2001-05-31 18:44:11 +327|Terry|tdavis92@buzzfeed.com|46.20.12.51|2015-09-12 23:13:55 +328|Brandon|bmontgomery93@gravatar.com|252.101.48.186|2010-10-28 08:26:27 +329|Chris|cmurray94@bluehost.com|25.158.167.97|2004-05-05 16:10:31 +330|Denise|dfuller95@hugedomains.com|216.210.149.28|1979-04-20 08:57:24 +331|Arthur|amcdonald96@sakura.ne.jp|206.42.36.213|2009-08-15 03:26:16 +332|Jesse|jhoward97@google.cn|46.181.118.30|1974-04-18 14:08:41 +333|Frank|fsimpson98@domainmarket.com|163.220.211.87|2006-06-30 14:46:52 +334|Janice|jwoods99@pen.io|229.245.237.182|1988-04-06 11:52:58 +335|Rebecca|rroberts9a@huffingtonpost.com|148.96.15.80|1976-10-05 08:44:16 +336|Joshua|jray9b@opensource.org|192.253.12.198|1971-12-25 22:27:07 +337|Joyce|jcarpenter9c@statcounter.com|125.171.46.215|2001-12-31 22:08:13 +338|Andrea|awest9d@privacy.gov.au|79.101.180.201|1983-02-18 20:07:47 +339|Christine|chudson9e@yelp.com|64.198.43.56|1997-09-08 08:03:43 +340|Joe|jparker9f@earthlink.net|251.215.148.153|1973-11-04 
05:08:18 +341|Thomas|tkim9g@answers.com|49.187.34.47|1991-08-07 21:13:48 +342|Janice|jdean9h@scientificamerican.com|4.197.117.16|2009-12-08 02:35:49 +343|James|jmitchell9i@umich.edu|43.121.18.147|2011-04-28 17:04:09 +344|Charles|cgardner9j@purevolume.com|197.78.240.240|1998-02-11 06:47:07 +345|Robert|rhenderson9k@friendfeed.com|215.84.180.88|2002-05-10 15:33:14 +346|Chris|cgray9l@4shared.com|249.70.192.240|1998-10-03 16:43:42 +347|Gloria|ghayes9m@hibu.com|81.103.138.26|1999-12-26 11:23:13 +348|Edward|eramirez9n@shareasale.com|38.136.90.136|2010-08-19 08:01:06 +349|Cheryl|cbutler9o@google.ca|172.180.78.172|1995-05-27 20:03:52 +350|Margaret|mwatkins9p@sfgate.com|3.20.198.6|2014-10-21 01:42:58 +351|Rebecca|rwelch9q@examiner.com|45.81.42.208|2001-02-08 12:19:06 +352|Joe|jpalmer9r@phpbb.com|163.202.92.190|1970-01-05 11:29:12 +353|Sandra|slewis9s@dyndns.org|77.215.201.236|1974-01-05 07:04:04 +354|Todd|tfranklin9t@g.co|167.125.181.82|2009-09-28 10:13:58 +355|Joseph|jlewis9u@webmd.com|244.204.6.11|1990-10-21 15:49:57 +356|Alan|aknight9v@nydailynews.com|152.197.95.83|1996-03-08 08:43:17 +357|Sharon|sdean9w@123-reg.co.uk|237.46.40.26|1985-11-30 12:09:24 +358|Annie|awright9x@cafepress.com|190.45.231.111|2000-08-24 11:56:06 +359|Diane|dhamilton9y@youtube.com|85.146.171.196|2015-02-24 02:03:57 +360|Antonio|alane9z@auda.org.au|61.63.146.203|2001-05-13 03:43:34 +361|Matthew|mallena0@hhs.gov|29.97.32.19|1973-02-19 23:43:32 +362|Bonnie|bfowlera1@soup.io|251.216.99.53|2013-08-01 15:35:41 +363|Margaret|mgraya2@examiner.com|69.255.151.79|1998-01-23 22:24:59 +364|Joan|jwagnera3@printfriendly.com|192.166.120.61|1973-07-13 00:30:22 +365|Catherine|cperkinsa4@nytimes.com|58.21.24.214|2006-11-19 11:52:26 +366|Mark|mcartera5@cpanel.net|220.33.102.142|2007-09-09 09:43:27 +367|Paula|ppricea6@msn.com|36.182.238.124|2009-11-11 09:13:05 +368|Catherine|cgreena7@army.mil|228.203.58.19|2005-08-09 16:52:15 +369|Helen|hhamiltona8@symantec.com|155.56.194.99|2005-02-01 05:40:36 +370|Jane|jmeyera9@ezinearticles.com|133.244.113.213|2013-11-06 22:10:23 +371|Wanda|wevansaa@bloglovin.com|233.125.192.48|1994-12-26 23:43:42 +372|Mark|mmarshallab@tumblr.com|114.74.60.47|2016-09-29 18:03:01 +373|Andrew|amartinezac@google.cn|182.54.37.130|1976-06-06 17:04:17 +374|Helen|hmoralesad@e-recht24.de|42.45.4.123|1977-03-28 19:06:59 +375|Bonnie|bstoneae@php.net|196.149.79.137|1970-02-05 17:05:58 +376|Douglas|dfreemanaf@nasa.gov|215.65.124.218|2008-11-20 21:51:55 +377|Willie|wwestag@army.mil|35.189.92.118|1992-07-24 05:08:08 +378|Cheryl|cwagnerah@upenn.edu|228.239.222.141|2010-01-25 06:29:01 +379|Sandra|swardai@baidu.com|63.11.113.240|1985-05-23 08:07:37 +380|Julie|jrobinsonaj@jugem.jp|110.58.202.50|2015-03-05 09:42:07 +381|Larry|lwagnerak@shop-pro.jp|98.234.25.24|1975-07-22 22:22:02 +382|Juan|jcastilloal@yelp.com|24.174.74.202|2007-01-17 09:32:43 +383|Donna|dfrazieram@artisteer.com|205.26.147.45|1990-02-11 20:55:46 +384|Rachel|rfloresan@w3.org|109.60.216.162|1983-05-22 22:42:18 +385|Robert|rreynoldsao@theguardian.com|122.65.209.130|2009-05-01 18:02:51 +386|Donald|dbradleyap@etsy.com|42.54.35.126|1997-01-16 16:31:52 +387|Rachel|rfisheraq@nih.gov|160.243.250.45|2006-02-17 22:05:49 +388|Nicholas|nhamiltonar@princeton.edu|156.211.37.111|1976-06-21 03:36:29 +389|Timothy|twhiteas@ca.gov|36.128.23.70|1975-09-24 03:51:18 +390|Diana|dbradleyat@odnoklassniki.ru|44.102.120.184|1983-04-27 09:02:50 +391|Billy|bfowlerau@jimdo.com|91.200.68.196|1995-01-29 06:57:35 +392|Bruce|bandrewsav@ucoz.com|48.12.101.125|1992-10-27 04:31:39 
+393|Linda|lromeroaw@usa.gov|100.71.233.19|1992-06-08 15:13:18 +394|Debra|dwatkinsax@ucoz.ru|52.160.233.193|2001-11-11 06:51:01 +395|Katherine|kburkeay@wix.com|151.156.242.141|2010-06-14 19:54:28 +396|Martha|mharrisonaz@youku.com|21.222.10.199|1989-10-16 14:17:55 +397|Dennis|dwellsb0@youtu.be|103.16.29.3|1985-12-21 06:05:51 +398|Gloria|grichardsb1@bloglines.com|90.147.120.234|1982-08-27 01:04:43 +399|Brenda|bfullerb2@t.co|33.253.63.90|2011-04-20 05:00:35 +400|Larry|lhendersonb3@disqus.com|88.95.132.128|1982-08-31 02:15:12 +401|Richard|rlarsonb4@wisc.edu|13.48.231.150|1979-04-15 14:08:09 +402|Terry|thuntb5@usa.gov|65.91.103.240|1998-05-15 11:50:49 +403|Harry|hburnsb6@nasa.gov|33.38.21.244|1981-04-12 14:02:20 +404|Diana|dellisb7@mlb.com|218.229.81.135|1997-01-29 00:17:25 +405|Jack|jburkeb8@tripadvisor.com|210.227.182.216|1984-03-09 17:24:03 +406|Julia|jlongb9@fotki.com|10.210.12.104|2005-10-26 03:54:13 +407|Lois|lscottba@msu.edu|188.79.136.138|1973-02-02 18:40:39 +408|Sandra|shendersonbb@shareasale.com|114.171.220.108|2012-06-09 18:22:26 +409|Irene|isanchezbc@cdbaby.com|109.255.50.119|1983-09-28 21:11:27 +410|Emily|ebrooksbd@bandcamp.com|227.81.93.79|1970-08-31 21:08:01 +411|Michelle|mdiazbe@businessweek.com|236.249.6.226|1993-05-22 08:07:07 +412|Tammy|tbennettbf@wisc.edu|145.253.239.152|1978-12-31 20:24:51 +413|Christine|cgreenebg@flickr.com|97.25.140.118|1978-07-17 12:55:30 +414|Patricia|pgarzabh@tuttocitta.it|139.246.192.211|1984-02-27 13:40:08 +415|Kimberly|kromerobi@aol.com|73.56.88.247|1976-09-16 14:22:04 +416|George|gjohnstonbj@fda.gov|240.36.245.185|1979-07-24 14:36:02 +417|Eugene|efullerbk@sciencedaily.com|42.38.105.140|2012-09-12 01:56:41 +418|Andrea|astevensbl@goo.gl|31.152.207.204|1979-05-24 11:06:21 +419|Shirley|sreidbm@scientificamerican.com|103.60.31.241|1984-02-23 04:07:41 +420|Terry|tmorenobn@blinklist.com|92.161.34.42|1994-06-25 14:01:35 +421|Christopher|cmorenobo@go.com|158.86.176.82|1973-09-05 09:18:47 +422|Dennis|dhansonbp@ning.com|40.160.81.75|1982-01-20 10:19:41 +423|Beverly|brussellbq@de.vu|138.32.56.204|1997-11-06 07:20:19 +424|Howard|hparkerbr@163.com|103.171.134.171|2015-06-24 15:37:10 +425|Helen|hmccoybs@fema.gov|61.200.4.71|1995-06-20 08:59:10 +426|Ann|ahudsonbt@cafepress.com|239.187.71.125|1977-04-11 07:59:28 +427|Tina|twestbu@nhs.uk|80.213.117.74|1992-08-19 05:54:44 +428|Terry|tnguyenbv@noaa.gov|21.93.118.95|1991-09-19 23:22:55 +429|Ashley|aburtonbw@wix.com|233.176.205.109|2009-11-10 05:01:20 +430|Eric|emyersbx@1und1.de|168.91.212.67|1987-08-10 07:16:20 +431|Barbara|blittleby@lycos.com|242.14.189.239|2008-08-02 12:13:04 +432|Sean|sevansbz@instagram.com|14.39.177.13|2007-04-16 17:28:49 +433|Shirley|sburtonc0@newsvine.com|34.107.138.76|1980-12-10 02:19:29 +434|Patricia|pfreemanc1@so-net.ne.jp|219.213.142.117|1987-03-01 02:25:45 +435|Paula|pfosterc2@vkontakte.ru|227.14.138.141|1972-09-22 12:59:34 +436|Nicole|nstewartc3@1688.com|8.164.23.115|1998-10-27 00:10:17 +437|Earl|ekimc4@ovh.net|100.26.244.177|2013-01-22 10:05:46 +438|Beverly|breedc5@reuters.com|174.12.226.27|1974-09-22 07:29:36 +439|Lawrence|lbutlerc6@a8.net|105.164.42.164|1992-06-05 00:43:40 +440|Charles|cmoorec7@ucoz.com|252.197.131.69|1990-04-09 02:34:05 +441|Alice|alawsonc8@live.com|183.73.220.232|1989-02-28 09:11:04 +442|Dorothy|dcarpenterc9@arstechnica.com|241.47.200.14|2005-05-02 19:57:21 +443|Carolyn|cfowlerca@go.com|213.109.55.202|1978-09-10 20:18:20 +444|Anthony|alongcb@free.fr|169.221.158.204|1984-09-13 01:59:23 +445|Annie|amoorecc@e-recht24.de|50.34.148.61|2009-03-26 03:41:07 
+446|Carlos|candrewscd@ihg.com|236.69.59.212|1972-03-29 22:42:48 +447|Beverly|bramosce@google.ca|164.250.184.49|1982-11-10 04:34:01 +448|Teresa|tlongcf@umich.edu|174.88.53.223|1987-05-17 12:48:00 +449|Roy|rboydcg@uol.com.br|91.58.243.215|1974-06-16 17:59:54 +450|Ashley|afieldsch@tamu.edu|130.138.11.126|1983-09-15 05:52:36 +451|Judith|jhawkinsci@cmu.edu|200.187.103.245|2003-10-22 12:24:03 +452|Rebecca|rwestcj@ocn.ne.jp|72.85.3.103|1980-11-13 11:01:26 +453|Raymond|rporterck@infoseek.co.jp|146.33.216.151|1982-05-17 23:58:03 +454|Janet|jmarshallcl@odnoklassniki.ru|52.46.193.166|1998-10-04 00:02:21 +455|Shirley|speterscm@salon.com|248.126.31.15|1987-01-30 06:04:59 +456|Annie|abowmancn@economist.com|222.213.248.59|2006-03-14 23:52:59 +457|Jean|jlarsonco@blogspot.com|71.41.25.195|2007-09-08 23:49:45 +458|Phillip|pmoralescp@stanford.edu|74.119.87.28|2011-03-14 20:25:40 +459|Norma|nrobinsoncq@economist.com|28.225.21.54|1989-10-21 01:22:43 +460|Kimberly|kclarkcr@dion.ne.jp|149.171.132.153|2008-06-27 02:27:30 +461|Ruby|rmorriscs@ucla.edu|177.85.163.249|2016-01-28 16:43:44 +462|Jonathan|jcastilloct@tripod.com|78.4.28.77|2000-05-24 17:33:06 +463|Edward|ebryantcu@jigsy.com|140.31.98.193|1992-12-17 08:32:47 +464|Chris|chamiltoncv@eepurl.com|195.171.234.206|1970-12-05 03:42:19 +465|Michael|mweavercw@reference.com|7.233.133.213|1987-03-29 02:30:54 +466|Howard|hlawrencecx@businessweek.com|113.225.124.224|1990-07-30 07:20:57 +467|Philip|phowardcy@comsenz.com|159.170.247.249|2010-10-15 10:18:37 +468|Mary|mmarshallcz@xing.com|125.132.189.70|2007-07-19 13:48:47 +469|Scott|salvarezd0@theguardian.com|78.49.103.230|1987-10-31 06:10:44 +470|Wayne|wcarrolld1@blog.com|238.1.120.204|1980-11-19 03:26:10 +471|Jennifer|jwoodsd2@multiply.com|92.20.224.49|2010-05-06 22:17:04 +472|Raymond|rwelchd3@toplist.cz|176.158.35.240|2007-12-12 19:02:51 +473|Steven|sdixond4@wisc.edu|167.55.237.52|1984-05-05 11:44:37 +474|Ralph|rjamesd5@ameblo.jp|241.190.50.133|2000-07-06 08:44:37 +475|Jason|jrobinsond6@hexun.com|138.119.139.56|2006-02-03 05:27:45 +476|Doris|dwoodd7@fema.gov|180.220.156.190|1978-05-11 20:14:20 +477|Elizabeth|eberryd8@youtu.be|74.188.53.229|2006-11-18 08:29:06 +478|Irene|igilbertd9@privacy.gov.au|194.152.218.1|1985-09-17 02:46:52 +479|Jessica|jdeanda@ameblo.jp|178.103.93.118|1974-06-07 19:04:05 +480|Rachel|ralvarezdb@phoca.cz|17.22.223.174|1999-03-08 02:43:25 +481|Kenneth|kthompsondc@shinystat.com|229.119.91.234|2007-05-15 13:17:32 +482|Harold|hmurraydd@parallels.com|133.26.188.80|1993-11-15 03:42:07 +483|Paula|phowellde@samsung.com|34.215.28.216|1993-11-29 15:55:00 +484|Ruth|rpiercedf@tripadvisor.com|111.30.130.123|1986-08-17 10:19:38 +485|Phyllis|paustindg@vk.com|50.84.34.178|1994-04-13 03:05:24 +486|Laura|lfosterdh@usnews.com|37.8.101.33|2001-06-30 08:58:59 +487|Eric|etaylordi@com.com|103.183.253.45|2006-09-15 20:18:46 +488|Doris|driveradj@prweb.com|247.16.2.199|1989-05-08 09:27:09 +489|Ryan|rhughesdk@elegantthemes.com|103.234.153.232|1989-08-01 18:36:06 +490|Steve|smoralesdl@jigsy.com|3.76.84.207|2011-03-13 17:01:05 +491|Louis|lsullivandm@who.int|78.135.44.208|1975-11-26 16:01:23 +492|Catherine|ctuckerdn@seattletimes.com|93.137.106.21|1990-03-13 16:14:56 +493|Ann|adixondo@gmpg.org|191.136.222.111|2002-06-05 14:22:18 +494|Johnny|jhartdp@amazon.com|103.252.198.39|1988-07-30 23:54:49 +495|Susan|srichardsdq@skype.com|126.247.192.11|2005-01-09 12:08:14 +496|Brenda|bparkerdr@skype.com|63.232.216.86|1974-05-18 05:58:29 +497|Tammy|tmurphyds@constantcontact.com|56.56.37.112|2014-08-05 18:22:25 
+498|Larry|lhayesdt@wordpress.com|162.146.13.46|1997-02-26 14:01:53 +499||ethomasdu@hhs.gov|6.241.88.250|2007-09-14 13:03:34 +500|Paula|pshawdv@networksolutions.com|123.27.47.249|2003-10-30 21:19:20 +""".lstrip() + + seeds__expected_sql = """ create table {schema}.seed_expected ( seed_id INTEGER, diff --git a/tests/adapter/dbt/tests/adapter/simple_seed/test_seed.py b/tests/functional/adapter/simple_seed/test_seed.py similarity index 73% rename from tests/adapter/dbt/tests/adapter/simple_seed/test_seed.py rename to tests/functional/adapter/simple_seed/test_seed.py index 40b56e61efb..536ed7ad017 100644 --- a/tests/adapter/dbt/tests/adapter/simple_seed/test_seed.py +++ b/tests/functional/adapter/simple_seed/test_seed.py @@ -1,34 +1,34 @@ import csv -import pytest - from codecs import BOM_UTF8 from pathlib import Path +import pytest + from dbt.tests.util import ( + check_relations_equal, + check_table_does_exist, + check_table_does_not_exist, copy_file, mkdir, + read_file, rm_dir, run_dbt, - read_file, - check_relations_equal, - check_table_does_exist, - check_table_does_not_exist, ) - -from dbt.tests.adapter.simple_seed.fixtures import ( +from tests.functional.adapter.simple_seed.fixtures import ( models__downstream_from_seed_actual, + models__downstream_from_seed_pipe_separated, models__from_basic_seed, ) - -from dbt.tests.adapter.simple_seed.seeds import ( +from tests.functional.adapter.simple_seed.seeds import ( seed__actual_csv, - seeds__expected_sql, - seeds__enabled_in_config_csv, + seed__unicode_csv, + seed__with_dots_csv, seeds__disabled_in_config_csv, + seeds__enabled_in_config_csv, + seeds__expected_sql, + seeds__pipe_separated_csv, seeds__tricky_csv, seeds__wont_parse_csv, - seed__unicode_csv, - seed__with_dots_csv, ) @@ -163,6 +163,84 @@ def test_simple_seed_with_drop_and_schema(self, project): check_relations_equal(project.adapter, [f"{custom_schema}.seed_actual", "seed_expected"]) +class SeedUniqueDelimiterTestBase(SeedConfigBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": {"quote_columns": False, "delimiter": "|"}, + } + + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + """Create table for ensuring seeds and models used in tests build correctly""" + project.run_sql(seeds__expected_sql) + + @pytest.fixture(scope="class") + def seeds(self, test_data_dir): + return {"seed_pipe_separated.csv": seeds__pipe_separated_csv} + + @pytest.fixture(scope="class") + def models(self): + return { + "models__downstream_from_seed_pipe_separated.sql": models__downstream_from_seed_pipe_separated, + } + + def _build_relations_for_test(self, project): + """The testing environment needs seeds and models to interact with""" + seed_result = run_dbt(["seed"]) + assert len(seed_result) == 1 + check_relations_equal(project.adapter, ["seed_expected", "seed_pipe_separated"]) + + run_result = run_dbt() + assert len(run_result) == 1 + check_relations_equal( + project.adapter, ["models__downstream_from_seed_pipe_separated", "seed_expected"] + ) + + def _check_relation_end_state(self, run_result, project, exists: bool): + assert len(run_result) == 1 + check_relations_equal(project.adapter, ["seed_pipe_separated", "seed_expected"]) + if exists: + check_table_does_exist(project.adapter, "models__downstream_from_seed_pipe_separated") + else: + check_table_does_not_exist( + project.adapter, "models__downstream_from_seed_pipe_separated" + ) + + +class TestSeedWithUniqueDelimiter(SeedUniqueDelimiterTestBase): + def 
test_seed_with_unique_delimiter(self, project): + """Testing correct run of seeds with a unique delimiter (pipe in this case)""" + self._build_relations_for_test(project) + self._check_relation_end_state(run_result=run_dbt(["seed"]), project=project, exists=True) + + +class TestSeedWithWrongDelimiter(SeedUniqueDelimiterTestBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": {"quote_columns": False, "delimiter": ";"}, + } + + def test_seed_with_wrong_delimiter(self, project): + """Testing failure of running dbt seed with a wrongly configured delimiter""" + seed_result = run_dbt(["seed"], expect_pass=False) + assert "syntax error" in seed_result.results[0].message.lower() + + +class TestSeedWithEmptyDelimiter(SeedUniqueDelimiterTestBase): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": {"quote_columns": False, "delimiter": ""}, + } + + def test_seed_with_empty_delimiter(self, project): + """Testing failure of running dbt seed with an empty configured delimiter value""" + seed_result = run_dbt(["seed"], expect_pass=False) + assert "compilation error" in seed_result.results[0].message.lower() + + class TestSimpleSeedEnabledViaConfig(object): @pytest.fixture(scope="session") def seeds(self): @@ -285,3 +363,26 @@ def seeds(self, test_data_dir): def test_simple_seed(self, project): results = run_dbt(["seed"]) assert len(results) == 3 + + +class BaseTestEmptySeed: + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "seeds": { + "quote_columns": False, + }, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"empty_with_header.csv": "a,b,c"} + + def test_empty_seeds(self, project): + # Should create an empty table and not fail + results = run_dbt(["seed"]) + assert len(results) == 1 + + +class TestEmptySeed(BaseTestEmptySeed): + pass diff --git a/tests/adapter/dbt/tests/adapter/simple_seed/test_seed_type_override.py b/tests/functional/adapter/simple_seed/test_seed_type_override.py similarity index 94% rename from tests/adapter/dbt/tests/adapter/simple_seed/test_seed_type_override.py rename to tests/functional/adapter/simple_seed/test_seed_type_override.py index bd6333e607c..d4142f860bb 100644 --- a/tests/adapter/dbt/tests/adapter/simple_seed/test_seed_type_override.py +++ b/tests/functional/adapter/simple_seed/test_seed_type_override.py @@ -1,15 +1,13 @@ import pytest from dbt.tests.util import run_dbt - -from dbt.tests.adapter.simple_seed.fixtures import ( +from tests.functional.adapter.simple_seed.fixtures import ( macros__schema_test, properties__schema_yml, ) - -from dbt.tests.adapter.simple_seed.seeds import ( - seeds__enabled_in_config_csv, +from tests.functional.adapter.simple_seed.seeds import ( seeds__disabled_in_config_csv, + seeds__enabled_in_config_csv, seeds__tricky_csv, ) diff --git a/tests/functional/adapter/simple_snapshot/__init__.py b/tests/functional/adapter/simple_snapshot/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/simple_snapshot/common.py b/tests/functional/adapter/simple_snapshot/common.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_snapshot/common.py rename to tests/functional/adapter/simple_snapshot/common.py index 726cc292e02..8eafeb90cb4 100644 --- a/tests/adapter/dbt/tests/adapter/simple_snapshot/common.py +++ b/tests/functional/adapter/simple_snapshot/common.py @@ -1,7 +1,7 @@ from typing import Dict, List -from dbt.tests.util import 
relation_from_name from dbt.tests.fixtures.project import TestProjInfo +from dbt.tests.util import relation_from_name def get_records( diff --git a/tests/adapter/dbt/tests/adapter/simple_snapshot/seeds.py b/tests/functional/adapter/simple_snapshot/seeds.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_snapshot/seeds.py rename to tests/functional/adapter/simple_snapshot/seeds.py diff --git a/tests/adapter/dbt/tests/adapter/simple_snapshot/snapshots.py b/tests/functional/adapter/simple_snapshot/snapshots.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/simple_snapshot/snapshots.py rename to tests/functional/adapter/simple_snapshot/snapshots.py diff --git a/tests/adapter/dbt/tests/adapter/simple_snapshot/test_snapshot.py b/tests/functional/adapter/simple_snapshot/test_snapshot.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/simple_snapshot/test_snapshot.py rename to tests/functional/adapter/simple_snapshot/test_snapshot.py index 75a2e2b3e28..05f08bb3928 100644 --- a/tests/adapter/dbt/tests/adapter/simple_snapshot/test_snapshot.py +++ b/tests/functional/adapter/simple_snapshot/test_snapshot.py @@ -1,10 +1,9 @@ +from typing import Dict, Iterable, List + import pytest -from typing import Dict, List, Iterable from dbt.tests.util import run_dbt - -from dbt.tests.adapter.simple_snapshot import common -from dbt.tests.adapter.simple_snapshot import seeds, snapshots +from tests.functional.adapter.simple_snapshot import common, seeds, snapshots MODEL_FACT_SQL = """ {{ config(materialized="table") }} diff --git a/tests/functional/adapter/store_test_failures_tests/__init__.py b/tests/functional/adapter/store_test_failures_tests/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/adapter/store_test_failures_tests/_files.py b/tests/functional/adapter/store_test_failures_tests/_files.py new file mode 100644 index 00000000000..62c4911c8d5 --- /dev/null +++ b/tests/functional/adapter/store_test_failures_tests/_files.py @@ -0,0 +1,150 @@ +SEED__CHIPMUNKS = """ +name,shirt +alvin,red +simon,blue +theodore,green +dave, +""".strip() + + +MODEL__CHIPMUNKS = """ +{{ config(materialized='table') }} +select * +from {{ ref('chipmunks_stage') }} +""" + + +TEST__VIEW_TRUE = """ +{{ config(store_failures_as="view", store_failures=True) }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__VIEW_FALSE = """ +{{ config(store_failures_as="view", store_failures=False) }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__VIEW_UNSET = """ +{{ config(store_failures_as="view") }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__TABLE_TRUE = """ +{{ config(store_failures_as="table", store_failures=True) }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__TABLE_FALSE = """ +{{ config(store_failures_as="table", store_failures=False) }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__TABLE_UNSET = """ +{{ config(store_failures_as="table") }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__EPHEMERAL_TRUE = """ +{{ config(store_failures_as="ephemeral", store_failures=True) }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__EPHEMERAL_FALSE = """ +{{ config(store_failures_as="ephemeral", store_failures=False) }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__EPHEMERAL_UNSET = """ +{{ 
config(store_failures_as="ephemeral") }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__UNSET_TRUE = """ +{{ config(store_failures=True) }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__UNSET_FALSE = """ +{{ config(store_failures=False) }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__UNSET_UNSET = """ +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +TEST__VIEW_UNSET_PASS = """ +{{ config(store_failures_as="view") }} +select * +from {{ ref('chipmunks') }} +where shirt = 'purple' +""" + + +TEST__ERROR_UNSET = """ +{{ config(store_failures_as="error") }} +select * +from {{ ref('chipmunks') }} +where shirt = 'green' +""" + + +SCHEMA_YML = """ +version: 2 + +models: + - name: chipmunks + columns: + - name: name + data_tests: + - not_null: + store_failures_as: view + - accepted_values: + store_failures: false + store_failures_as: table + values: + - alvin + - simon + - theodore + - name: shirt + data_tests: + - not_null: + store_failures: true + store_failures_as: view +""" diff --git a/tests/functional/adapter/store_test_failures_tests/basic.py b/tests/functional/adapter/store_test_failures_tests/basic.py new file mode 100644 index 00000000000..88196d1f308 --- /dev/null +++ b/tests/functional/adapter/store_test_failures_tests/basic.py @@ -0,0 +1,303 @@ +from collections import namedtuple +from typing import Set + +import pytest + +from dbt.artifacts.schemas.results import TestStatus +from dbt.tests.util import check_relation_types, run_dbt +from tests.functional.adapter.store_test_failures_tests import _files + +TestResult = namedtuple("TestResult", ["name", "status", "type"]) + + +class StoreTestFailuresAsBase: + seed_table: str = "chipmunks_stage" + model_table: str = "chipmunks" + audit_schema_suffix: str = "dbt_test__audit" + + audit_schema: str + + @pytest.fixture(scope="class", autouse=True) + def setup_class(self, project): + # the seed doesn't get touched, load it once + run_dbt(["seed"]) + yield + + @pytest.fixture(scope="function", autouse=True) + def setup_method(self, project, setup_class): + # make sure the model is always right + run_dbt(["run"]) + + # the name of the audit schema doesn't change in a class, but this doesn't run at the class level + self.audit_schema = f"{project.test_schema}_{self.audit_schema_suffix}" + yield + + @pytest.fixture(scope="function", autouse=True) + def teardown_method(self, project): + yield + + # clear out the audit schema after each test case + with project.adapter.connection_named("__test"): + audit_schema = project.adapter.Relation.create( + database=project.database, schema=self.audit_schema + ) + project.adapter.drop_schema(audit_schema) + + @pytest.fixture(scope="class") + def seeds(self): + return {f"{self.seed_table}.csv": _files.SEED__CHIPMUNKS} + + @pytest.fixture(scope="class") + def models(self): + return {f"{self.model_table}.sql": _files.MODEL__CHIPMUNKS} + + def run_and_assert( + self, project, expected_results: Set[TestResult], expect_pass: bool = False + ) -> None: + """ + Run `dbt test` and assert the results are the expected results + + Args: + project: the `project` fixture; needed since we invoke `run_dbt` + expected_results: the expected results of the tests as instances of TestResult + expect_pass: passed directly into `run_dbt`; this is only needed if all expected results are tests that pass + + Returns: + the row count as an integer + """ + # run the tests + results = run_dbt(["test"], 
expect_pass=expect_pass) + + # show that the statuses are what we expect + actual = {(result.node.name, result.status) for result in results} + expected = {(result.name, result.status) for result in expected_results} + assert actual == expected + + # show that the results are persisted in the correct database objects + check_relation_types( + project.adapter, {result.name: result.type for result in expected_results} + ) + + +class StoreTestFailuresAsInteractions(StoreTestFailuresAsBase): + """ + These scenarios test interactions between `store_failures` and `store_failures_as` at the model level. + Granularity (e.g. setting one at the project level and another at the model level) is not considered. + + Test Scenarios: + + - If `store_failures_as = "view"` and `store_failures = True`, then store the failures in a view. + - If `store_failures_as = "view"` and `store_failures = False`, then store the failures in a view. + - If `store_failures_as = "view"` and `store_failures` is not set, then store the failures in a view. + - If `store_failures_as = "table"` and `store_failures = True`, then store the failures in a table. + - If `store_failures_as = "table"` and `store_failures = False`, then store the failures in a table. + - If `store_failures_as = "table"` and `store_failures` is not set, then store the failures in a table. + - If `store_failures_as = "ephemeral"` and `store_failures = True`, then do not store the failures. + - If `store_failures_as = "ephemeral"` and `store_failures = False`, then do not store the failures. + - If `store_failures_as = "ephemeral"` and `store_failures` is not set, then do not store the failures. + - If `store_failures_as` is not set and `store_failures = True`, then store the failures in a table. + - If `store_failures_as` is not set and `store_failures = False`, then do not store the failures. + - If `store_failures_as` is not set and `store_failures` is not set, then do not store the failures. 
+ """ + + @pytest.fixture(scope="class") + def tests(self): + return { + "view_unset_pass.sql": _files.TEST__VIEW_UNSET_PASS, # control + "view_true.sql": _files.TEST__VIEW_TRUE, + "view_false.sql": _files.TEST__VIEW_FALSE, + "view_unset.sql": _files.TEST__VIEW_UNSET, + "table_true.sql": _files.TEST__TABLE_TRUE, + "table_false.sql": _files.TEST__TABLE_FALSE, + "table_unset.sql": _files.TEST__TABLE_UNSET, + "ephemeral_true.sql": _files.TEST__EPHEMERAL_TRUE, + "ephemeral_false.sql": _files.TEST__EPHEMERAL_FALSE, + "ephemeral_unset.sql": _files.TEST__EPHEMERAL_UNSET, + "unset_true.sql": _files.TEST__UNSET_TRUE, + "unset_false.sql": _files.TEST__UNSET_FALSE, + "unset_unset.sql": _files.TEST__UNSET_UNSET, + } + + def test_tests_run_successfully_and_are_stored_as_expected(self, project): + expected_results = { + TestResult("view_unset_pass", TestStatus.Pass, "view"), # control + TestResult("view_true", TestStatus.Fail, "view"), + TestResult("view_false", TestStatus.Fail, "view"), + TestResult("view_unset", TestStatus.Fail, "view"), + TestResult("table_true", TestStatus.Fail, "table"), + TestResult("table_false", TestStatus.Fail, "table"), + TestResult("table_unset", TestStatus.Fail, "table"), + TestResult("ephemeral_true", TestStatus.Fail, None), + TestResult("ephemeral_false", TestStatus.Fail, None), + TestResult("ephemeral_unset", TestStatus.Fail, None), + TestResult("unset_true", TestStatus.Fail, "table"), + TestResult("unset_false", TestStatus.Fail, None), + TestResult("unset_unset", TestStatus.Fail, None), + } + self.run_and_assert(project, expected_results) + + +class StoreTestFailuresAsProjectLevelOff(StoreTestFailuresAsBase): + """ + These scenarios test that `store_failures_as` at the model level takes precedence over `store_failures` + at the project level. + + Test Scenarios: + + - If `store_failures = False` in the project and `store_failures_as = "view"` in the model, + then store the failures in a view. + - If `store_failures = False` in the project and `store_failures_as = "table"` in the model, + then store the failures in a table. + - If `store_failures = False` in the project and `store_failures_as = "ephemeral"` in the model, + then do not store the failures. + - If `store_failures = False` in the project and `store_failures_as` is not set, + then do not store the failures. + """ + + @pytest.fixture(scope="class") + def tests(self): + return { + "results_view.sql": _files.TEST__VIEW_UNSET, + "results_table.sql": _files.TEST__TABLE_UNSET, + "results_ephemeral.sql": _files.TEST__EPHEMERAL_UNSET, + "results_unset.sql": _files.TEST__UNSET_UNSET, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"data_tests": {"store_failures": False}} + + def test_tests_run_successfully_and_are_stored_as_expected(self, project): + expected_results = { + TestResult("results_view", TestStatus.Fail, "view"), + TestResult("results_table", TestStatus.Fail, "table"), + TestResult("results_ephemeral", TestStatus.Fail, None), + TestResult("results_unset", TestStatus.Fail, None), + } + self.run_and_assert(project, expected_results) + + +class StoreTestFailuresAsProjectLevelView(StoreTestFailuresAsBase): + """ + These scenarios test that `store_failures_as` at the project level takes precedence over `store_failures` + at the model level. + + Test Scenarios: + + - If `store_failures_as = "view"` in the project and `store_failures = False` in the model, + then store the failures in a view. 
+ - If `store_failures_as = "view"` in the project and `store_failures = True` in the model, + then store the failures in a view. + - If `store_failures_as = "view"` in the project and `store_failures` is not set, + then store the failures in a view. + """ + + @pytest.fixture(scope="class") + def tests(self): + return { + "results_true.sql": _files.TEST__VIEW_TRUE, + "results_false.sql": _files.TEST__VIEW_FALSE, + "results_unset.sql": _files.TEST__VIEW_UNSET, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"data_tests": {"store_failures_as": "view"}} + + def test_tests_run_successfully_and_are_stored_as_expected(self, project): + expected_results = { + TestResult("results_true", TestStatus.Fail, "view"), + TestResult("results_false", TestStatus.Fail, "view"), + TestResult("results_unset", TestStatus.Fail, "view"), + } + self.run_and_assert(project, expected_results) + + +class StoreTestFailuresAsProjectLevelEphemeral(StoreTestFailuresAsBase): + """ + This scenario tests that `store_failures_as` at the project level takes precedence over `store_failures` + at the model level. In particular, setting `store_failures_as = "ephemeral"` at the project level + turns off `store_failures` regardless of the setting of `store_failures` anywhere. Turning `store_failures` + back on at the model level requires `store_failures_as` to be set at the model level. + + Test Scenarios: + + - If `store_failures_as = "ephemeral"` in the project and `store_failures = True` in the project, + then do not store the failures. + - If `store_failures_as = "ephemeral"` in the project and `store_failures = True` in the project and the model, + then do not store the failures. + - If `store_failures_as = "ephemeral"` in the project and `store_failures_as = "view"` in the model, + then store the failures in a view. + """ + + @pytest.fixture(scope="class") + def tests(self): + return { + "results_unset.sql": _files.TEST__UNSET_UNSET, + "results_true.sql": _files.TEST__UNSET_TRUE, + "results_view.sql": _files.TEST__VIEW_UNSET, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"data_tests": {"store_failures_as": "ephemeral", "store_failures": True}} + + def test_tests_run_successfully_and_are_stored_as_expected(self, project): + expected_results = { + TestResult("results_unset", TestStatus.Fail, None), + TestResult("results_true", TestStatus.Fail, None), + TestResult("results_view", TestStatus.Fail, "view"), + } + self.run_and_assert(project, expected_results) + + +class StoreTestFailuresAsGeneric(StoreTestFailuresAsBase): + """ + This tests that `store_failures_as` works with generic tests. + Test Scenarios: + + - If `store_failures_as = "view"` is used with the `not_null` test in the model, then store the failures in a view. 
+ """ + + @pytest.fixture(scope="class") + def models(self): + return { + f"{self.model_table}.sql": _files.MODEL__CHIPMUNKS, + "schema.yml": _files.SCHEMA_YML, + } + + def test_tests_run_successfully_and_are_stored_as_expected(self, project): + expected_results = { + # `store_failures` unset, `store_failures_as = "view"` + TestResult("not_null_chipmunks_name", TestStatus.Pass, "view"), + # `store_failures = False`, `store_failures_as = "table"` + TestResult( + "accepted_values_chipmunks_name__alvin__simon__theodore", TestStatus.Fail, "table" + ), + # `store_failures = True`, `store_failures_as = "view"` + TestResult("not_null_chipmunks_shirt", TestStatus.Fail, "view"), + } + self.run_and_assert(project, expected_results) + + +class StoreTestFailuresAsExceptions(StoreTestFailuresAsBase): + """ + This tests that `store_failures_as` raises exceptions in appropriate scenarios. + Test Scenarios: + + - If `store_failures_as = "error"`, a helpful exception is raised. + """ + + @pytest.fixture(scope="class") + def tests(self): + return { + "store_failures_as_error.sql": _files.TEST__ERROR_UNSET, + } + + def test_tests_run_unsuccessfully_and_raise_appropriate_exception(self, project): + results = run_dbt(["test"], expect_pass=False) + assert len(results) == 1 + result = results[0] + assert "Compilation Error" in result.message + assert "'error' is not a valid value" in result.message + assert "Accepted values are: ['ephemeral', 'table', 'view']" in result.message diff --git a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/fixtures.py b/tests/functional/adapter/store_test_failures_tests/fixtures.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/store_test_failures_tests/fixtures.py rename to tests/functional/adapter/store_test_failures_tests/fixtures.py index dae8530135e..b1b9f602665 100644 --- a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/fixtures.py +++ b/tests/functional/adapter/store_test_failures_tests/fixtures.py @@ -53,19 +53,19 @@ - name: fine_model columns: - name: id - tests: + data_tests: - unique - not_null - name: problematic_model columns: - name: id - tests: + data_tests: - unique: store_failures: true - not_null - name: first_name - tests: + data_tests: # test truncation of really long test name - accepted_values: values: @@ -83,7 +83,7 @@ - name: fine_model_but_with_a_no_good_very_long_name columns: - name: quite_long_column_name - tests: + data_tests: # test truncation of really long test name with builtin - unique """ diff --git a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py rename to tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py index 4074ffd7965..40546cea9e9 100644 --- a/tests/adapter/dbt/tests/adapter/store_test_failures_tests/test_store_test_failures.py +++ b/tests/functional/adapter/store_test_failures_tests/test_store_test_failures.py @@ -1,20 +1,16 @@ import pytest -from dbt.tests.util import ( - check_relations_equal, - run_dbt, -) - -from dbt.tests.adapter.store_test_failures_tests.fixtures import ( - seeds__people, +from dbt.tests.util import check_relations_equal, run_dbt +from tests.functional.adapter.store_test_failures_tests.fixtures import ( + models__file_model_but_with_a_no_good_very_long_name, + models__fine_model, + models__problematic_model, + 
properties__schema_yml, seeds__expected_accepted_values, seeds__expected_failing_test, seeds__expected_not_null_problematic_model_id, seeds__expected_unique_problematic_model_id, - properties__schema_yml, - models__problematic_model, - models__fine_model, - models__file_model_but_with_a_no_good_very_long_name, + seeds__people, tests__failing_test, tests__passing_test, ) @@ -68,7 +64,7 @@ def project_config_update(self): "quote_columns": False, "test": self.column_type_overrides(), }, - "tests": {"+schema": TEST_AUDIT_SCHEMA_SUFFIX}, + "data_tests": {"+schema": TEST_AUDIT_SCHEMA_SUFFIX}, } def column_type_overrides(self): diff --git a/tests/functional/adapter/unit_testing/__init__.py b/tests/functional/adapter/unit_testing/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/adapter/unit_testing/test_unit_testing.py b/tests/functional/adapter/unit_testing/test_unit_testing.py new file mode 100644 index 00000000000..c571b74480a --- /dev/null +++ b/tests/functional/adapter/unit_testing/test_unit_testing.py @@ -0,0 +1,69 @@ +import pytest + +from dbt.adapters.postgres.relation_configs import MAX_CHARACTERS_IN_IDENTIFIER +from dbt.tests.util import run_dbt, write_file + +my_model_a_sql = """ +SELECT +1 as a, +1 as id, +2 as not_testing, +'a' as string_a, +DATE '2020-01-02' as date_a +""" + +test_model_a_long_test_name_yml = """ +unit_tests: + - name: {test_name} + model: my_model_a + given: [] + expect: + rows: + - {{a: 1, id: 1, not_testing: 2, string_a: "a", date_a: "2020-01-02"}} +""" + + +class BaseUnitTestLongTestName: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": my_model_a_sql, + "test_model_a.yml": test_model_a_long_test_name_yml, + } + + @pytest.fixture + def max_unit_test_name_length(self) -> int: + return -1 + + def test_long_unit_test_name(self, project, max_unit_test_name_length): + # max test name == passing unit test + write_file( + test_model_a_long_test_name_yml.format(test_name="a" * max_unit_test_name_length), + "models", + "test_model_a.yml", + ) + results = run_dbt(["run"]) + assert len(results) == 1 + + results = run_dbt(["test"], expect_pass=True) + assert len(results) == 1 + + # max test name == failing command + write_file( + test_model_a_long_test_name_yml.format( + test_name="a" * (max_unit_test_name_length + 1) + ), + "models", + "test_model_a.yml", + ) + + results = run_dbt(["run"]) + assert len(results) == 1 + + run_dbt(["test"], expect_pass=False) + + +class TestPostgresUnitTestLongTestNames(BaseUnitTestLongTestName): + @pytest.fixture + def max_unit_test_name_length(self) -> int: + return MAX_CHARACTERS_IN_IDENTIFIER diff --git a/tests/functional/adapter/utils/__init__.py b/tests/functional/adapter/utils/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/utils/base_array_utils.py b/tests/functional/adapter/utils/base_array_utils.py similarity index 83% rename from tests/adapter/dbt/tests/adapter/utils/base_array_utils.py rename to tests/functional/adapter/utils/base_array_utils.py index 64147a7bd8b..3abc2ffce64 100644 --- a/tests/adapter/dbt/tests/adapter/utils/base_array_utils.py +++ b/tests/functional/adapter/utils/base_array_utils.py @@ -1,5 +1,5 @@ -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.util import run_dbt, check_relations_equal, get_relation_columns +from dbt.tests.util import check_relations_equal, get_relation_columns, run_dbt +from tests.functional.adapter.utils.base_utils import 
BaseUtils class BaseArrayUtils(BaseUtils): diff --git a/tests/adapter/dbt/tests/adapter/utils/base_utils.py b/tests/functional/adapter/utils/base_utils.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/utils/base_utils.py rename to tests/functional/adapter/utils/base_utils.py index 622b4ab4224..75672e70090 100644 --- a/tests/adapter/dbt/tests/adapter/utils/base_utils.py +++ b/tests/functional/adapter/utils/base_utils.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import run_dbt macros__equals_sql = """ diff --git a/tests/functional/adapter/utils/data_types/__init__.py b/tests/functional/adapter/utils/data_types/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/base_data_type_macro.py b/tests/functional/adapter/utils/data_types/base_data_type_macro.py similarity index 93% rename from tests/adapter/dbt/tests/adapter/utils/data_types/base_data_type_macro.py rename to tests/functional/adapter/utils/data_types/base_data_type_macro.py index 07eb07d8911..b5fe690ff56 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/base_data_type_macro.py +++ b/tests/functional/adapter/utils/data_types/base_data_type_macro.py @@ -1,4 +1,4 @@ -from dbt.tests.util import run_dbt, check_relations_equal, get_relation_columns +from dbt.tests.util import check_relations_equal, get_relation_columns, run_dbt class BaseDataTypeMacro: diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_bigint.py b/tests/functional/adapter/utils/data_types/test_type_bigint.py similarity index 83% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_bigint.py rename to tests/functional/adapter/utils/data_types/test_type_bigint.py index 80e3daa11d8..946f8b888c8 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_bigint.py +++ b/tests/functional/adapter/utils/data_types/test_type_bigint.py @@ -1,5 +1,8 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) models__expected_sql = """ select 9223372036854775800 as bigint_col diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_boolean.py b/tests/functional/adapter/utils/data_types/test_type_boolean.py similarity index 83% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_boolean.py rename to tests/functional/adapter/utils/data_types/test_type_boolean.py index 3efd7874236..db2fcd5459a 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_boolean.py +++ b/tests/functional/adapter/utils/data_types/test_type_boolean.py @@ -1,5 +1,8 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """boolean_col True diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_float.py b/tests/functional/adapter/utils/data_types/test_type_float.py similarity index 83% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_float.py rename to tests/functional/adapter/utils/data_types/test_type_float.py index 03231d04266..e3c75b6e7b8 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_float.py +++ b/tests/functional/adapter/utils/data_types/test_type_float.py @@ -1,5 +1,8 @@ import pytest -from 
dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """float_col 1.2345 diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_int.py b/tests/functional/adapter/utils/data_types/test_type_int.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_int.py rename to tests/functional/adapter/utils/data_types/test_type_int.py index 3721de4d217..9836730da95 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_int.py +++ b/tests/functional/adapter/utils/data_types/test_type_int.py @@ -1,5 +1,8 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """int_col 12345678 diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_numeric.py b/tests/functional/adapter/utils/data_types/test_type_numeric.py similarity index 89% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_numeric.py rename to tests/functional/adapter/utils/data_types/test_type_numeric.py index 031f1474577..fd8fef883fc 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_numeric.py +++ b/tests/functional/adapter/utils/data_types/test_type_numeric.py @@ -1,5 +1,8 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """numeric_col 1.2345 diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_string.py b/tests/functional/adapter/utils/data_types/test_type_string.py similarity index 93% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_string.py rename to tests/functional/adapter/utils/data_types/test_type_string.py index 05e7b598cb0..1b39a038a1f 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_string.py +++ b/tests/functional/adapter/utils/data_types/test_type_string.py @@ -1,5 +1,8 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """string_col "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum." 
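Each of the relocated data-type tests above follows the same pattern, which the hunks only show in fragments: a fixture holding the expected value, a model that casts the same literal through the corresponding `type_*` macro, and a thin subclass of `BaseDataTypeMacro`, which builds the project and then compares the resulting relations and their column types (hence its `check_relations_equal` / `get_relation_columns` imports). A minimal sketch of that shape is below; the fixture keys, SQL bodies, and class names are assumptions chosen purely for illustration, not lines from this patch.

import pytest

from tests.functional.adapter.utils.data_types.base_data_type_macro import (
    BaseDataTypeMacro,
)

# Expected value, loaded into a relation named "expected" (mirrors the
# seeds__expected_csv fixtures shown in the hunks above).
seeds__expected_csv = """int_col
12345678
"""

# Model under test, named "actual": the same literal cast through the dbt macro.
models__actual_sql = """
select cast('12345678' as {{ type_int() }}) as int_col
"""


class BaseTypeIntSketch(BaseDataTypeMacro):
    @pytest.fixture(scope="class")
    def seeds(self):
        return {"expected.csv": seeds__expected_csv}

    @pytest.fixture(scope="class")
    def models(self):
        return {"actual.sql": models__actual_sql}


class TestTypeIntSketch(BaseTypeIntSketch):
    pass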
diff --git a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_timestamp.py b/tests/functional/adapter/utils/data_types/test_type_timestamp.py similarity index 90% rename from tests/adapter/dbt/tests/adapter/utils/data_types/test_type_timestamp.py rename to tests/functional/adapter/utils/data_types/test_type_timestamp.py index 2c8d68999e3..d8ebef3fbd7 100644 --- a/tests/adapter/dbt/tests/adapter/utils/data_types/test_type_timestamp.py +++ b/tests/functional/adapter/utils/data_types/test_type_timestamp.py @@ -1,5 +1,8 @@ import pytest -from dbt.tests.adapter.utils.data_types.base_data_type_macro import BaseDataTypeMacro + +from tests.functional.adapter.utils.data_types.base_data_type_macro import ( + BaseDataTypeMacro, +) seeds__expected_csv = """timestamp_col 2021-01-01 01:01:01 diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_any_value.py b/tests/functional/adapter/utils/fixture_any_value.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/utils/fixture_any_value.py rename to tests/functional/adapter/utils/fixture_any_value.py index ba328d0e2d7..24dd395655e 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_any_value.py +++ b/tests/functional/adapter/utils/fixture_any_value.py @@ -53,7 +53,7 @@ version: 2 models: - name: test_any_value - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_array_append.py b/tests/functional/adapter/utils/fixture_array_append.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_array_append.py rename to tests/functional/adapter/utils/fixture_array_append.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_array_concat.py b/tests/functional/adapter/utils/fixture_array_concat.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_array_concat.py rename to tests/functional/adapter/utils/fixture_array_concat.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_array_construct.py b/tests/functional/adapter/utils/fixture_array_construct.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_array_construct.py rename to tests/functional/adapter/utils/fixture_array_construct.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_bool_or.py b/tests/functional/adapter/utils/fixture_bool_or.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/utils/fixture_bool_or.py rename to tests/functional/adapter/utils/fixture_bool_or.py index 8ae4d1e6cfd..08e9c44f110 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_bool_or.py +++ b/tests/functional/adapter/utils/fixture_bool_or.py @@ -55,7 +55,7 @@ version: 2 models: - name: test_bool_or - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_cast_bool_to_text.py b/tests/functional/adapter/utils/fixture_cast_bool_to_text.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/utils/fixture_cast_bool_to_text.py rename to tests/functional/adapter/utils/fixture_cast_bool_to_text.py index 54795ef21aa..3fc20be6900 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_cast_bool_to_text.py +++ b/tests/functional/adapter/utils/fixture_cast_bool_to_text.py @@ -22,7 +22,7 @@ version: 2 models: - name: test_cast_bool_to_text - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_concat.py 
b/tests/functional/adapter/utils/fixture_concat.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/utils/fixture_concat.py rename to tests/functional/adapter/utils/fixture_concat.py index 8421d53eb66..04fd9958892 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_concat.py +++ b/tests/functional/adapter/utils/fixture_concat.py @@ -38,7 +38,7 @@ version: 2 models: - name: test_concat - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/functional/adapter/utils/fixture_date_spine.py b/tests/functional/adapter/utils/fixture_date_spine.py new file mode 100644 index 00000000000..de01c865fdd --- /dev/null +++ b/tests/functional/adapter/utils/fixture_date_spine.py @@ -0,0 +1,92 @@ +# If date_spine works properly, there should be no `null` values in the resulting model + +models__test_date_spine_sql = """ +with generated_dates as ( + {% if target.type == 'postgres' %} + {{ date_spine("day", "'2023-09-01'::date", "'2023-09-10'::date") }} + + {% elif target.type == 'bigquery' or target.type == 'redshift' %} + select cast(date_day as date) as date_day + from ({{ date_spine("day", "'2023-09-01'", "'2023-09-10'") }}) + + {% else %} + {{ date_spine("day", "'2023-09-01'", "'2023-09-10'") }} + {% endif %} +), expected_dates as ( + {% if target.type == 'postgres' %} + select '2023-09-01'::date as expected + union all + select '2023-09-02'::date as expected + union all + select '2023-09-03'::date as expected + union all + select '2023-09-04'::date as expected + union all + select '2023-09-05'::date as expected + union all + select '2023-09-06'::date as expected + union all + select '2023-09-07'::date as expected + union all + select '2023-09-08'::date as expected + union all + select '2023-09-09'::date as expected + + {% elif target.type == 'bigquery' or target.type == 'redshift' %} + select cast('2023-09-01' as date) as expected + union all + select cast('2023-09-02' as date) as expected + union all + select cast('2023-09-03' as date) as expected + union all + select cast('2023-09-04' as date) as expected + union all + select cast('2023-09-05' as date) as expected + union all + select cast('2023-09-06' as date) as expected + union all + select cast('2023-09-07' as date) as expected + union all + select cast('2023-09-08' as date) as expected + union all + select cast('2023-09-09' as date) as expected + + {% else %} + select '2023-09-01' as expected + union all + select '2023-09-02' as expected + union all + select '2023-09-03' as expected + union all + select '2023-09-04' as expected + union all + select '2023-09-05' as expected + union all + select '2023-09-06' as expected + union all + select '2023-09-07' as expected + union all + select '2023-09-08' as expected + union all + select '2023-09-09' as expected + {% endif %} +), joined as ( + select + generated_dates.date_day, + expected_dates.expected + from generated_dates + left join expected_dates on generated_dates.date_day = expected_dates.expected +) + +SELECT * from joined +""" + +models__test_date_spine_yml = """ +version: 2 +models: + - name: test_date_spine + data_tests: + - assert_equal: + actual: date_day + expected: expected +""" diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_date_trunc.py b/tests/functional/adapter/utils/fixture_date_trunc.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/utils/fixture_date_trunc.py rename to tests/functional/adapter/utils/fixture_date_trunc.py index 10c0e68cad7..5d45c7c27e8 100644 --- 
a/tests/adapter/dbt/tests/adapter/utils/fixture_date_trunc.py +++ b/tests/functional/adapter/utils/fixture_date_trunc.py @@ -33,7 +33,7 @@ version: 2 models: - name: test_date_trunc - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_dateadd.py b/tests/functional/adapter/utils/fixture_dateadd.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/utils/fixture_dateadd.py rename to tests/functional/adapter/utils/fixture_dateadd.py index ab916b673cb..eb1516396b3 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_dateadd.py +++ b/tests/functional/adapter/utils/fixture_dateadd.py @@ -33,7 +33,7 @@ version: 2 models: - name: test_dateadd - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_datediff.py b/tests/functional/adapter/utils/fixture_datediff.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/utils/fixture_datediff.py rename to tests/functional/adapter/utils/fixture_datediff.py index 75ec8149f52..7872ee76a58 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_datediff.py +++ b/tests/functional/adapter/utils/fixture_datediff.py @@ -58,7 +58,7 @@ version: 2 models: - name: test_datediff - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_equals.py b/tests/functional/adapter/utils/fixture_equals.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_equals.py rename to tests/functional/adapter/utils/fixture_equals.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_escape_single_quotes.py b/tests/functional/adapter/utils/fixture_escape_single_quotes.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/utils/fixture_escape_single_quotes.py rename to tests/functional/adapter/utils/fixture_escape_single_quotes.py index aeaaaa44193..d17a9908543 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_escape_single_quotes.py +++ b/tests/functional/adapter/utils/fixture_escape_single_quotes.py @@ -39,7 +39,7 @@ version: 2 models: - name: test_escape_single_quotes - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_except.py b/tests/functional/adapter/utils/fixture_except.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_except.py rename to tests/functional/adapter/utils/fixture_except.py diff --git a/tests/functional/adapter/utils/fixture_generate_series.py b/tests/functional/adapter/utils/fixture_generate_series.py new file mode 100644 index 00000000000..fa8fa9e4396 --- /dev/null +++ b/tests/functional/adapter/utils/fixture_generate_series.py @@ -0,0 +1,45 @@ +# If generate_series works properly, there should be no `null` values in the resulting model + +models__test_generate_series_sql = """ +with generated_numbers as ( + {{ dbt.generate_series(10) }} +), expected_numbers as ( + select 1 as expected + union all + select 2 as expected + union all + select 3 as expected + union all + select 4 as expected + union all + select 5 as expected + union all + select 6 as expected + union all + select 7 as expected + union all + select 8 as expected + union all + select 9 as expected + union all + select 10 as expected +), joined as ( + select + generated_numbers.generated_number, + expected_numbers.expected + from generated_numbers + left join 
expected_numbers on generated_numbers.generated_number = expected_numbers.expected +) + +SELECT * from joined +""" + +models__test_generate_series_yml = """ +version: 2 +models: + - name: test_generate_series + data_tests: + - assert_equal: + actual: generated_number + expected: expected +""" diff --git a/tests/functional/adapter/utils/fixture_get_intervals_between.py b/tests/functional/adapter/utils/fixture_get_intervals_between.py new file mode 100644 index 00000000000..0cb49f66348 --- /dev/null +++ b/tests/functional/adapter/utils/fixture_get_intervals_between.py @@ -0,0 +1,20 @@ +models__test_get_intervals_between_sql = """ +SELECT + {% if target.type == 'postgres' %} + {{ get_intervals_between("'09/01/2023'::date", "'09/12/2023'::date", "day") }} as intervals, + {% else %} + {{ get_intervals_between("'09/01/2023'", "'09/12/2023'", "day") }} as intervals, + {% endif %} + 11 as expected + +""" + +models__test_get_intervals_between_yml = """ +version: 2 +models: + - name: test_get_intervals_between + data_tests: + - assert_equal: + actual: intervals + expected: expected +""" diff --git a/tests/functional/adapter/utils/fixture_get_powers_of_two.py b/tests/functional/adapter/utils/fixture_get_powers_of_two.py new file mode 100644 index 00000000000..b7ad2cb9c30 --- /dev/null +++ b/tests/functional/adapter/utils/fixture_get_powers_of_two.py @@ -0,0 +1,39 @@ +# get_powers_of_two + +models__test_get_powers_of_two_sql = """ +select {{ get_powers_of_two(1) }} as actual, 1 as expected + +union all + +select {{ get_powers_of_two(4) }} as actual, 2 as expected + +union all + +select {{ get_powers_of_two(27) }} as actual, 5 as expected + +union all + +select {{ get_powers_of_two(256) }} as actual, 8 as expected + +union all + +select {{ get_powers_of_two(3125) }} as actual, 12 as expected + +union all + +select {{ get_powers_of_two(46656) }} as actual, 16 as expected + +union all + +select {{ get_powers_of_two(823543) }} as actual, 20 as expected +""" + +models__test_get_powers_of_two_yml = """ +version: 2 +models: + - name: test_powers_of_two + data_tests: + - assert_equal: + actual: actual + expected: expected +""" diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_hash.py b/tests/functional/adapter/utils/fixture_hash.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/utils/fixture_hash.py rename to tests/functional/adapter/utils/fixture_hash.py index 91f366fc504..ffdc57c787d 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_hash.py +++ b/tests/functional/adapter/utils/fixture_hash.py @@ -37,7 +37,7 @@ version: 2 models: - name: test_hash - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_intersect.py b/tests/functional/adapter/utils/fixture_intersect.py similarity index 100% rename from tests/adapter/dbt/tests/adapter/utils/fixture_intersect.py rename to tests/functional/adapter/utils/fixture_intersect.py diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_last_day.py b/tests/functional/adapter/utils/fixture_last_day.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/utils/fixture_last_day.py rename to tests/functional/adapter/utils/fixture_last_day.py index 8a938358fad..13d54dd53cd 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_last_day.py +++ b/tests/functional/adapter/utils/fixture_last_day.py @@ -32,7 +32,7 @@ version: 2 models: - name: test_last_day - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git 
a/tests/adapter/dbt/tests/adapter/utils/fixture_length.py b/tests/functional/adapter/utils/fixture_length.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/utils/fixture_length.py rename to tests/functional/adapter/utils/fixture_length.py index 9ecf5308e20..ad7877826a3 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_length.py +++ b/tests/functional/adapter/utils/fixture_length.py @@ -28,7 +28,7 @@ version: 2 models: - name: test_length - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_listagg.py b/tests/functional/adapter/utils/fixture_listagg.py similarity index 77% rename from tests/adapter/dbt/tests/adapter/utils/fixture_listagg.py rename to tests/functional/adapter/utils/fixture_listagg.py index 50b770670f5..7c9101cf5b5 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_listagg.py +++ b/tests/functional/adapter/utils/fixture_listagg.py @@ -23,6 +23,12 @@ 3,"g, g, g",comma_whitespace_unordered 3,"g",distinct_comma 3,"g,g,g",no_params +1,"c_|_b_|_a",top_ordered +2,"p_|_a_|_1",top_ordered +3,"g_|_g_|_g",top_ordered +1,"c_|_b",top_ordered_limited +2,"p_|_a",top_ordered_limited +3,"g_|_g",top_ordered_limited """ @@ -87,6 +93,24 @@ where group_col = 3 group by group_col + union all + + select + group_col, + {{ listagg('string_text', "'_|_'", "order by order_col desc") }} as actual, + 'top_ordered' as version + from data + group by group_col + + union all + + select + group_col, + {{ listagg('string_text', "'_|_'", "order by order_col desc", 2) }} as actual, + 'top_ordered_limited' as version + from data + group by group_col + ) select @@ -103,7 +127,7 @@ version: 2 models: - name: test_listagg - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_null_compare.py b/tests/functional/adapter/utils/fixture_null_compare.py similarity index 94% rename from tests/adapter/dbt/tests/adapter/utils/fixture_null_compare.py rename to tests/functional/adapter/utils/fixture_null_compare.py index 9af2f9a2e32..b79cf21d543 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_null_compare.py +++ b/tests/functional/adapter/utils/fixture_null_compare.py @@ -9,7 +9,7 @@ version: 2 models: - name: test_mixed_null_compare - tests: + data_tests: - assert_equal: actual: actual expected: expected @@ -27,7 +27,7 @@ version: 2 models: - name: test_null_compare - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_position.py b/tests/functional/adapter/utils/fixture_position.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/utils/fixture_position.py rename to tests/functional/adapter/utils/fixture_position.py index 701a92d5536..1bb59a91857 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_position.py +++ b/tests/functional/adapter/utils/fixture_position.py @@ -28,7 +28,7 @@ version: 2 models: - name: test_position - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_replace.py b/tests/functional/adapter/utils/fixture_replace.py similarity index 97% rename from tests/adapter/dbt/tests/adapter/utils/fixture_replace.py rename to tests/functional/adapter/utils/fixture_replace.py index 4ed5cb0c1b0..ee1683f877d 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_replace.py +++ b/tests/functional/adapter/utils/fixture_replace.py @@ -32,7 
+32,7 @@ version: 2 models: - name: test_replace - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_right.py b/tests/functional/adapter/utils/fixture_right.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/utils/fixture_right.py rename to tests/functional/adapter/utils/fixture_right.py index 5ab4af37b5d..c2e06bba61d 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_right.py +++ b/tests/functional/adapter/utils/fixture_right.py @@ -28,7 +28,7 @@ version: 2 models: - name: test_right - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_safe_cast.py b/tests/functional/adapter/utils/fixture_safe_cast.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/utils/fixture_safe_cast.py rename to tests/functional/adapter/utils/fixture_safe_cast.py index b9b1d8648f5..023721af2f2 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_safe_cast.py +++ b/tests/functional/adapter/utils/fixture_safe_cast.py @@ -26,7 +26,7 @@ version: 2 models: - name: test_safe_cast - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_split_part.py b/tests/functional/adapter/utils/fixture_split_part.py similarity index 98% rename from tests/adapter/dbt/tests/adapter/utils/fixture_split_part.py rename to tests/functional/adapter/utils/fixture_split_part.py index 244fbdb7464..385478c0e05 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_split_part.py +++ b/tests/functional/adapter/utils/fixture_split_part.py @@ -50,7 +50,7 @@ version: 2 models: - name: test_split_part - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/fixture_string_literal.py b/tests/functional/adapter/utils/fixture_string_literal.py similarity index 96% rename from tests/adapter/dbt/tests/adapter/utils/fixture_string_literal.py rename to tests/functional/adapter/utils/fixture_string_literal.py index 3cfe1fe5786..7a46039a46a 100644 --- a/tests/adapter/dbt/tests/adapter/utils/fixture_string_literal.py +++ b/tests/functional/adapter/utils/fixture_string_literal.py @@ -12,7 +12,7 @@ version: 2 models: - name: test_string_literal - tests: + data_tests: - assert_equal: actual: actual expected: expected diff --git a/tests/adapter/dbt/tests/adapter/utils/test_any_value.py b/tests/functional/adapter/utils/test_any_value.py similarity index 85% rename from tests/adapter/dbt/tests/adapter/utils/test_any_value.py rename to tests/functional/adapter/utils/test_any_value.py index 38d437ee274..9db412683e6 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_any_value.py +++ b/tests/functional/adapter/utils/test_any_value.py @@ -1,10 +1,11 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_any_value import ( - seeds__data_any_value_csv, - seeds__data_any_value_expected_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_any_value import ( models__test_any_value_sql, models__test_any_value_yml, + seeds__data_any_value_csv, + seeds__data_any_value_expected_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_array_append.py b/tests/functional/adapter/utils/test_array_append.py similarity index 73% rename from tests/adapter/dbt/tests/adapter/utils/test_array_append.py rename to 
tests/functional/adapter/utils/test_array_append.py index 6b295327a9c..c7f518b99e0 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_array_append.py +++ b/tests/functional/adapter/utils/test_array_append.py @@ -1,6 +1,7 @@ import pytest -from dbt.tests.adapter.utils.base_array_utils import BaseArrayUtils -from dbt.tests.adapter.utils.fixture_array_append import ( + +from tests.functional.adapter.utils.base_array_utils import BaseArrayUtils +from tests.functional.adapter.utils.fixture_array_append import ( models__array_append_actual_sql, models__array_append_expected_sql, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_array_concat.py b/tests/functional/adapter/utils/test_array_concat.py similarity index 73% rename from tests/adapter/dbt/tests/adapter/utils/test_array_concat.py rename to tests/functional/adapter/utils/test_array_concat.py index d50540cc0d4..535223e36f1 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_array_concat.py +++ b/tests/functional/adapter/utils/test_array_concat.py @@ -1,6 +1,7 @@ import pytest -from dbt.tests.adapter.utils.base_array_utils import BaseArrayUtils -from dbt.tests.adapter.utils.fixture_array_concat import ( + +from tests.functional.adapter.utils.base_array_utils import BaseArrayUtils +from tests.functional.adapter.utils.fixture_array_concat import ( models__array_concat_actual_sql, models__array_concat_expected_sql, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_array_construct.py b/tests/functional/adapter/utils/test_array_construct.py similarity index 73% rename from tests/adapter/dbt/tests/adapter/utils/test_array_construct.py rename to tests/functional/adapter/utils/test_array_construct.py index e0e3df04cbb..8184d0f1a30 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_array_construct.py +++ b/tests/functional/adapter/utils/test_array_construct.py @@ -1,6 +1,7 @@ import pytest -from dbt.tests.adapter.utils.base_array_utils import BaseArrayUtils -from dbt.tests.adapter.utils.fixture_array_construct import ( + +from tests.functional.adapter.utils.base_array_utils import BaseArrayUtils +from tests.functional.adapter.utils.fixture_array_construct import ( models__array_construct_actual_sql, models__array_construct_expected_sql, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_bool_or.py b/tests/functional/adapter/utils/test_bool_or.py similarity index 84% rename from tests/adapter/dbt/tests/adapter/utils/test_bool_or.py rename to tests/functional/adapter/utils/test_bool_or.py index 40be6b0fc7c..70d87dbc9b1 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_bool_or.py +++ b/tests/functional/adapter/utils/test_bool_or.py @@ -1,10 +1,11 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_bool_or import ( - seeds__data_bool_or_csv, - seeds__data_bool_or_expected_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_bool_or import ( models__test_bool_or_sql, models__test_bool_or_yml, + seeds__data_bool_or_csv, + seeds__data_bool_or_expected_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_cast_bool_to_text.py b/tests/functional/adapter/utils/test_cast_bool_to_text.py similarity index 79% rename from tests/adapter/dbt/tests/adapter/utils/test_cast_bool_to_text.py rename to tests/functional/adapter/utils/test_cast_bool_to_text.py index ecfd0b45353..c9a9b8a89b6 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_cast_bool_to_text.py +++ 
b/tests/functional/adapter/utils/test_cast_bool_to_text.py @@ -1,6 +1,7 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_cast_bool_to_text import ( + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_cast_bool_to_text import ( models__test_cast_bool_to_text_sql, models__test_cast_bool_to_text_yml, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_concat.py b/tests/functional/adapter/utils/test_concat.py similarity index 80% rename from tests/adapter/dbt/tests/adapter/utils/test_concat.py rename to tests/functional/adapter/utils/test_concat.py index 03a5159148f..7d9a09adc5d 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_concat.py +++ b/tests/functional/adapter/utils/test_concat.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_concat import ( - seeds__data_concat_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_concat import ( models__test_concat_sql, models__test_concat_yml, + seeds__data_concat_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_current_timestamp.py b/tests/functional/adapter/utils/test_current_timestamp.py similarity index 88% rename from tests/adapter/dbt/tests/adapter/utils/test_current_timestamp.py rename to tests/functional/adapter/utils/test_current_timestamp.py index af154267b90..de97c71024c 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_current_timestamp.py +++ b/tests/functional/adapter/utils/test_current_timestamp.py @@ -1,12 +1,8 @@ -import pytest - -from datetime import datetime -from datetime import timezone -from datetime import timedelta +from datetime import datetime, timedelta, timezone -from dbt.tests.util import run_dbt -from dbt.tests.util import relation_from_name +import pytest +from dbt.tests.util import relation_from_name, run_dbt models__current_ts_sql = """ select {{ dbt.current_timestamp() }} as current_ts_column @@ -40,7 +36,8 @@ def test_current_timestamp_matches_utc(self, current_timestamp): sql_timestamp = current_timestamp now_utc = self.utcnow_matching_type(sql_timestamp) # Plenty of wiggle room if clocks aren't perfectly sync'd, etc - tolerance = timedelta(minutes=1) + # The clock on the macos image appears to be a few minutes slow in GHA, causing false negatives + tolerance = timedelta(minutes=5) assert (sql_timestamp > (now_utc - tolerance)) and ( sql_timestamp < (now_utc + tolerance) ), f"SQL timestamp {sql_timestamp.isoformat()} is not close enough to Python UTC {now_utc.isoformat()}" diff --git a/tests/functional/adapter/utils/test_date_spine.py b/tests/functional/adapter/utils/test_date_spine.py new file mode 100644 index 00000000000..0c6545cdba0 --- /dev/null +++ b/tests/functional/adapter/utils/test_date_spine.py @@ -0,0 +1,22 @@ +import pytest + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_date_spine import ( + models__test_date_spine_sql, + models__test_date_spine_yml, +) + + +class BaseDateSpine(BaseUtils): + @pytest.fixture(scope="class") + def models(self): + return { + "test_date_spine.yml": models__test_date_spine_yml, + "test_date_spine.sql": self.interpolate_macro_namespace( + models__test_date_spine_sql, "date_spine" + ), + } + + +class TestDateSpine(BaseDateSpine): + pass diff --git a/tests/adapter/dbt/tests/adapter/utils/test_date_trunc.py 
b/tests/functional/adapter/utils/test_date_trunc.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_date_trunc.py rename to tests/functional/adapter/utils/test_date_trunc.py index 946aa010ac3..7c781c1fbd3 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_date_trunc.py +++ b/tests/functional/adapter/utils/test_date_trunc.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_date_trunc import ( - seeds__data_date_trunc_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_date_trunc import ( models__test_date_trunc_sql, models__test_date_trunc_yml, + seeds__data_date_trunc_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_dateadd.py b/tests/functional/adapter/utils/test_dateadd.py similarity index 90% rename from tests/adapter/dbt/tests/adapter/utils/test_dateadd.py rename to tests/functional/adapter/utils/test_dateadd.py index cb645121c1c..dcb8e78e38c 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_dateadd.py +++ b/tests/functional/adapter/utils/test_dateadd.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_dateadd import ( - seeds__data_dateadd_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_dateadd import ( models__test_dateadd_sql, models__test_dateadd_yml, + seeds__data_dateadd_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_datediff.py b/tests/functional/adapter/utils/test_datediff.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_datediff.py rename to tests/functional/adapter/utils/test_datediff.py index b366e7a4587..df21dd5a361 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_datediff.py +++ b/tests/functional/adapter/utils/test_datediff.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_datediff import ( - seeds__data_datediff_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_datediff import ( models__test_datediff_sql, models__test_datediff_yml, + seeds__data_datediff_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_equals.py b/tests/functional/adapter/utils/test_equals.py similarity index 87% rename from tests/adapter/dbt/tests/adapter/utils/test_equals.py rename to tests/functional/adapter/utils/test_equals.py index 51e7fe84bd3..46a8c9ec462 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_equals.py +++ b/tests/functional/adapter/utils/test_equals.py @@ -1,11 +1,12 @@ import pytest -from dbt.tests.adapter.utils.base_utils import macros__equals_sql -from dbt.tests.adapter.utils.fixture_equals import ( - SEEDS__DATA_EQUALS_CSV, + +from dbt.tests.util import relation_from_name, run_dbt +from tests.functional.adapter.utils.base_utils import macros__equals_sql +from tests.functional.adapter.utils.fixture_equals import ( MODELS__EQUAL_VALUES_SQL, MODELS__NOT_EQUAL_VALUES_SQL, + SEEDS__DATA_EQUALS_CSV, ) -from dbt.tests.util import run_dbt, relation_from_name class BaseEquals: diff --git a/tests/adapter/dbt/tests/adapter/utils/test_escape_single_quotes.py b/tests/functional/adapter/utils/test_escape_single_quotes.py similarity index 88% rename from tests/adapter/dbt/tests/adapter/utils/test_escape_single_quotes.py rename to 
tests/functional/adapter/utils/test_escape_single_quotes.py index 1b0af36b0f8..a9f2715130d 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_escape_single_quotes.py +++ b/tests/functional/adapter/utils/test_escape_single_quotes.py @@ -1,8 +1,9 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_escape_single_quotes import ( - models__test_escape_single_quotes_quote_sql, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_escape_single_quotes import ( models__test_escape_single_quotes_backslash_sql, + models__test_escape_single_quotes_quote_sql, models__test_escape_single_quotes_yml, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_except.py b/tests/functional/adapter/utils/test_except.py similarity index 94% rename from tests/adapter/dbt/tests/adapter/utils/test_except.py rename to tests/functional/adapter/utils/test_except.py index 2c058e91c2c..bb73a3aa181 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_except.py +++ b/tests/functional/adapter/utils/test_except.py @@ -1,18 +1,19 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_except import ( - seeds__data_except_a_csv, - seeds__data_except_b_csv, - seeds__data_except_a_minus_b_csv, - seeds__data_except_b_minus_a_csv, + +from dbt.tests.util import check_relations_equal, run_dbt +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_except import ( models__data_except_empty_sql, - models__test_except_a_minus_b_sql, - models__test_except_b_minus_a_sql, models__test_except_a_minus_a_sql, + models__test_except_a_minus_b_sql, models__test_except_a_minus_empty_sql, + models__test_except_b_minus_a_sql, models__test_except_empty_minus_a_sql, models__test_except_empty_minus_empty_sql, + seeds__data_except_a_csv, + seeds__data_except_a_minus_b_csv, + seeds__data_except_b_csv, + seeds__data_except_b_minus_a_csv, ) diff --git a/tests/functional/adapter/utils/test_generate_series.py b/tests/functional/adapter/utils/test_generate_series.py new file mode 100644 index 00000000000..19252b40c81 --- /dev/null +++ b/tests/functional/adapter/utils/test_generate_series.py @@ -0,0 +1,22 @@ +import pytest + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_generate_series import ( + models__test_generate_series_sql, + models__test_generate_series_yml, +) + + +class BaseGenerateSeries(BaseUtils): + @pytest.fixture(scope="class") + def models(self): + return { + "test_generate_series.yml": models__test_generate_series_yml, + "test_generate_series.sql": self.interpolate_macro_namespace( + models__test_generate_series_sql, "generate_series" + ), + } + + +class TestGenerateSeries(BaseGenerateSeries): + pass diff --git a/tests/functional/adapter/utils/test_get_intervals_between.py b/tests/functional/adapter/utils/test_get_intervals_between.py new file mode 100644 index 00000000000..f73b9ba4885 --- /dev/null +++ b/tests/functional/adapter/utils/test_get_intervals_between.py @@ -0,0 +1,22 @@ +import pytest + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_get_intervals_between import ( + models__test_get_intervals_between_sql, + models__test_get_intervals_between_yml, +) + + +class BaseGetIntervalsBetween(BaseUtils): + 
@pytest.fixture(scope="class") + def models(self): + return { + "test_get_intervals_between.yml": models__test_get_intervals_between_yml, + "test_get_intervals_between.sql": self.interpolate_macro_namespace( + models__test_get_intervals_between_sql, "get_intervals_between" + ), + } + + +class TestGetIntervalsBetween(BaseGetIntervalsBetween): + pass diff --git a/tests/functional/adapter/utils/test_get_powers_of_two.py b/tests/functional/adapter/utils/test_get_powers_of_two.py new file mode 100644 index 00000000000..dd1922a2f4c --- /dev/null +++ b/tests/functional/adapter/utils/test_get_powers_of_two.py @@ -0,0 +1,22 @@ +import pytest + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_get_powers_of_two import ( + models__test_get_powers_of_two_sql, + models__test_get_powers_of_two_yml, +) + + +class BaseGetPowersOfTwo(BaseUtils): + @pytest.fixture(scope="class") + def models(self): + return { + "test_get_powers_of_two.yml": models__test_get_powers_of_two_yml, + "test_get_powers_of_two.sql": self.interpolate_macro_namespace( + models__test_get_powers_of_two_sql, "get_powers_of_two" + ), + } + + +class TestGetPowersOfTwo(BaseGetPowersOfTwo): + pass diff --git a/tests/adapter/dbt/tests/adapter/utils/test_hash.py b/tests/functional/adapter/utils/test_hash.py similarity index 80% rename from tests/adapter/dbt/tests/adapter/utils/test_hash.py rename to tests/functional/adapter/utils/test_hash.py index d1a12ebab75..4237ec14d8a 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_hash.py +++ b/tests/functional/adapter/utils/test_hash.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_hash import ( - seeds__data_hash_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_hash import ( models__test_hash_sql, models__test_hash_yml, + seeds__data_hash_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_intersect.py b/tests/functional/adapter/utils/test_intersect.py similarity index 94% rename from tests/adapter/dbt/tests/adapter/utils/test_intersect.py rename to tests/functional/adapter/utils/test_intersect.py index 737e317c6f2..a7911afeb93 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_intersect.py +++ b/tests/functional/adapter/utils/test_intersect.py @@ -1,17 +1,18 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_intersect import ( - seeds__data_intersect_a_csv, - seeds__data_intersect_b_csv, - seeds__data_intersect_a_overlap_b_csv, + +from dbt.tests.util import check_relations_equal, run_dbt +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_intersect import ( models__data_intersect_empty_sql, - models__test_intersect_a_overlap_b_sql, - models__test_intersect_b_overlap_a_sql, models__test_intersect_a_overlap_a_sql, + models__test_intersect_a_overlap_b_sql, models__test_intersect_a_overlap_empty_sql, + models__test_intersect_b_overlap_a_sql, models__test_intersect_empty_overlap_a_sql, models__test_intersect_empty_overlap_empty_sql, + seeds__data_intersect_a_csv, + seeds__data_intersect_a_overlap_b_csv, + seeds__data_intersect_b_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_last_day.py b/tests/functional/adapter/utils/test_last_day.py similarity index 82% rename from 
tests/adapter/dbt/tests/adapter/utils/test_last_day.py rename to tests/functional/adapter/utils/test_last_day.py index d7d82deae50..4b05d869738 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_last_day.py +++ b/tests/functional/adapter/utils/test_last_day.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_last_day import ( - seeds__data_last_day_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_last_day import ( models__test_last_day_sql, models__test_last_day_yml, + seeds__data_last_day_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_length.py b/tests/functional/adapter/utils/test_length.py similarity index 80% rename from tests/adapter/dbt/tests/adapter/utils/test_length.py rename to tests/functional/adapter/utils/test_length.py index ca047318ad4..ede2b0821a9 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_length.py +++ b/tests/functional/adapter/utils/test_length.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_length import ( - seeds__data_length_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_length import ( models__test_length_sql, models__test_length_yml, + seeds__data_length_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_listagg.py b/tests/functional/adapter/utils/test_listagg.py similarity index 84% rename from tests/adapter/dbt/tests/adapter/utils/test_listagg.py rename to tests/functional/adapter/utils/test_listagg.py index 362d835c220..e838a369ebb 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_listagg.py +++ b/tests/functional/adapter/utils/test_listagg.py @@ -1,10 +1,11 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_listagg import ( - seeds__data_listagg_csv, - seeds__data_listagg_output_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_listagg import ( models__test_listagg_sql, models__test_listagg_yml, + seeds__data_listagg_csv, + seeds__data_listagg_output_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_null_compare.py b/tests/functional/adapter/utils/test_null_compare.py similarity index 88% rename from tests/adapter/dbt/tests/adapter/utils/test_null_compare.py rename to tests/functional/adapter/utils/test_null_compare.py index eac901f3972..161b6bb0110 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_null_compare.py +++ b/tests/functional/adapter/utils/test_null_compare.py @@ -1,13 +1,13 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_null_compare import ( +from dbt.tests.util import run_dbt +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_null_compare import ( MODELS__TEST_MIXED_NULL_COMPARE_SQL, MODELS__TEST_MIXED_NULL_COMPARE_YML, MODELS__TEST_NULL_COMPARE_SQL, MODELS__TEST_NULL_COMPARE_YML, ) -from dbt.tests.util import run_dbt class BaseMixedNullCompare(BaseUtils): diff --git a/tests/adapter/dbt/tests/adapter/utils/test_position.py b/tests/functional/adapter/utils/test_position.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_position.py rename to tests/functional/adapter/utils/test_position.py index 
f4a54eed03a..4558ed17be5 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_position.py +++ b/tests/functional/adapter/utils/test_position.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_position import ( - seeds__data_position_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_position import ( models__test_position_sql, models__test_position_yml, + seeds__data_position_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_replace.py b/tests/functional/adapter/utils/test_replace.py similarity index 81% rename from tests/adapter/dbt/tests/adapter/utils/test_replace.py rename to tests/functional/adapter/utils/test_replace.py index bab75842d98..7e7d558064c 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_replace.py +++ b/tests/functional/adapter/utils/test_replace.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_replace import ( - seeds__data_replace_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_replace import ( models__test_replace_sql, models__test_replace_yml, + seeds__data_replace_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_right.py b/tests/functional/adapter/utils/test_right.py similarity index 80% rename from tests/adapter/dbt/tests/adapter/utils/test_right.py rename to tests/functional/adapter/utils/test_right.py index 215a9228a07..05fbad4ddf5 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_right.py +++ b/tests/functional/adapter/utils/test_right.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_right import ( - seeds__data_right_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_right import ( models__test_right_sql, models__test_right_yml, + seeds__data_right_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_safe_cast.py b/tests/functional/adapter/utils/test_safe_cast.py similarity index 83% rename from tests/adapter/dbt/tests/adapter/utils/test_safe_cast.py rename to tests/functional/adapter/utils/test_safe_cast.py index 6c8310c7838..1052cec209d 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_safe_cast.py +++ b/tests/functional/adapter/utils/test_safe_cast.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_safe_cast import ( - seeds__data_safe_cast_csv, + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_safe_cast import ( models__test_safe_cast_sql, models__test_safe_cast_yml, + seeds__data_safe_cast_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_split_part.py b/tests/functional/adapter/utils/test_split_part.py similarity index 82% rename from tests/adapter/dbt/tests/adapter/utils/test_split_part.py rename to tests/functional/adapter/utils/test_split_part.py index 6caa3afb991..0fee3db618f 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_split_part.py +++ b/tests/functional/adapter/utils/test_split_part.py @@ -1,9 +1,10 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_split_part import ( - seeds__data_split_part_csv, + +from 
tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_split_part import ( models__test_split_part_sql, models__test_split_part_yml, + seeds__data_split_part_csv, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_string_literal.py b/tests/functional/adapter/utils/test_string_literal.py similarity index 78% rename from tests/adapter/dbt/tests/adapter/utils/test_string_literal.py rename to tests/functional/adapter/utils/test_string_literal.py index a6b632e1e18..f83a8f43438 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_string_literal.py +++ b/tests/functional/adapter/utils/test_string_literal.py @@ -1,6 +1,7 @@ import pytest -from dbt.tests.adapter.utils.base_utils import BaseUtils -from dbt.tests.adapter.utils.fixture_string_literal import ( + +from tests.functional.adapter.utils.base_utils import BaseUtils +from tests.functional.adapter.utils.fixture_string_literal import ( models__test_string_literal_sql, models__test_string_literal_yml, ) diff --git a/tests/adapter/dbt/tests/adapter/utils/test_timestamps.py b/tests/functional/adapter/utils/test_timestamps.py similarity index 99% rename from tests/adapter/dbt/tests/adapter/utils/test_timestamps.py rename to tests/functional/adapter/utils/test_timestamps.py index 3fb3b2cd13d..adc5477373e 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_timestamps.py +++ b/tests/functional/adapter/utils/test_timestamps.py @@ -1,5 +1,7 @@ -import pytest import re + +import pytest + from dbt.tests.util import check_relation_has_expected_schema, run_dbt _MODEL_CURRENT_TIMESTAMP = """ diff --git a/tests/adapter/dbt/tests/adapter/utils/test_validate_sql.py b/tests/functional/adapter/utils/test_validate_sql.py similarity index 93% rename from tests/adapter/dbt/tests/adapter/utils/test_validate_sql.py rename to tests/functional/adapter/utils/test_validate_sql.py index 24bdd287f94..4c125d57721 100644 --- a/tests/adapter/dbt/tests/adapter/utils/test_validate_sql.py +++ b/tests/functional/adapter/utils/test_validate_sql.py @@ -3,7 +3,8 @@ import pytest from dbt.adapters.base.impl import BaseAdapter -from dbt.exceptions import DbtRuntimeError, InvalidConnectionError +from dbt.adapters.exceptions import InvalidConnectionError +from dbt.exceptions import DbtRuntimeError class BaseValidateSqlMethod: @@ -41,7 +42,7 @@ def invalid_sql(self) -> str: def expected_exception(self) -> Type[Exception]: """Returns the Exception type thrown by a failed query. 
- Defaults to dbt.exceptions.DbtRuntimeError because that is the most common + Defaults to dbt_common.exceptions.DbtRuntimeError because that is the most common base exception for adapters to throw.""" return DbtRuntimeError diff --git a/tests/functional/analysis/test_analyses.py b/tests/functional/analysis/test_analyses.py index 61f66bc1dbc..e061039117f 100644 --- a/tests/functional/analysis/test_analyses.py +++ b/tests/functional/analysis/test_analyses.py @@ -1,8 +1,8 @@ import os -import pytest -from dbt.tests.util import run_dbt, get_manifest +import pytest +from dbt.tests.util import get_manifest, run_dbt my_model_sql = """ select 1 as id diff --git a/tests/functional/artifacts/data/results/v4/run_results.json b/tests/functional/artifacts/data/results/v4/run_results.json new file mode 100644 index 00000000000..0767eb8e801 --- /dev/null +++ b/tests/functional/artifacts/data/results/v4/run_results.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v4.json", "dbt_version": "1.6.7", "generated_at": "2023-11-06T20:40:37.557735Z", "invocation_id": "42f85a60-4f7b-4cc1-a197-62687104fecc", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:40:37.486980Z", "completed_at": "2023-11-06T20:40:37.488837Z"}, {"name": "execute", "started_at": "2023-11-06T20:40:37.490290Z", "completed_at": "2023-11-06T20:40:37.539787Z"}], "thread_id": "Thread-9 (worker)", "execution_time": 0.0566411018371582, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.my_model"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:40:37.485334Z", "completed_at": "2023-11-06T20:40:37.489266Z"}, {"name": "execute", "started_at": "2023-11-06T20:40:37.494545Z", "completed_at": "2023-11-06T20:40:37.542811Z"}], "thread_id": "Thread-8 (worker)", "execution_time": 0.060118675231933594, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.metricflow_time_spine"}], "elapsed_time": 0.18144583702087402, "args": {"defer": false, "indirect_selection": "eager", "select": [], "log_level_file": "debug", "use_colors": true, "cache_selected_only": false, "strict_mode": false, "use_colors_file": true, "partial_parse_file_diff": true, "static_parser": true, "write_json": true, "warn_error_options": {"include": [], "exclude": []}, "print": true, "log_level": "info", "profiles_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-16/profile0", "log_path": "/Users/jerco/dev/product/dbt-core/logs/test16993032361853467608", "partial_parse": true, "quiet": false, "log_format_file": "debug", "version_check": true, "send_anonymous_usage_stats": false, "project_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-16/project0", "log_format": "default", "enable_legacy_logger": false, "exclude": [], "populate_cache": true, "log_file_max_bytes": 10485760, "macro_debugging": false, "printer_width": 80, "invocation_command": "dbt tests/functional/artifacts/test_previous_version_state.py::TestPreviousVersionState", "which": "run", "favor_state": false, "introspect": true, "vars": {}}} diff --git a/tests/functional/artifacts/data/results/v5/run_results.json b/tests/functional/artifacts/data/results/v5/run_results.json new file mode 100644 index 
00000000000..63a7a58eabc --- /dev/null +++ b/tests/functional/artifacts/data/results/v5/run_results.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v5.json", "dbt_version": "1.8.0a1", "generated_at": "2023-11-06T20:43:08.231028Z", "invocation_id": "a9238a29-6764-47f0-ba7d-f7d61ae5e6c0", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:43:08.146847Z", "completed_at": "2023-11-06T20:43:08.149862Z"}, {"name": "execute", "started_at": "2023-11-06T20:43:08.151676Z", "completed_at": "2023-11-06T20:43:08.206208Z"}], "thread_id": "Thread-9 (worker)", "execution_time": 0.06433510780334473, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.my_model", "compiled": true, "compiled_code": "select 1 as id", "relation_name": "\"dbt\".\"test16993033859513627134_test_previous_version_state\".\"my_model\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-11-06T20:43:08.144982Z", "completed_at": "2023-11-06T20:43:08.150320Z"}, {"name": "execute", "started_at": "2023-11-06T20:43:08.155222Z", "completed_at": "2023-11-06T20:43:08.209881Z"}], "thread_id": "Thread-8 (worker)", "execution_time": 0.06822013854980469, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.metricflow_time_spine", "compiled": true, "compiled_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "relation_name": "\"dbt\".\"test16993033859513627134_test_previous_version_state\".\"metricflow_time_spine\""}], "elapsed_time": 0.18284392356872559, "args": {"send_anonymous_usage_stats": false, "profiles_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-19/profile0", "static_parser": true, "partial_parse_file_diff": true, "printer_width": 80, "log_level_file": "debug", "project_dir": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-19/project0", "log_format": "default", "strict_mode": false, "macro_debugging": false, "indirect_selection": "eager", "version_check": true, "use_colors_file": true, "select": [], "log_file_max_bytes": 10485760, "warn_error_options": {"include": [], "exclude": []}, "log_format_file": "debug", "invocation_command": "dbt tests/functional/artifacts/test_previous_version_state.py::TestPreviousVersionState", "write_json": true, "log_level": "info", "cache_selected_only": false, "quiet": false, "favor_state": false, "enable_legacy_logger": false, "log_path": "/Users/jerco/dev/product/dbt-core/logs/test16993033859513627134", "which": "run", "partial_parse": true, "introspect": true, "show_resource_report": false, "exclude": [], "populate_cache": true, "vars": {}, "use_colors": true, "defer": false, "print": true}} diff --git a/tests/functional/artifacts/data/results/v6/run_results.json b/tests/functional/artifacts/data/results/v6/run_results.json new file mode 100644 index 00000000000..f78176c930c --- /dev/null +++ b/tests/functional/artifacts/data/results/v6/run_results.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v6.json", "dbt_version": "1.8.0a1", "generated_at": "2023-12-06T18:53:19.641690Z", "invocation_id": "ad4ef714-e6c6-425e-b7c8-c1c4369df4ea", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": 
"2023-12-06T18:53:19.554953Z", "completed_at": "2023-12-06T18:53:19.559711Z"}, {"name": "execute", "started_at": "2023-12-06T18:53:19.564874Z", "completed_at": "2023-12-06T18:53:19.620151Z"}], "thread_id": "Thread-8", "execution_time": 0.06995701789855957, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.metricflow_time_spine", "compiled": true, "compiled_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "relation_name": "\"dbt\".\"test17018887966812726006_test_previous_version_state\".\"metricflow_time_spine\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2023-12-06T18:53:19.557019Z", "completed_at": "2023-12-06T18:53:19.559247Z"}, {"name": "execute", "started_at": "2023-12-06T18:53:19.561000Z", "completed_at": "2023-12-06T18:53:19.622080Z"}], "thread_id": "Thread-9", "execution_time": 0.07100677490234375, "adapter_response": {"_message": "CREATE VIEW", "code": "CREATE VIEW", "rows_affected": -1}, "message": "CREATE VIEW", "failures": null, "unique_id": "model.test.my_model", "compiled": true, "compiled_code": "select 1 as id", "relation_name": "\"dbt\".\"test17018887966812726006_test_previous_version_state\".\"my_model\""}], "elapsed_time": 0.13903093338012695, "args": {"print": true, "log_level_file": "debug", "quiet": false, "warn_error_options": {"include": [], "exclude": []}, "write_json": true, "invocation_command": "dbt --cov=core --cov-append --cov-report=xml tests/functional/artifacts/test_previous_version_state.py", "log_level": "info", "select": [], "project_dir": "/private/var/folders/67/r0f0jlj54h95zl3fhmb217jh0000gp/T/pytest-of-william/pytest-68/project0", "static_parser": true, "log_file_max_bytes": 10485760, "empty": false, "introspect": true, "log_format_file": "debug", "vars": {}, "strict_mode": false, "indirect_selection": "eager", "show_resource_report": false, "favor_state": false, "version_check": true, "cache_selected_only": false, "enable_legacy_logger": false, "partial_parse": true, "profiles_dir": "/private/var/folders/67/r0f0jlj54h95zl3fhmb217jh0000gp/T/pytest-of-william/pytest-68/profile0", "defer": false, "printer_width": 80, "send_anonymous_usage_stats": false, "use_colors": true, "log_path": "/Users/william/git/dbt-core/logs/test17018887966812726006", "partial_parse_file_diff": true, "populate_cache": true, "macro_debugging": false, "use_colors_file": true, "log_format": "default", "which": "run", "exclude": []}} diff --git a/tests/functional/artifacts/data/state/v10/manifest.json b/tests/functional/artifacts/data/state/v10/manifest.json index 07ba4f56c0a..9c73cf33703 100644 --- a/tests/functional/artifacts/data/state/v10/manifest.json +++ b/tests/functional/artifacts/data/state/v10/manifest.json @@ -1 +1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v10.json", "dbt_version": "1.6.0b4", "generated_at": "2023-06-15T22:05:40.848437Z", "invocation_id": "07d49dc8-6d0d-440c-aa9d-64fc456ee50d", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], 
"alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1686866740.297339, "relation_name": "\"dbt\".\"test16868667402551820116_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16868667402551820116_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16868667402551820116_test_previous_version_state"}, "created_at": 1686866740.231673, "relation_name": "\"dbt\".\"test16868667402551820116_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state", "name": "a", 
"resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1686866740.257885, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1686866740.279339, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, 
"quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1686866740.2853289, "relation_name": "\"dbt\".\"test16868667402551820116_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/k6/gtt07v8j2vn51m_z05xk_fjc0000gp/T/pytest-of-michelleark/pytest-1078/project7", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16868667402551820116_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1686866740.2977989, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16868667402551820116_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1686866740.298699, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": 
{"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1686866740.325227}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.905021, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.905236, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9054358, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, 
"docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9057338, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.905937, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.906058, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.906174, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9062881, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = 
information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.907404, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as 
dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9080598, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.916847, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1686866739.91736, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.917681, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9180028, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.918491, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ 
schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9189389, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.919116, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.919468, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.919866, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": 
"macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9207418, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.920945, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.92128, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9215639, "supported_languages": null}, 
"macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.921994, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9222221, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.922845, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9230561, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.923174, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.923362, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9234781, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt.get_replace_materialized_view_as_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.925513, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.925809, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql": {"name": "postgres__get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{- get_create_materialized_view_as_sql(intermediate_relation, sql) -}}\n\n {% if existing_relation is not none %}\n alter materialized view {{ existing_relation }} rename to {{ backup_relation.include(database=False, schema=False) }};\n {% endif %}\n\n alter materialized view {{ intermediate_relation }} rename to {{ relation.include(database=False, schema=False) }};\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.926251, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.926552, "supported_languages": null}, 
"macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }};\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9266639, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.927538, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.927799, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9282198, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.928952, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9291751, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.929821, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if 
datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.933095, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.933248, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif 
-%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9337611, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.93418, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9352992, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9355001, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.935651, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": 
"macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.935796, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.93594, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.936321, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9366488, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9369779, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, 
insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9374368, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.937716, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9414232, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9415958, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.941828, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.942587, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9427662, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.942945, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. 
The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.944406, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9457479, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ 
adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.949944, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9502442, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.950419, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.950509, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.950659, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.950779, 
"supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9509912, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.951912, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.952109, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.952372, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.952814, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set 
target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.959171, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.961309, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.96177, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.962092, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% 
macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.962476, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9628649, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.964499, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.965066, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = 
target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.96576, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.966012, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.966753, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.973333, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% 
endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.975059, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.975334, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9763808, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.976661, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9773371, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.977994, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9788969, "supported_languages": null}, 
"macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.979143, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.979338, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.97965, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.979845, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": 
"macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.980153, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.980345, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.980617, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9808059, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro 
default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.980958, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9812422, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.986599, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.9923131, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.993597, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.994841, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866739.995734, "supported_languages": null}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.00081, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", 
"macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.001211, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.001477, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_materialized_view_as_sql(target_relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", 
"macro.dbt.get_replace_materialized_view_as_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0028448, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0030851, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0037708, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "original_file_path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with 
`get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.004201, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "original_file_path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.004351, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.004844, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0050392, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": 
"refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.005354, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.005497, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_as_sql": {"name": "get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "unique_id": "macro.dbt.get_replace_materialized_view_as_sql", "macro_sql": "{% macro get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{- log('Applying REPLACE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_replace_materialized_view_as_sql', 'dbt')(relation, sql, existing_relation, backup_relation, intermediate_relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.005922, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_as_sql": {"name": "default__get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_as_sql", "macro_sql": "{% macro default__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0061, "supported_languages": null}, 
"macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.006444, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.006588, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.007582, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.007775, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": 
"macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.008335, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0085068, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.008647, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. 
#}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(model['columns'])) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.009815, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.010207, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.010566, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = 
this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0132139, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0141, 
"supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0142899, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.01474, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.015427, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- 
set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.015902, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.016098, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0162828, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.018986, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0193, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": 
{"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0195289, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0210712, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.021506, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1686866740.021672, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0218592, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.022293, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n 
{% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.025664, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.030952, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0318878, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, 
agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.03213, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0326211, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.032821, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.03296, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0331059, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.033222, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.033383, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.033503, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.033992, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.034178, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk 
in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.035498, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.035939, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.03633, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0368562, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": 
"macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.037127, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.03742, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0378208, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.038083, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent 
as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.038441, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.038743, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0389981, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.039579, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, 
auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0410838, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.041682, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.041986, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.043896, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = 
convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.045207, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.045981, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0462232, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.046456, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0465338, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0468748, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.047046, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.047293, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.047421, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.047677, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1686866740.047786, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0481331, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.048299, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.048528, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.048606, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0488791, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0490198, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.049322, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.049462, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0501099, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0505269, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1686866740.050875, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.051044, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.05133, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.05154, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0518012, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0519629, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, 
"docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0522099, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.052376, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.052629, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.052742, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.053036, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.053173, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": 
["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.053419, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.053525, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0544379, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0545988, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.05477, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.05493, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": 
["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.055095, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.05525, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.055417, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.055597, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.055763, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.055924, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.056152, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0563042, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.056467, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.056619, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0569038, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.05704, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.05729, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.057397, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0577438, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.058016, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.058172, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0587149, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": 
"macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0588832, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0591109, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.059392, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.059523, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.059903, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.060155, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.060443, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.060589, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0609689, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0611598, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0613241, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": 
"macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.061701, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0621881, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0623412, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.062486, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0626, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.062769, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.062847, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.063015, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.063187, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.06404, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1686866740.06418, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0643399, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.064756, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.064949, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.06509, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.065251, 
"supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0653791, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.068177, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.068349, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0685818, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1686866740.068946, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.069203, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.069538, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.069727, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.069891, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1686866740.070103, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0703921, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0706549, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.071231, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.071469, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ 
return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.071618, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.071816, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.072244, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.072669, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.074408, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": 
[]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.074529, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0747058, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.074825, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.07518, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.07537, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": 
"macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.075477, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.075714, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.075927, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.076239, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.076437, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", 
"depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.076677, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.077376, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.077572, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. 
Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.077827, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.078071, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.079268, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.079964, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.080141, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0803561, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": 
{"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.080527, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0808089, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0813088, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.082826, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.083091, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro 
information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.083284, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.083443, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0836291, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.083881, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.084092, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) 
-%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0844018, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.084596, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.084832, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.085769, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {%- if relation.is_table -%}\n {{- drop_table(relation) -}}\n {%- elif relation.is_view -%}\n {{- drop_view(relation) -}}\n {%- elif relation.is_materialized_view -%}\n {{- drop_materialized_view(relation) -}}\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endif -%}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.drop_table", "macro.dbt.drop_view", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1686866740.086236, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.086426, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.086535, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.086717, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.086825, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.087008, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized 
view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.087115, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0894122, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.08958, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0898979, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.090126, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n 
{{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.090341, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.090529, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {{ col_err.append(col['name']) }}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.091316, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.091671, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1686866740.09186, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.092225, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.092521, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0931442, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.09341, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.094219, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0958772, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": 
"default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0960371, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.096898, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.097322, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! 
#}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0979211, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0984251, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0985, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0990112, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": 
"tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0992498, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.0995479, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1686866740.09983, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. 
By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1686866740.304866}}, "metrics": {"metric.test.my_metric": {"name": "my_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.my_metric", "fqn": ["test", "my_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null}, "measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1686866740.324069, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": 
false, "unrendered_config": {"enabled": false}, "created_at": 1686866740.228131, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test16868667402551820116_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "state_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16868667402551820116_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16868667402551820116_test_previous_version_state", "enabled": false}, "created_at": 1686866740.2343721, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16868667402551820116_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test16868667402551820116_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "state_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, 
"materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1686866740.255522, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1686866740.277295, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16868667402551820116_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": 
{"enabled": false}, "created_at": 1686866740.3009222, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1686866740.305619}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null}, "measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1686866740.324469, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test16868667402551820116_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1686866740.3037848, "config_call_dict": {}, "relation_name": "\"dbt\".\"test16868667402551820116_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": 
"/private/var/folders/k6/gtt07v8j2vn51m_z05xk_fjc0000gp/T/pytest-of-michelleark/pytest-1078/project7", "depends_on": {"macros": []}, "state_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1686866740.325343}]}, "parent_map": {"model.test.my_model": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.my_metric": []}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.my_metric": []}, "group_map": {}, "semantic_nodes": {}} +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v10.json", "dbt_version": "1.6.6", "generated_at": "2023-10-11T20:49:37.080431Z", "invocation_id": "e2f630c5-769a-47a2-89ce-294a00e14e1a", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, 
"tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.543413, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.456355, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": 
"test16970573770617803847_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16970573770617803847_test_previous_version_state"}, "created_at": 1697057377.471309, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.492032, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1697057377.508335, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') 
}}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.525708, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/79/5290gpvn3lx5jdryk4844rm80000gn/T/pytest-of-quigleymalcolm/pytest-271/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.552852, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": 
"test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1697057377.553834, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1697057377.594166}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.099874, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": 
{"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1000938, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1002848, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1005828, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.10079, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1009028, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.101016, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", 
"path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.101125, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n\n {%- call statement('catalog', fetch_result=True) -%}\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n\n where (\n {%- for schema in schemas -%}\n upper(sch.nspname) = upper('{{ schema }}'){%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table. 
Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table, [m]aterialized view\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1022131, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations () -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1028638, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": 
"macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1119502, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.112461, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.112787, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if 
exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.113112, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.113596, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.114043, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.114221, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1145759, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1149912, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). 
Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1158679, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1160781, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.116409, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.116695, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' 
~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.117132, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.117368, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.117985, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118195, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1697057377.118315, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118505, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.118647, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_materialized_view_as_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.120726, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": 
"macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.121023, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql": {"name": "postgres__get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{- get_create_materialized_view_as_sql(intermediate_relation, sql) -}}\n\n {% if existing_relation is not none %}\n alter materialized view {{ existing_relation }} rename to {{ backup_relation.include(database=False, schema=False) }};\n {% endif %}\n\n alter materialized view {{ intermediate_relation }} rename to {{ relation.include(database=False, schema=False) }};\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.121473, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.121773, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1697057377.1218822, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.122449, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/materialized_view.sql", "original_file_path": "macros/materializations/materialized_view.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.122707, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.123094, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, 
source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.123816, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.124038, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.124678, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when 
date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.127991, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.128149, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1286578, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, 
part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.129076, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.130199, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.130403, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.130552, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1307, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": 
"{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1308448, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.131226, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.131537, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1318662, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.13231, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) 
-%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.132591, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.136333, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.13651, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1367402, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1374788, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.13765, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.137829, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": 
"macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1392791, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n 
\"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.140613, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.144871, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.14516, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.145334, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1454248, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ 
adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1455739, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.145694, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.145904, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from 
updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.146819, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1470149, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.147279, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.147718, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ 
run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.154073, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type='table') -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ create_table_as(False, target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.156199, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.156668, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", 
"macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.156987, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.157378, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.157763, "supported_languages": null}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.159425, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in 
source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1599932, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.160686, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1609302, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1617038, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.168492, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n 
{% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.170215, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.170489, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.171521, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.171801, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.172469, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.173121, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.17403, "supported_languages": null}, 
"macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174277, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174471, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174779, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.174974, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": 
"macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.17528, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.175472, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.175742, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.175937, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) 
%}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.176092, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.176376, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1815941, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1874628, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.188724, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.189962, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1908412, "supported_languages": null}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.196028, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", 
"macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1964319, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.196692, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_materialized_view_as_sql(target_relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", 
"macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_materialized_view_as_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.1980631, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.198303, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.198978, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "original_file_path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, 
`get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.19941, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "original_file_path": "macros/materializations/models/materialized_view/get_materialized_view_configuration_changes.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.19956, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.200053, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/alter_materialized_view.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.200245, "supported_languages": 
null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2005591, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/refresh_materialized_view.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2006972, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_as_sql": {"name": "get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "unique_id": "macro.dbt.get_replace_materialized_view_as_sql", "macro_sql": "{% macro get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{- log('Applying REPLACE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_replace_materialized_view_as_sql', 'dbt')(relation, sql, existing_relation, backup_relation, intermediate_relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2011251, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_as_sql": {"name": "default__get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/replace_materialized_view.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_as_sql", "macro_sql": "{% macro default__get_replace_materialized_view_as_sql(relation, sql, existing_relation, backup_relation, intermediate_relation) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1697057377.2013052, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.201658, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view/create_materialized_view.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.201805, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202045, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202231, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ 
return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202538, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.202677, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = 
\"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2059531, "supported_languages": ["sql"]}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2070122, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2071402, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.207693, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2078662, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2080052, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. 
#}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.209325, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.209718, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/columns_spec_ddl.sql", "original_file_path": "macros/materializations/models/table/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.210073, "supported_languages": null}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/table.sql", "original_file_path": "macros/materializations/models/table/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = 
this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2131069, "supported_languages": ["sql"]}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, 
relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.214011, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2142022, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.214653, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2153769, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": 
"macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2158551, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2160509, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table/create_table_as.sql", "original_file_path": "macros/materializations/models/table/create_table_as.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.216245, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/view.sql", "original_file_path": "macros/materializations/models/view/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. 
In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.219257, "supported_languages": ["sql"]}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.219564, 
"supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/helpers.sql", "original_file_path": "macros/materializations/models/view/helpers.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.219792, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_or_replace_view.sql", "original_file_path": "macros/materializations/models/view/create_or_replace_view.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2213418, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.221772, "supported_languages": null}, 
"macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2219388, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.222131, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view/create_view_as.sql", "original_file_path": "macros/materializations/models/view/create_view_as.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.222568, "supported_languages": null}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ 
exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.225961, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.231348, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.232269, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.232512, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233001, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2331991, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233336, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() 
}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233479, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233599, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233762, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.233883, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2343712, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1697057377.2345622, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.236012, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2364511, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.236854, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", 
"macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.237386, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.237652, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.237949, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.238351, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.238611, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.238965, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2392662, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.239514, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2400918, 
"supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.241607, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.242203, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2425091, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2444658, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partiton start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.245748, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.246516, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2467651, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2469969, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.247075, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2474089, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2475772, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n {{ return(adapter.dispatch('concat', 
'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.247827, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.247957, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.248211, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.248318, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2486641, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.248833, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2490602, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2491379, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.249404, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2495492, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.249847, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.249984, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) 
-%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2506409, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.251061, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.251404, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.251571, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.25186, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use 
cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252075, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252336, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252498, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2527459, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.252909, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.25316, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ 
expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.253269, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2535648, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2537038, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.253951, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.254057, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.254996, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ 
return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2551548, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2553222, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.255476, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.255645, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2558029, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.255967, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ 
return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256154, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256317, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256474, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.256723, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2568839, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.257049, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ 
return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2572002, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2574809, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.257617, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2578712, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.257977, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.258348, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', 
'-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.258624, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.258776, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259321, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259488, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.259715, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1697057377.259997, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.260129, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.26051, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.260766, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2610502, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261183, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n 
{{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261564, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261753, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.261918, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.262178, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2626739, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1697057377.262827, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.262975, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.263083, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.263253, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.26333, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2635038, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.263676, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.264548, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.264692, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.264854, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265263, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", 
"original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265459, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265602, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265763, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.265894, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.268777, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": 
"macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.268948, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.269174, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.269548, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.269803, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270133, "supported_languages": null}, "macro.dbt.truncate_relation": 
{"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270325, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2704918, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270706, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.270998, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.271255, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.271829, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.27207, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.27222, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.272419, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.272854, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.273275, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2735639, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.273793, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.275553, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2756748, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": 
"macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2758532, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.275969, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.276321, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.276514, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2766201, "supported_languages": null}, 
"macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.276926, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.277123, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.277358, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2775512, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.277785, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.278485, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.278682, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. 
Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2789361, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2791739, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2803478, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2808928, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.281089, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.281228, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1697057377.281921, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2820952, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.282303, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.282475, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.282752, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do 
run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.283254, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2848241, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.285086, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2852778, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.285444, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.285694, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.28595, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2861598, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2864761, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.286675, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": 
"macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2868428, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2877948, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {%- if relation.is_table -%}\n {{- drop_table(relation) -}}\n {%- elif relation.is_view -%}\n {{- drop_view(relation) -}}\n {%- elif relation.is_materialized_view -%}\n {{- drop_materialized_view(relation) -}}\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n {%- endif -%}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.drop_table", "macro.dbt.drop_view", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.28826, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288448, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288556, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": 
"macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288737, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.288845, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.289025, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/drop_relation.sql", "original_file_path": "macros/adapters/drop_relation.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.289133, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.291446, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": 
[]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2916129, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2919302, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2921631, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.292375, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.292562, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {{ col_err.append(col['name']) }}\n {%- endif 
-%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.293418, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2937758, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2939641, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.294328, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2945652, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.295179, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.295451, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.296254, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.29796, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.2981188, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.298965, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.299389, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! #}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.299982, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.30047, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.3005419, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, 
column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.3010602, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.3013, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.301594, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1697057377.301873, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. 
Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1697057377.578206}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1697057377.583621, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": 
null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1697057377.5840042, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", "ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1697057377.585288, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}, "alias": null, "offset_window": null, "offset_to_grain": null}]}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1697057377.5861351, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", 
"checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.4547698, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "strategy": "check", "target_schema": "test16970573770617803847_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all", "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16970573770617803847_test_previous_version_state", "enabled": false}, "created_at": 1697057377.4774349, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test16970573770617803847_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": 
{"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.489575, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.5060952, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test16970573770617803847_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", 
"original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.558094, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1697057377.5790222}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": []}, "filter": {"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1697057377.584552, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test16970573770617803847_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, 
"materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false}, "quote_columns": null, "post-hook": [], "pre-hook": []}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1697057377.5646772, "config_call_dict": {}, "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/79/5290gpvn3lx5jdryk4844rm80000gn/T/pytest-of-quigleymalcolm/pytest-271/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1697057377.5942788}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", 
"metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test16970573770617803847_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test16970573770617803847_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1697057377.5929039, "config": {"enabled": true}, "primary_entity": null}}} diff --git a/tests/functional/artifacts/data/state/v11/manifest.json b/tests/functional/artifacts/data/state/v11/manifest.json new file mode 100644 index 00000000000..905c9e949dd --- /dev/null +++ b/tests/functional/artifacts/data/state/v11/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v11.json", "dbt_version": "1.7.6", "generated_at": "2024-01-30T11:27:16.415331Z", "invocation_id": "05b2865a-32e1-4aae-bf19-bace9c34ecf6", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", "original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, 
"post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706614035.6203048, "relation_name": "\"dbt\".\"test17066140326720908695_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706614035.2472441, "relation_name": "\"dbt\".\"test17066140326720908695_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", "original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": 
null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17066140326720908695_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17066140326720908695_test_previous_version_state"}, "created_at": 1706614035.331658, "relation_name": "\"dbt\".\"test17066140326720908695_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706614035.426197, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": "just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, 
"where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1706614035.493541, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706614035.5665338, "relation_name": "\"dbt\".\"test17066140326720908695_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-10/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17066140326720908695_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706614035.668334, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", 
"package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}, "test.test.check_nothing_my_model_.d5a5e66110": {"test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": "test17066140326720908695_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1706614035.6698759, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1706614035.938528}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1706614034.49347, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.493846, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.494155, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.4946299, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.494959, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.495212, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": 
"macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.4953868, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.495561, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. 
Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.498032, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.498621, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on 
relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.4997098, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.499924, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5139258, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.514729, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.515243, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5157518, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5165281, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ 
schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5172532, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5175521, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.518124, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.518896, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", 
"resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.520294, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.520627, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.521166, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, 
"docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.521612, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5223072, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.522679, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.52365, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ 
relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.523999, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.524189, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.524483, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.524713, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1706614034.525357, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.526551, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5267892, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.527286, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.527518, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.527804, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.529207, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.530157, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", 
"package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.530636, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5312471, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5314791, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1706614034.532609, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5328991, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5331168, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5340202, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.534313, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.534663, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5356429, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- 
endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5409238, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.541182, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.541986, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.542672, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1706614034.544426, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5447628, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.545001, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.545242, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.545506, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5461252, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5466251, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.547284, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.548049, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.548515, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5546088, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.554924, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.55531, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.556515, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5568, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.557099, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. 
The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.559448, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.561697, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ 
adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.567965, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.568439, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.568725, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.568873, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.569119, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.569309, 
"supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.569651, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.571101, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.571421, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.571839, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5725482, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set 
target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.582187, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.586581, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.587331, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if 
limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.587847, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5884538, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.589078, "supported_languages": null}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", 
"macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5966778, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.5973198, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.597737, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = 
config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.599859, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.600245, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.601315, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6060789, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.610741, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.613347, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6142552, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": 
"macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.615365, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.615764, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6169448, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6264052, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n 
{% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6290119, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6294458, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.63107, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.631517, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.632575, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.633599, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.635051, "supported_languages": null}, 
"macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.635458, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.63577, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6362581, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.636678, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": 
"macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.637161, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.637471, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6379, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6382148, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) 
%}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.63846, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6389172, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.647358, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.655884, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.657853, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.659792, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6611788, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.661578, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6617699, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.662251, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.662475, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default 
-%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.668271, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set 
exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.673449, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.681057, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = 
model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.682514, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6828969, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6836748, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6839921, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n 
-- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.684214, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.684445, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.684631, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.684887, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.68508, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1706614034.6858501, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.686155, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.688233, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.68893, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name ~ 
\"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6895602, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.69043, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.690857, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6913202, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.691966, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": 
"default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.692378, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.693515, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.694123, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.694428, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1706614034.694753, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.695075, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.6964202, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and 
create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.698515, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.699143, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.699562, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.700088, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup 
name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7004328, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.701566, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.702281, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.702626, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1706614034.703085, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.703655, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.704103, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.704864, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7055922, "supported_languages": null}, 
"macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.706131, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.706486, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.706933, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.707106, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", 
"depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.707551, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.707791, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.708298, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.708629, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.709074, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7093172, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.710311, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7106209, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do 
return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.711103, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7113469, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7117891, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.712033, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.713679, 
"supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.713873, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7147489, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.715026, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7152472, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", 
"original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.717453, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.718085, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": 
{"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.718652, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.719095, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.719268, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.719711, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.719948, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7203882, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": 
"default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7206268, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.722004, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.722313, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.723018, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ 
relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.724143, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.724905, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.725209, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7255082, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.72594, 
"supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.726114, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.72751, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7277539, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.729735, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.730062, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7304268, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.730877, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ 
exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7311208, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7317948, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.732067, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7323651, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7330492, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro 
default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7336361, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.734132, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7345579, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.735542, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} 
for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7379699, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7389178, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.739396, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7423072, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, 
end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7443829, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.745625, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.746016, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7463942, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7465222, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7477372, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", 
"depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.748717, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.749108, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.749716, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.750282, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.750558, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n 
{{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.75096, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7511628, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7524989, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.753182, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7535, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor 
%}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.754335, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7547672, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7549481, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7555032, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7557771, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.75615, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7564, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.756841, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7570739, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.75756, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.757789, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1706614034.758796, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7594528, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.759997, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7602718, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.760739, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.760973, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.76143, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.761698, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.762109, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.762371, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.762775, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7629561, "supported_languages": null}, "macro.dbt.position": {"name": "position", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7634618, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.763691, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.764083, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.764363, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.765888, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.766141, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": 
"type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.766407, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7666519, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7669199, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.767169, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.767443, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7677372, "supported_languages": null}, "macro.dbt.type_bigint": 
{"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7680051, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.768262, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.768537, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.768776, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.769038, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.769278, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.769752, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.769978, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.770388, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.770564, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.771231, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.771678, 
"supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.771925, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.772767, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.773036, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7733939, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.773841, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": 
"macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.774057, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7746718, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.77508, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.775543, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.775769, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.776381, 
"supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.776683, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.776957, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.777257, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.778059, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.778315, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", 
"macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.778553, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7787268, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7791228, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.779256, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.779534, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% 
endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.779809, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.781184, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7814138, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.781677, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.782335, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.782665, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.782913, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.783176, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.783384, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7865632, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7868469, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7872078, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7876961, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.788102, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.788644, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ 
return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.788949, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.789593, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.790008, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7909272, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.791318, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": 
"macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.791553, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7922392, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7929611, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.793429, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.793799, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": 
"macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.796642, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7968378, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.797121, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.797313, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.797886, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7982059, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.798379, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7987661, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.799092, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.799478, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) 
-%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.7999208, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8003159, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.80143, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.801748, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": 
"macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.802148, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.802525, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.804396, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.805258, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.805578, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.805804, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1706614034.80692, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8072052, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.80755, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.807842, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8083148, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do 
run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.80914, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.813777, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.814204, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.814544, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.815083, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro 
information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.815398, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8156521, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8159559, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.816355, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.816694, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) 
-%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.817198, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.817512, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8177779, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.818053, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.818312, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro 
get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.818652, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.818933, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.822517, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.822782, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.82329, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro 
get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.823643, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.823992, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.824285, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8263, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.826881, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8271859, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.827768, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8281548, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.829144, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8295648, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8308558, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.833535, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": 
"default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.833805, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.835222, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.835905, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! 
#}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.836863, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8376498, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.837773, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.838603, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": 
"tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.8389928, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.839478, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1706614034.839941, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. 
By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1706614035.762317}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1706614035.8904068, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": 
null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1706614035.891094, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", "ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1706614035.892986, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1706614035.894361, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": 
"597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1706614035.24477, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test17066140326720908695_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17066140326720908695_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17066140326720908695_test_previous_version_state", "enabled": false}, "created_at": 1706614035.3636532, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17066140326720908695_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test17066140326720908695_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], 
"sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1706614035.422348, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1706614035.489865, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}, "database": "dbt", "schema": 
"test17066140326720908695_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", "disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1706614035.6771069, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model"}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1706614035.763594}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1706614035.891953, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test17066140326720908695_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": 
"seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "deferred": false, "unrendered_config": {"enabled": false}, "created_at": 1706614035.695063, "config_call_dict": {}, "relation_name": "\"dbt\".\"test17066140326720908695_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-10/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1706614035.93874}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": 
["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], "metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test17066140326720908695_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test17066140326720908695_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1706614035.935194, "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "primary_entity": null, "group": null}}} diff --git a/tests/functional/artifacts/data/state/v12/manifest.json b/tests/functional/artifacts/data/state/v12/manifest.json new file mode 100644 index 00000000000..25d83c3e796 --- /dev/null +++ b/tests/functional/artifacts/data/state/v12/manifest.json @@ -0,0 +1 @@ +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": "1.8.0b3", "generated_at": "2024-05-02T11:13:36.981553Z", "invocation_id": "05015bbc-b4d2-47f4-996f-acac2c7e1a85", "env": {}, "project_name": "test", "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": null, "send_anonymous_usage_stats": false, "adapter_type": "postgres"}, "nodes": {"model.test.my_model": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "my_model", "resource_type": "model", "package_name": "test", "path": "my_model.sql", 
"original_file_path": "models/my_model.sql", "unique_id": "model.test.my_model", "fqn": ["test", "my_model"], "alias": "my_model", "checksum": {"name": "sha256", "checksum": "3ea0f972fa1b56aa2dc2f56ee784b6a5796312f9a813d59ae70fd8855f10d16d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Example model", "columns": {"id": {"name": "id", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "unrendered_config": {}, "created_at": 1714648415.895201, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"my_model\"", "raw_code": "select 1 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.test.metricflow_time_spine": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "metricflow_time_spine", "resource_type": "model", "package_name": "test", "path": "metricflow_time_spine.sql", "original_file_path": "models/metricflow_time_spine.sql", "unique_id": "model.test.metricflow_time_spine", "fqn": ["test", "metricflow_time_spine"], "alias": "metricflow_time_spine", "checksum": {"name": "sha256", "checksum": "954d9b349821edb5558a373119a7d91eeac9e620aaa96cd112c0d14bab729fdb"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1714648415.362005, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"metricflow_time_spine\"", "raw_code": "SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "snapshot.test.snapshot_seed": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "snapshot_seed.sql", 
"original_file_path": "snapshots/snapshot_seed.sql", "unique_id": "snapshot.test.snapshot_seed", "fqn": ["test", "snapshot_seed", "snapshot_seed"], "alias": "snapshot_seed", "checksum": {"name": "sha256", "checksum": "5fc998f39655f8fe52443a919e749b6e23883ef90202b040412baac13c6bfe18"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17146484148326086409_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17146484148326086409_test_previous_version_state"}, "created_at": 1714648415.444036, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["seed.test.my_seed"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "analysis.test.a": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "a", "resource_type": "analysis", "package_name": "test", "path": "analysis/a.sql", "original_file_path": "analyses/a.sql", "unique_id": "analysis.test.a", "fqn": ["test", "analysis", "a"], "alias": "a", "checksum": {"name": "sha256", "checksum": "a389c282f569f0bbdc2a8a4f174dea746c28582fdaf2048d31d9226af9feab23"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1714648415.595048, "relation_name": null, "raw_code": "select 4 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "test.test.just_my": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "just_my", "resource_type": "test", "package_name": "test", "path": "just_my.sql", "original_file_path": "tests/just_my.sql", "unique_id": "test.test.just_my", "fqn": ["test", "just_my"], "alias": 
"just_my", "checksum": {"name": "sha256", "checksum": "744889a2e2d9ce380619265e1217d7ccf6e6ca896c048d42ebe0f9cfb74d7156"}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["data_test_tag"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["data_test_tag"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"tags": ["data_test_tag"]}, "created_at": 1714648415.67546, "relation_name": null, "raw_code": "{{ config(tags = ['data_test_tag']) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}, "seed.test.my_seed": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "my_seed", "resource_type": "seed", "package_name": "test", "path": "my_seed.csv", "original_file_path": "seeds/my_seed.csv", "unique_id": "seed.test.my_seed", "fqn": ["test", "my_seed"], "alias": "my_seed", "checksum": {"name": "sha256", "checksum": "f7ede83f36165ac6b7a047aa2c3f212dff385bfa9f35f395108cd06fc8e96943"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1714648415.774666, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"my_seed\"", "raw_code": "", "root_path": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-50/project0", "depends_on": {"macros": []}}, "test.test.not_null_my_model_id.43e0e9183a": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "not_null_my_model_id", "resource_type": "test", "package_name": "test", "path": "not_null_my_model_id.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.not_null_my_model_id.43e0e9183a", "fqn": ["test", "not_null_my_model_id"], "alias": "not_null_my_model_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 
1714648415.947873, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "id", "file_key_name": "models.my_model", "attached_node": "model.test.my_model", "test_metadata": {"name": "not_null", "kwargs": {"column_name": "id", "model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}}, "test.test.check_nothing_my_model_.d5a5e66110": {"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.check_nothing_my_model_.d5a5e66110", "fqn": ["test", "check_nothing_my_model_"], "alias": "check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {}, "created_at": 1714648415.949169, "relation_name": null, "raw_code": "{{ test_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_check_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.my_model"]}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model", "test_metadata": {"name": "check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}}}, "sources": {"source.test.my_source.my_table": {"database": "dbt", "schema": "my_source", "name": "my_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.my_table", "fqn": ["test", "my_source", "my_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "my_seed", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "My table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"dbt\".\"my_source\".\"my_seed\"", "created_at": 1714648416.1599889}}, "macros": {"macro.test.test_check_nothing": {"name": "test_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/dummy_test.sql", "original_file_path": "macros/dummy_test.sql", "unique_id": "macro.test.test_check_nothing", "macro_sql": "{% test 
check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9534872, "supported_languages": null}, "macro.test.test_disabled_check_nothing": {"name": "test_disabled_check_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/disabled_dummy_test.sql", "original_file_path": "macros/disabled_dummy_test.sql", "unique_id": "macro.test.test_disabled_check_nothing", "macro_sql": "{% test disabled_check_nothing(model) %}\n-- a silly test to make sure that table-level tests show up in the manifest\n-- without a column_name field\n\n{{ config(enabled=False) }}\nselect 0\n\n{% endtest %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.953828, "supported_languages": null}, "macro.test.do_nothing": {"name": "do_nothing", "resource_type": "macro", "package_name": "test", "path": "macros/do_nothing.sql", "original_file_path": "macros/do_nothing.sql", "unique_id": "macro.test.do_nothing", "macro_sql": "{% macro do_nothing(foo2, bar2) %}\n select\n '{{ foo2 }}' as foo2,\n '{{ bar2 }}' as bar2\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954112, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954139, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954147, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954156, 
"supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954165, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954171, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent 
table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954188, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954249, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as 
referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954271, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9542809, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif -%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543018, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ 
index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543092, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954317, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543219, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543302, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% 
macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954336, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954345, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543512, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1714648414.954356, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543638, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543731, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954381, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ 
return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954387, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9543931, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954404, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95441, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, 
privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954416, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954422, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9544299, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954438, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9544508, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954464, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954475, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9544861, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", 
"macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9545002, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9545112, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954525, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1714648414.95453, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config.model) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9545379, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954549, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9545631, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95458, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954592, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954602, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954616, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954626, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro 
postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9546409, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954655, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', 
({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954672, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954685, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954695, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954706, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% 
endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954721, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954727, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954734, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954742, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954747, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954758, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro 
should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9547641, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954772, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954792, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9548008, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9548218, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954828, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954836, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9548512, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954856, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954865, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. 
The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954873, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9548821, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ 
adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9549062, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954915, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954925, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954931, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954937, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954944, 
"supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.954953, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9549599, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9549649, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9549701, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9550028, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set 
target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9550211, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95505, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955074, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if 
limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955084, "supported_languages": null}, "macro.dbt.get_unit_test_sql": {"name": "get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_unit_test_sql", "macro_sql": "{% macro get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n {{ adapter.dispatch('get_unit_test_sql', 'dbt')(main_sql, expected_fixture_sql, expected_column_names) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_unit_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955091, "supported_languages": null}, "macro.dbt.default__get_unit_test_sql": {"name": "default__get_unit_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_unit_test_sql", "macro_sql": "{% macro default__get_unit_test_sql(main_sql, expected_fixture_sql, expected_column_names) -%}\n-- Build actual result given inputs\nwith dbt_internal_unit_test_actual as (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%},{% endif %}{%- endfor -%}, {{ dbt.string_literal(\"actual\") }} as {{ adapter.quote(\"actual_or_expected\") }}\n from (\n {{ main_sql }}\n ) _dbt_internal_unit_test_actual\n),\n-- Build expected result\ndbt_internal_unit_test_expected as (\n select\n {% for expected_column_name in expected_column_names %}{{expected_column_name}}{% if not loop.last -%}, {% endif %}{%- endfor -%}, {{ dbt.string_literal(\"expected\") }} as {{ adapter.quote(\"actual_or_expected\") }}\n from (\n {{ expected_fixture_sql }}\n ) _dbt_internal_unit_test_expected\n)\n-- Union actual and expected results\nselect * from dbt_internal_unit_test_actual\nunion all\nselect * from dbt_internal_unit_test_expected\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9550982, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955111, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", 
"macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955116, "supported_languages": null}, "macro.dbt.materialization_unit_default": {"name": "materialization_unit_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/unit.sql", "original_file_path": "macros/materializations/tests/unit.sql", "unique_id": "macro.dbt.materialization_unit_default", "macro_sql": "{%- materialization unit, default -%}\n\n {% set relations = [] %}\n\n {% set expected_rows = config.get('expected_rows') %}\n {% set expected_sql = config.get('expected_sql') %}\n {% set tested_expected_column_names = expected_rows[0].keys() if (expected_rows | length ) > 0 else get_columns_in_query(sql) %} %}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {% do run_query(get_create_table_as_sql(True, temp_relation, get_empty_subquery_sql(sql))) %}\n {%- set columns_in_relation = adapter.get_columns_in_relation(temp_relation) -%}\n {%- set column_name_to_data_types = {} -%}\n {%- for column in columns_in_relation -%}\n {%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n {%- endfor -%}\n\n {% if not expected_sql %}\n {% set expected_sql = get_expected_sql(expected_rows, column_name_to_data_types) %}\n {% endif %}\n {% set unit_test_sql = get_unit_test_sql(sql, expected_sql, tested_expected_column_names) %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ unit_test_sql }}\n\n {%- endcall %}\n\n {% do adapter.drop_relation(temp_relation) %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query", "macro.dbt.make_temp_relation", "macro.dbt.run_query", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_empty_subquery_sql", "macro.dbt.get_expected_sql", "macro.dbt.get_unit_test_sql", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955128, "supported_languages": ["sql"]}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = 
materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9551501, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9551601, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955165, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": 
"macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955171, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955182, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, 
existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9551911, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. 
In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9552078, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. */\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955231, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1714648414.9552588, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955266, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955271, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955279, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if 
merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9552839, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955302, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n 
{% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955311, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955322, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955331, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 
'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955342, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955348, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9553668, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1714648414.955382, "supported_languages": null}, "macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9553878, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9553962, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955402, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9554088, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955414, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955422, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9554272, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955432, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": 
"macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9554381, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955447, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95547, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9554932, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955505, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955515, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955522, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955536, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955542, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955552, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95556, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default -%}\n\n 
{%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9555771, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set 
exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9555998, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955629, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set quote_seed_column = 
model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955638, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9556448, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9556532, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955659, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ create_or_truncate_sql }};\n -- 
dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955667, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955682, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955688, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955693, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9557, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1714648414.9557052, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955714, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955719, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955734, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- elif node.version -%}\n\n {{ return(node.name 
~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955761, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955776, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955782, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955789, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955803, "supported_languages": null}, "macro.dbt.default__generate_database_name": {"name": 
"default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955809, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95582, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955827, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955837, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955842, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955847, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955864, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and 
create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955874, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955892, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955897, "supported_languages": null}, "macro.dbt.drop_schema_named": {"name": "drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.drop_schema_named", "macro_sql": "{% macro drop_schema_named(schema_name) %}\n {{ return(adapter.dispatch('drop_schema_named', 'dbt') (schema_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__drop_schema_named"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955909, "supported_languages": null}, "macro.dbt.default__drop_schema_named": {"name": "default__drop_schema_named", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/schema.sql", "original_file_path": "macros/relations/schema.sql", "unique_id": "macro.dbt.default__drop_schema_named", "macro_sql": "{% macro default__drop_schema_named(schema_name) %}\n {% set schema_relation = api.Relation.create(schema=schema_name) %}\n {{ adapter.drop_schema(schema_relation) }}\n{% 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9559171, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95593, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", "macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955936, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955948, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955955, "supported_languages": null}, "macro.dbt.rename_relation": {"name": 
"rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955966, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955971, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.955989, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9559941, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": 
["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956007, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956013, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956028, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956036, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{- adapter.dispatch('drop_materialized_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": 
true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956047, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956052, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9560661, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956071, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956081, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ 
exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956087, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9560971, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": "default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561028, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956117, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been 
implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561229, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with `get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956128, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561338, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561448, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", 
"original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956152, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956168, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956178, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561841, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro 
get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9561942, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9562, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": "assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. 
#}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956212, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956223, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95623, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{- adapter.dispatch('drop_table', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": 
null, "arguments": [], "created_at": 1714648414.9562418, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956247, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956259, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956268, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95628, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956286, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956301, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956308, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956314, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956325, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95633, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956337, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956345, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{- adapter.dispatch('drop_view', 'dbt')(relation) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956357, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956364, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": 
"macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956377, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956382, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956392, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9564018, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956948, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956962, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ 
exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9569669, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956979, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956987, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.956994, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro 
default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9570198, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9570298, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957046, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957057, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} 
for node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957077, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957084, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957089, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9571042, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, 
end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957112, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957119, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957124, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9571369, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957142, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957155, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", 
"depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957164, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957172, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95718, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957191, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9571981, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n 
{{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957212, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957218, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9572291, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957238, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957244, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% 
endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95725, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957265, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957271, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957282, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957288, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957299, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9573078, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957319, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9573271, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95734, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957345, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1714648414.957356, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957362, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957373, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957379, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957394, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574, "supported_languages": null}, "macro.dbt.hash": {"name": "hash", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957412, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957418, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957431, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957438, "supported_languages": null}, "macro.dbt.cast": {"name": "cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.cast", "macro_sql": "{% macro cast(field, type) %}\n {{ return(adapter.dispatch('cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957448, "supported_languages": null}, "macro.dbt.default__cast": {"name": "default__cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast.sql", "original_file_path": "macros/utils/cast.sql", "unique_id": "macro.dbt.default__cast", "macro_sql": "{% macro default__cast(field, type) %}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574552, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", 
"original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574652, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574752, "supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9574862, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957491, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957501, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9575071, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", 
"original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95753, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957543, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957549, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957555, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957561, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957567, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", 
"original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957576, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957581, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957587, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957592, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95762, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957628, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": 
"macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957634, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957639, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95765, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9576561, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957667, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9576719, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", 
"unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957685, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9576929, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957712, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577181, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n 
)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577239, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957737, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577441, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9577558, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957767, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95778, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957788, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957799, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957807, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957813, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957818, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": 
true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9578319, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957838, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957844, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957852, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9578571, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957862, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957868, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9578738, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957889, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957897, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579039, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": 
"macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95791, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957916, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957922, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957928, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957933, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", 
"unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957949, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579551, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579608, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579709, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957977, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957982, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9579918, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.957998, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958005, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": 
{"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9580119, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958019, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958025, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958039, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958045, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, 
"docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958066, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958072, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95809, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958095, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958102, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958108, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958116, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958121, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958127, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958132, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9581382, "supported_languages": null}, "macro.dbt.get_revoke_sql": 
{"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958146, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958153, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9581592, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1714648414.958167, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9581718, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958198, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582038, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous 
grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. --#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582121, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958223, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582322, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958237, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582489, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958255, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958264, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958269, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582741, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif 
%}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958279, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9582999, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958306, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958313, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9583218, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": 
"macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958328, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958336, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958343, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95836, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958365, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": 
"macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95837, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958377, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9583821, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95839, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958397, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", 
"original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9584022, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958407, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958425, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95843, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9584372, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958444, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958451, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9584582, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n {{ cast('null', col['data_type']) }} as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958466, 
"supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958473, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958478, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9584892, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958498, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, 
new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958503, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9585102, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958515, "supported_languages": null}, "macro.dbt.get_fixture_sql": {"name": "get_fixture_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_fixture_sql", "macro_sql": "{% macro get_fixture_sql(rows, column_name_to_data_types) %}\n-- Fixture for {{ model.name }}\n{% set default_row = {} %}\n\n{%- if not column_name_to_data_types -%}\n{#-- Use defer_relation IFF it is available in the manifest and 'this' is missing from the database --#}\n{%- set 
this_or_defer_relation = defer_relation if (defer_relation and not load_relation(this)) else this -%}\n{%- set columns_in_relation = adapter.get_columns_in_relation(this_or_defer_relation) -%}\n\n{%- set column_name_to_data_types = {} -%}\n{%- for column in columns_in_relation -%}\n{#-- This needs to be a case-insensitive comparison --#}\n{%- do column_name_to_data_types.update({column.name|lower: column.data_type}) -%}\n{%- endfor -%}\n{%- endif -%}\n\n{%- if not column_name_to_data_types -%}\n {{ exceptions.raise_compiler_error(\"Not able to get columns for unit test '\" ~ model.name ~ \"' from relation \" ~ this) }}\n{%- endif -%}\n\n{%- for column_name, column_type in column_name_to_data_types.items() -%}\n {%- do default_row.update({column_name: (safe_cast(\"null\", column_type) | trim )}) -%}\n{%- endfor -%}\n\n\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\n{%- set default_row_copy = default_row.copy() -%}\n{%- do default_row_copy.update(formatted_row) -%}\nselect\n{%- for column_name, column_value in default_row_copy.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n\n{%- if (rows | length) == 0 -%}\n select\n {%- for column_name, column_value in default_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%},{%- endif %}\n {%- endfor %}\n limit 0\n{%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_relation", "macro.dbt.safe_cast", "macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958533, "supported_languages": null}, "macro.dbt.get_expected_sql": {"name": "get_expected_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.get_expected_sql", "macro_sql": "{% macro get_expected_sql(rows, column_name_to_data_types) %}\n\n{%- if (rows | length) == 0 -%}\n select * from dbt_internal_unit_test_actual\n limit 0\n{%- else -%}\n{%- for row in rows -%}\n{%- set formatted_row = format_row(row, column_name_to_data_types) -%}\nselect\n{%- for column_name, column_value in formatted_row.items() %} {{ column_value }} as {{ column_name }}{% if not loop.last -%}, {%- endif %}\n{%- endfor %}\n{%- if not loop.last %}\nunion all\n{% endif %}\n{%- endfor -%}\n{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.format_row"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958542, "supported_languages": null}, "macro.dbt.format_row": {"name": "format_row", "resource_type": "macro", "package_name": "dbt", "path": "macros/unit_test_sql/get_fixture_sql.sql", "original_file_path": "macros/unit_test_sql/get_fixture_sql.sql", "unique_id": "macro.dbt.format_row", "macro_sql": "\n\n{%- macro format_row(row, column_name_to_data_types) -%}\n {#-- generate case-insensitive formatted row --#}\n {% set formatted_row = {} %}\n {%- for column_name, column_value in row.items() -%}\n {% set column_name = column_name|lower %}\n\n {%- if column_name not in column_name_to_data_types %}\n {#-- if user-provided row contains column name that relation does not contain, raise an error --#}\n {% set fixture_name = \"expected output\" if model.resource_type == 
'unit_test' else (\"'\" ~ model.name ~ \"'\") %}\n {{ exceptions.raise_compiler_error(\n \"Invalid column name: '\" ~ column_name ~ \"' in unit test fixture for \" ~ fixture_name ~ \".\"\n \"\\nAccepted columns for \" ~ fixture_name ~ \" are: \" ~ (column_name_to_data_types.keys()|list)\n ) }}\n {%- endif -%}\n\n {%- set column_type = column_name_to_data_types[column_name] %}\n\n {#-- sanitize column_value: wrap yaml strings in quotes, apply cast --#}\n {%- set column_value_clean = column_value -%}\n {%- if column_value is string -%}\n {%- set column_value_clean = dbt.string_literal(dbt.escape_single_quotes(column_value)) -%}\n {%- elif column_value is none -%}\n {%- set column_value_clean = 'null' -%}\n {%- endif -%}\n\n {%- set row_update = {column_name: safe_cast(column_value_clean, column_type) } -%}\n {%- do formatted_row.update(row_update) -%}\n {%- endfor -%}\n {{ return(formatted_row) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.string_literal", "macro.dbt.escape_single_quotes", "macro.dbt.safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9585521, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958566, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958571, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return 
dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9585762, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958584, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! 
#}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9585888, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958596, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.958603, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.95939, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": 
"tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.959782, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.960213, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1714648414.9606311, "supported_languages": null}}, "docs": {"doc.test.somedoc": {"name": "somedoc", "resource_type": "doc", "package_name": "test", "path": "somedoc.md", "original_file_path": "models/somedoc.md", "unique_id": "doc.test.somedoc", "block_contents": "Testing, testing"}, "doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. 
By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {"exposure.test.simple_exposure": {"name": "simple_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.simple_exposure", "fqn": ["test", "simple_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": true}, "unrendered_config": {}, "url": null, "depends_on": {"macros": [], "nodes": ["source.test.my_source.my_table", "model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [["my_source", "my_table"]], "metrics": [], "created_at": 1714648416.054376}}, "metrics": {"metric.test.blue_customers_post_2010": {"name": "blue_customers_post_2010", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.blue_customers_post_2010", "fqn": ["test", "blue_customers_post_2010"], "description": "", "label": "Blue Customers since 2010", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1714648416.109668, "group": null}, "metric.test.customers": {"name": "customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.customers", "fqn": ["test", "customers"], "description": "", "label": "Customers Metric", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, 
"denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["semantic_model.test.semantic_people"]}, "refs": [], "metrics": [], "created_at": 1714648416.110243, "group": null}, "metric.test.ratio_of_blue_customers_to_red_customers": {"name": "ratio_of_blue_customers_to_red_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.ratio_of_blue_customers_to_red_customers", "fqn": ["test", "ratio_of_blue_customers_to_red_customers"], "description": "", "label": "Very Important Customer Color Ratio", "type": "ratio", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "denominator": {"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'red'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1714648416.1137412, "group": null}, "metric.test.doubled_blue_customers": {"name": "doubled_blue_customers", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.doubled_blue_customers", "fqn": ["test", "doubled_blue_customers"], "description": "", "label": "Inflated blue customer numbers", "type": "derived", "type_params": {"measure": null, "input_measures": [{"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}], "numerator": null, "denominator": null, "expr": "customers * 2", "window": null, "grain_to_date": null, "metrics": [{"name": "customers", "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color')}} = 'blue'"}]}, "alias": null, "offset_window": null, "offset_to_grain": null}], "conversion_type_params": null}, "filter": null, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "sources": [], "depends_on": {"macros": [], "nodes": ["metric.test.customers"]}, "refs": [], "metrics": [], "created_at": 1714648416.114889, "group": null}}, "groups": {}, "selectors": {}, "disabled": {"model.test.disabled_model": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "disabled_model", "resource_type": "model", "package_name": "test", "path": "disabled_model.sql", "original_file_path": "models/disabled_model.sql", "unique_id": "model.test.disabled_model", "fqn": ["test", "disabled_model"], "alias": "disabled_model", "checksum": {"name": "sha256", "checksum": "597106d23ce34e3cd2430588e5c1cf474ebdd138fc47e09b925a4ab258a27acc"}, "config": {"enabled": 
false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.360441, "config_call_dict": {"enabled": false}, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"disabled_model\"", "raw_code": "{{ config(enabled=False) }}\nselect 2 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null, "defer_relation": null}], "snapshot.test.disabled_snapshot_seed": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "disabled_snapshot_seed", "resource_type": "snapshot", "package_name": "test", "path": "disabled_snapshot_seed.sql", "original_file_path": "snapshots/disabled_snapshot_seed.sql", "unique_id": "snapshot.test.disabled_snapshot_seed", "fqn": ["test", "disabled_snapshot_seed", "disabled_snapshot_seed"], "alias": "disabled_snapshot_seed", "checksum": {"name": "sha256", "checksum": "fe76c9dd437341c9e82a0f2a8baf3148f961b768eaa0a4410cd27d3c071bd617"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "snapshot", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "strategy": "check", "target_schema": "test17146484148326086409_test_previous_version_state", "target_database": null, "updated_at": null, "check_cols": "all"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17146484148326086409_test_previous_version_state", "enabled": false}, "created_at": 1714648415.499299, "config_call_dict": {"unique_key": "id", "strategy": "check", "check_cols": "all", "target_schema": "test17146484148326086409_test_previous_version_state", "enabled": false}, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"disabled_snapshot_seed\"", "raw_code": "\n{{\n config(\n unique_key='id',\n strategy='check',\n check_cols='all',\n target_schema=schema,\n enabled=False,\n )\n}}\nselect * from {{ ref('my_seed') }}\n", "language": "sql", "refs": [{"name": "my_seed", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, 
"alias_types": true, "checksum": null}, "defer_relation": null}], "analysis.test.disabled_al": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "disabled_al", "resource_type": "analysis", "package_name": "test", "path": "analysis/disabled_al.sql", "original_file_path": "analyses/disabled_al.sql", "unique_id": "analysis.test.disabled_al", "fqn": ["test", "analysis", "disabled_al"], "alias": "disabled_al", "checksum": {"name": "sha256", "checksum": "32d36ad6cff0786eb562440ba60ef6c9b9a7f4c282dfb7a52eaf19d36370f0e1"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.5861099, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\nselect 9 as id", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_just_my": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "disabled_just_my", "resource_type": "test", "package_name": "test", "path": "disabled_just_my.sql", "original_file_path": "tests/disabled_just_my.sql", "unique_id": "test.test.disabled_just_my", "fqn": ["test", "disabled_just_my"], "alias": "disabled_just_my", "checksum": {"name": "sha256", "checksum": "4f2268fd89a3b4ef899264ada6d7aa33603671cbc5d5acead7dc2eadf1add985"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.6666071, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ config(enabled=False) }}\n\nselect * from {{ ref('my_model') }}\nwhere false", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.test.disabled_check_nothing_my_model_.f2c6a72d37": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state_dbt_test__audit", "name": "disabled_check_nothing_my_model_", "resource_type": "test", "package_name": "test", "path": "disabled_check_nothing_my_model_.sql", "original_file_path": "models/schema.yml", "unique_id": "test.test.disabled_check_nothing_my_model_.f2c6a72d37", "fqn": ["test", 
"disabled_check_nothing_my_model_"], "alias": "disabled_check_nothing_my_model_", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.9626381, "config_call_dict": {"enabled": false}, "relation_name": null, "raw_code": "{{ test_disabled_check_nothing(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.test.test_disabled_check_nothing", "macro.dbt.get_where_subquery"], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.my_model", "attached_node": "model.test.my_model", "test_metadata": {"name": "disabled_check_nothing", "kwargs": {"model": "{{ get_where_subquery(ref('my_model')) }}"}, "namespace": null}}], "exposure.test.disabled_exposure": [{"name": "disabled_exposure", "resource_type": "exposure", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "exposure.test.disabled_exposure", "fqn": ["test", "disabled_exposure"], "type": "dashboard", "owner": {"email": "something@example.com", "name": null}, "description": "", "label": null, "maturity": null, "meta": {}, "tags": [], "config": {"enabled": false}, "unrendered_config": {"enabled": false}, "url": null, "depends_on": {"macros": [], "nodes": []}, "refs": [{"name": "my_model", "package": null, "version": null}], "sources": [], "metrics": [], "created_at": 1714648416.0555809}], "metric.test.disabled_metric": [{"name": "disabled_metric", "resource_type": "metric", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "metric.test.disabled_metric", "fqn": ["test", "disabled_metric"], "description": "", "label": "Count records", "type": "simple", "type_params": {"measure": {"name": "customers", "filter": null, "alias": null, "join_to_timespine": false, "fill_nulls_with": null}, "input_measures": [], "numerator": null, "denominator": null, "expr": null, "window": null, "grain_to_date": null, "metrics": [], "conversion_type_params": null}, "filter": {"where_filters": [{"where_sql_template": "{{ Dimension('id__favorite_color') }} = 'blue'"}]}, "metadata": null, "meta": {}, "tags": [], "config": {"enabled": false, "group": null, "meta": {}}, "unrendered_config": {"enabled": false}, "sources": [], "depends_on": {"macros": [], "nodes": []}, "refs": [], "metrics": [], "created_at": 1714648416.110999, "group": null}], "seed.test.disabled_seed": [{"database": "dbt", "schema": "test17146484148326086409_test_previous_version_state", "name": "disabled_seed", "resource_type": "seed", "package_name": "test", "path": "disabled_seed.csv", "original_file_path": "seeds/disabled_seed.csv", "unique_id": "seed.test.disabled_seed", "fqn": ["test", "disabled_seed"], "alias": "disabled_seed", "checksum": {"name": "sha256", "checksum": "31fddd8ec40c6aba6a3a8e7d83fedea2fd0a56c47b64ea3df1847ec1b018e2d1"}, "config": 
{"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": null}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "test://models/schema.yml", "build_path": null, "unrendered_config": {"enabled": false}, "created_at": 1714648415.984149, "config_call_dict": {}, "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"disabled_seed\"", "raw_code": "", "root_path": "/private/var/folders/7h/hj5_fw9j291c58hwfdvy5xbm0000gp/T/pytest-of-jerco/pytest-50/project0", "depends_on": {"macros": []}, "defer_relation": null}], "source.test.my_source.disabled_table": [{"database": "dbt", "schema": "my_source", "name": "disabled_table", "resource_type": "source", "package_name": "test", "path": "models/schema.yml", "original_file_path": "models/schema.yml", "unique_id": "source.test.my_source.disabled_table", "fqn": ["test", "my_source", "disabled_table"], "source_name": "my_source", "source_description": "My source", "loader": "a_loader", "identifier": "disabled_table", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": null, "freshness": {"warn_after": {"count": null, "period": null}, "error_after": {"count": null, "period": null}, "filter": null}, "external": null, "description": "Disabled table", "columns": {}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": false}, "patch_path": null, "unrendered_config": {"enabled": false}, "relation_name": "\"dbt\".\"my_source\".\"disabled_table\"", "created_at": 1714648416.1610181}]}, "parent_map": {"model.test.my_model": [], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": ["seed.test.my_seed"], "analysis.test.a": [], "test.test.just_my": ["model.test.my_model"], "seed.test.my_seed": [], "test.test.not_null_my_model_id.43e0e9183a": ["model.test.my_model"], "test.test.check_nothing_my_model_.d5a5e66110": ["model.test.my_model"], "source.test.my_source.my_table": [], "exposure.test.simple_exposure": ["model.test.my_model", "source.test.my_source.my_table"], "metric.test.blue_customers_post_2010": ["semantic_model.test.semantic_people"], "metric.test.customers": ["semantic_model.test.semantic_people"], "metric.test.ratio_of_blue_customers_to_red_customers": ["metric.test.customers"], "metric.test.doubled_blue_customers": ["metric.test.customers"], "semantic_model.test.semantic_people": ["model.test.my_model"]}, "child_map": {"model.test.my_model": ["exposure.test.simple_exposure", "semantic_model.test.semantic_people", "test.test.check_nothing_my_model_.d5a5e66110", "test.test.just_my", "test.test.not_null_my_model_id.43e0e9183a"], "model.test.metricflow_time_spine": [], "snapshot.test.snapshot_seed": [], "analysis.test.a": [], "test.test.just_my": [], "seed.test.my_seed": ["snapshot.test.snapshot_seed"], "test.test.not_null_my_model_id.43e0e9183a": [], "test.test.check_nothing_my_model_.d5a5e66110": [], "source.test.my_source.my_table": ["exposure.test.simple_exposure"], "exposure.test.simple_exposure": [], 
"metric.test.blue_customers_post_2010": [], "metric.test.customers": ["metric.test.doubled_blue_customers", "metric.test.ratio_of_blue_customers_to_red_customers"], "metric.test.ratio_of_blue_customers_to_red_customers": [], "metric.test.doubled_blue_customers": [], "semantic_model.test.semantic_people": ["metric.test.blue_customers_post_2010", "metric.test.customers"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {"semantic_model.test.semantic_people": {"name": "semantic_people", "resource_type": "semantic_model", "package_name": "test", "path": "schema.yml", "original_file_path": "models/schema.yml", "unique_id": "semantic_model.test.semantic_people", "fqn": ["test", "semantic_people"], "model": "ref('my_model')", "node_relation": {"alias": "my_model", "schema_name": "test17146484148326086409_test_previous_version_state", "database": "dbt", "relation_name": "\"dbt\".\"test17146484148326086409_test_previous_version_state\".\"my_model\""}, "description": null, "label": null, "defaults": {"agg_time_dimension": "created_at"}, "entities": [{"name": "id", "type": "primary", "description": null, "label": null, "role": null, "expr": null}], "measures": [{"name": "years_tenure", "agg": "sum", "description": null, "label": null, "create_metric": false, "expr": "tenure", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "people", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}, {"name": "customers", "agg": "count", "description": null, "label": null, "create_metric": false, "expr": "id", "agg_params": null, "non_additive_dimension": null, "agg_time_dimension": null}], "dimensions": [{"name": "favorite_color", "type": "categorical", "description": null, "label": null, "is_partition": false, "type_params": null, "expr": null, "metadata": null}, {"name": "created_at", "type": "time", "description": null, "label": null, "is_partition": false, "type_params": {"time_granularity": "day", "validity_params": null}, "expr": null, "metadata": null}], "metadata": null, "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, "refs": [{"name": "my_model", "package": null, "version": null}], "created_at": 1714648416.155906, "config": {"enabled": true, "group": null, "meta": {}}, "unrendered_config": {}, "primary_entity": null, "group": null}}, "unit_tests": {}} diff --git a/tests/functional/artifacts/expected_manifest.py b/tests/functional/artifacts/expected_manifest.py index d6729342543..749a5b77619 100644 --- a/tests/functional/artifacts/expected_manifest.py +++ b/tests/functional/artifacts/expected_manifest.py @@ -1,7 +1,8 @@ import hashlib -import dbt import os from unittest.mock import ANY + +import dbt from dbt.tests.util import AnyStringWith # This produces an "expected manifest", with a number of the fields @@ -36,7 +37,11 @@ def get_rendered_model_config(**updates): "packages": [], "incremental_strategy": None, "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + "event_time": None, + "lookback": 0, + "batch_size": None, } result.update(updates) return result @@ -55,6 +60,7 @@ def get_rendered_seed_config(**updates): "pre-hook": [], "post-hook": [], "column_types": {}, + "delimiter": ",", "quoting": {}, "tags": [], "quote_columns": True, @@ -70,7 +76,10 @@ def get_rendered_seed_config(**updates): "packages": [], 
"incremental_strategy": None, "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "event_time": None, + "lookback": 0, + "batch_size": None, } result.update(updates) return result @@ -94,6 +103,12 @@ def get_rendered_snapshot_config(**updates): "post-hook": [], "column_types": {}, "quoting": {}, + "snapshot_meta_column_names": { + "dbt_valid_to": None, + "dbt_valid_from": None, + "dbt_updated_at": None, + "dbt_scd_id": None, + }, "tags": [], "persist_docs": {}, "full_refresh": None, @@ -110,7 +125,10 @@ def get_rendered_snapshot_config(**updates): "packages": [], "incremental_strategy": None, "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "event_time": None, + "lookback": 0, + "batch_size": None, } result.update(updates) return result @@ -130,6 +148,7 @@ def get_rendered_tst_config(**updates): "tags": [], "severity": "ERROR", "store_failures": None, + "store_failures_as": None, "warn_if": "!= 0", "error_if": "!= 0", "fail_calc": "count(*)", @@ -276,9 +295,9 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "group": None, "schema": my_schema_name, "database": model_database, - "deferred": False, "alias": "model", "description": "The test model", + "primary_key": ["id"], "columns": { "id": { "name": "id", @@ -288,6 +307,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "first_name": { "name": "first_name", @@ -297,6 +317,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "email": { "name": "email", @@ -306,6 +327,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "ip_address": { "name": "ip_address", @@ -315,6 +337,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "updated_at": { "name": "updated_at", @@ -324,9 +347,10 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, }, - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "constraints": [], "patch_path": "test://" + model_schema_yml_path, "docs": {"node_color": None, "show": False}, @@ -339,6 +363,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "access": "protected", "version": None, "latest_version": None, + "time_spine": None, }, "model.test.second_model": { "compiled_path": os.path.join(compiled_model_path, "second_model.sql"), @@ -369,9 +394,9 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "group": None, "schema": alternate_schema, "database": project.database, - "deferred": False, "alias": "second_model", "description": "The second test model", + "primary_key": [], "columns": { "id": { "name": "id", @@ -381,6 +406,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "first_name": { "name": "first_name", @@ -390,6 +416,7 @@ def expected_seeded_manifest(project, 
model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "email": { "name": "email", @@ -399,6 +426,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "ip_address": { "name": "ip_address", @@ -408,6 +436,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "updated_at": { "name": "updated_at", @@ -417,9 +446,10 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, }, - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "constraints": [], "patch_path": "test://" + model_schema_yml_path, "docs": {"node_color": None, "show": False}, @@ -432,6 +462,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "access": "protected", "version": None, "latest_version": None, + "time_spine": None, }, "seed.test.seed": { "build_path": None, @@ -454,7 +485,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "schema": my_schema_name, "database": project.database, "alias": "seed", - "deferred": False, "description": "The test seed", "columns": { "id": { @@ -465,6 +495,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "first_name": { "name": "first_name", @@ -474,6 +505,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "email": { "name": "email", @@ -483,6 +515,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "ip_address": { "name": "ip_address", @@ -492,6 +525,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "updated_at": { "name": "updated_at", @@ -501,6 +535,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, }, "docs": {"node_color": None, "show": True}, @@ -527,7 +562,6 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.test.model"], }, - "deferred": False, "description": "", "file_key_name": "models.model", "fqn": ["test", "not_null_model_id"], @@ -562,7 +596,7 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): }, "checksum": {"name": "none", "checksum": ""}, "unrendered_config": unrendered_test_config, - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, }, "snapshot.test.snapshot_seed": { "alias": "snapshot_seed", @@ -574,10 +608,9 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "compiled": True, "compiled_code": ANY, "config": snapshot_config, - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "database": project.database, "group": None, - "deferred": False, 
"depends_on": { "macros": [], "nodes": ["seed.test.seed"], @@ -623,13 +656,12 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "columns": {}, "config": test_config, "group": None, - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "sources": [], "depends_on": { "macros": ["macro.test.test_nothing", "macro.dbt.get_where_subquery"], "nodes": ["model.test.model"], }, - "deferred": False, "description": "", "file_key_name": "models.model", "fqn": ["test", "test_nothing_model_"], @@ -676,13 +708,12 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "columns": {}, "config": test_config, "group": None, - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "sources": [], "depends_on": { "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.test.model"], }, - "deferred": False, "description": "", "file_key_name": "models.model", "fqn": ["test", "unique_model_id"], @@ -731,10 +762,12 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): "quote": None, "tags": [], "constraints": [], + "granularity": None, } }, "config": { "enabled": True, + "event_time": None, }, "quoting": { "database": None, @@ -887,6 +920,8 @@ def expected_seeded_manifest(project, model_database=None, quote_model=False): }, "disabled": {}, "semantic_models": {}, + "unit_tests": {}, + "saved_queries": {}, } @@ -924,8 +959,8 @@ def expected_references_manifest(project): "nodes": ["source.test.my_source.my_table"], }, "deprecation_date": None, - "deferred": False, "description": "", + "primary_key": [], "docs": {"node_color": None, "show": True}, "fqn": ["test", "ephemeral_copy"], "group": None, @@ -947,7 +982,7 @@ def expected_references_manifest(project): "unique_id": "model.test.ephemeral_copy", "compiled": True, "compiled_code": ANY, - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "extra_ctes_injected": True, "extra_ctes": [], "checksum": checksum_file(ephemeral_copy_path), @@ -956,6 +991,7 @@ def expected_references_manifest(project): "version": None, "latest_version": None, "constraints": [], + "time_spine": None, }, "model.test.ephemeral_summary": { "alias": "ephemeral_summary", @@ -971,6 +1007,7 @@ def expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "ct": { "description": "The number of instances of the first name", @@ -980,18 +1017,19 @@ def expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, }, "config": get_rendered_model_config(materialized="table", group="test_group"), - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "sources": [], "depends_on": { "macros": [], "nodes": ["model.test.ephemeral_copy"], }, "deprecation_date": None, - "deferred": False, "description": "A summmary table of the ephemeral copy of the seed data", + "primary_key": [], "docs": {"node_color": None, "show": True}, "fqn": ["test", "ephemeral_summary"], "group": "test_group", @@ -1025,6 +1063,7 @@ def expected_references_manifest(project): "version": None, "latest_version": None, "constraints": [], + "time_spine": None, }, "model.test.view_summary": { "alias": "view_summary", @@ -1040,6 +1079,7 @@ def 
expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "ct": { "description": "The number of instances of the first name", @@ -1049,18 +1089,19 @@ def expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, }, "config": get_rendered_model_config(), - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "database": project.database, "depends_on": { "macros": [], "nodes": ["model.test.ephemeral_summary"], }, "deprecation_date": None, - "deferred": False, "description": "A view of the summary of the ephemeral copy of the seed data", + "primary_key": [], "docs": {"node_color": None, "show": True}, "fqn": ["test", "view_summary"], "group": None, @@ -1090,6 +1131,7 @@ def expected_references_manifest(project): "version": None, "latest_version": None, "constraints": [], + "time_spine": None, }, "seed.test.seed": { "alias": "seed", @@ -1104,6 +1146,7 @@ def expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "first_name": { "name": "first_name", @@ -1113,6 +1156,7 @@ def expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "email": { "name": "email", @@ -1122,6 +1166,7 @@ def expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "ip_address": { "name": "ip_address", @@ -1131,6 +1176,7 @@ def expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "updated_at": { "name": "updated_at", @@ -1140,10 +1186,10 @@ def expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, }, "config": get_rendered_seed_config(), - "deferred": False, "depends_on": {"macros": []}, "description": "The test seed", "docs": {"node_color": None, "show": True}, @@ -1176,9 +1222,8 @@ def expected_references_manifest(project): "compiled": True, "compiled_code": ANY, "config": get_rendered_snapshot_config(target_schema=alternate_schema), - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "database": model_database, - "deferred": False, "depends_on": {"macros": [], "nodes": ["seed.test.seed"]}, "description": "", "docs": {"node_color": None, "show": True}, @@ -1220,10 +1265,12 @@ def expected_references_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, } }, "config": { "enabled": True, + "event_time": None, }, "quoting": { "database": False, @@ -1446,6 +1493,8 @@ def expected_references_manifest(project): } }, "semantic_models": {}, + "unit_tests": {}, + "saved_queries": {}, } @@ -1486,6 +1535,7 @@ def expected_versions_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "ct": { "description": "The number of instances of the first name", @@ -1495,6 +1545,7 @@ def expected_versions_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, }, "config": get_rendered_model_config( @@ -1505,8 +1556,8 @@ def expected_versions_manifest(project): "constraints": [], "sources": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "description": "A versioned model", + "primary_key": ["count", "first_name"], "deprecation_date": ANY, "docs": {"node_color": None, "show": True}, "fqn": 
["test", "versioned_model", "v1"], @@ -1531,7 +1582,7 @@ def expected_versions_manifest(project): "unique_id": "model.test.versioned_model.v1", "compiled": True, "compiled_code": ANY, - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "extra_ctes_injected": True, "extra_ctes": [], "checksum": checksum_file(versioned_model_v1_path), @@ -1543,6 +1594,7 @@ def expected_versions_manifest(project): "access": "protected", "version": 1, "latest_version": 2, + "time_spine": None, }, "model.test.versioned_model.v2": { "alias": "versioned_model_v2", @@ -1558,6 +1610,7 @@ def expected_versions_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, "extra": { "description": "", @@ -1567,17 +1620,18 @@ def expected_versions_manifest(project): "quote": None, "tags": [], "constraints": [], + "granularity": None, }, }, "config": get_rendered_model_config( materialized="view", group="test_group", meta={"size": "large", "color": "red"} ), "constraints": [], - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "sources": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "description": "A versioned model", + "primary_key": ["first_name"], "deprecation_date": None, "docs": {"node_color": None, "show": True}, "fqn": ["test", "versioned_model", "v2"], @@ -1611,6 +1665,7 @@ def expected_versions_manifest(project): "access": "protected", "version": 2, "latest_version": 2, + "time_spine": None, }, "model.test.ref_versioned_model": { "alias": "ref_versioned_model", @@ -1620,7 +1675,7 @@ def expected_versions_manifest(project): "columns": {}, "config": get_rendered_model_config(), "constraints": [], - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "database": project.database, "depends_on": { "macros": [], @@ -1630,8 +1685,8 @@ def expected_versions_manifest(project): ], }, "deprecation_date": None, - "deferred": False, "description": "", + "primary_key": [], "docs": {"node_color": None, "show": True}, "fqn": ["test", "ref_versioned_model"], "group": None, @@ -1668,6 +1723,7 @@ def expected_versions_manifest(project): "access": "protected", "version": None, "latest_version": None, + "time_spine": None, }, "test.test.unique_versioned_model_v1_first_name.6138195dec": { "alias": "unique_versioned_model_v1_first_name", @@ -1681,13 +1737,12 @@ def expected_versions_manifest(project): "columns": {}, "config": test_config, "group": "test_group", - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "sources": [], "depends_on": { "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.test.versioned_model.v1"], }, - "deferred": False, "description": "", "file_key_name": "models.versioned_model", "fqn": ["test", "unique_versioned_model_v1_first_name"], @@ -1735,13 +1790,12 @@ def expected_versions_manifest(project): "columns": {}, "config": test_config, "group": "test_group", - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "sources": [], "depends_on": { "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.test.versioned_model.v1"], }, - "deferred": False, "description": "", "file_key_name": "models.versioned_model", "fqn": ["test", 
"unique_versioned_model_v1_count"], @@ -1789,13 +1843,12 @@ def expected_versions_manifest(project): "columns": {}, "config": test_config, "group": "test_group", - "contract": {"checksum": None, "enforced": False}, + "contract": {"checksum": None, "enforced": False, "alias_types": True}, "sources": [], "depends_on": { "macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.test.versioned_model.v2"], }, - "deferred": False, "description": "", "file_key_name": "models.versioned_model", "fqn": ["test", "unique_versioned_model_v2_first_name"], @@ -1925,4 +1978,6 @@ def expected_versions_manifest(project): "disabled": {}, "macros": {}, "semantic_models": {}, + "unit_tests": {}, + "saved_queries": {}, } diff --git a/tests/functional/artifacts/expected_run_results.py b/tests/functional/artifacts/expected_run_results.py index c6187440ca1..3a3148eba4d 100644 --- a/tests/functional/artifacts/expected_run_results.py +++ b/tests/functional/artifacts/expected_run_results.py @@ -1,4 +1,5 @@ from unittest.mock import ANY + from dbt.tests.util import AnyFloat @@ -17,6 +18,9 @@ def expected_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -27,6 +31,9 @@ def expected_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -37,6 +44,9 @@ def expected_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": None, + "compiled_code": ANY, + "relation_name": None, }, { "status": "success", @@ -47,6 +57,9 @@ def expected_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -57,6 +70,9 @@ def expected_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": None, }, { "status": "success", @@ -67,6 +83,9 @@ def expected_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": None, }, { "status": "success", @@ -77,6 +96,9 @@ def expected_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": None, }, ] @@ -92,6 +114,9 @@ def expected_references_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -102,6 +127,9 @@ def expected_references_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -112,6 +140,9 @@ def expected_references_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": None, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -122,6 +153,9 @@ def expected_references_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, ] @@ -137,6 +171,9 @@ def expected_versions_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -147,6 +184,9 @@ def expected_versions_run_results(): "thread_id": ANY, 
"timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -157,6 +197,9 @@ def expected_versions_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -167,6 +210,9 @@ def expected_versions_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -177,6 +223,9 @@ def expected_versions_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, { "status": "success", @@ -187,5 +236,8 @@ def expected_versions_run_results(): "thread_id": ANY, "timing": [ANY, ANY], "failures": ANY, + "compiled": True, + "compiled_code": ANY, + "relation_name": ANY, }, ] diff --git a/tests/functional/artifacts/test_artifact_fields.py b/tests/functional/artifacts/test_artifact_fields.py index 9276f545c8d..74b121b7622 100644 --- a/tests/functional/artifacts/test_artifact_fields.py +++ b/tests/functional/artifacts/test_artifact_fields.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest, get_artifact + +from dbt.tests.util import get_artifact, get_manifest, run_dbt # This is a place to put specific tests for contents of artifacts that we # don't want to bother putting in the big artifact output test, which is @@ -13,7 +14,7 @@ - name: my_model columns: - name: fun - tests: + data_tests: - not_null """ diff --git a/tests/functional/artifacts/test_artifacts.py b/tests/functional/artifacts/test_artifacts.py index db0f72fd5cb..4126266b129 100644 --- a/tests/functional/artifacts/test_artifacts.py +++ b/tests/functional/artifacts/test_artifacts.py @@ -1,24 +1,30 @@ -import pytest import os from datetime import datetime -import dbt + import jsonschema +import pytest -from dbt.tests.util import run_dbt, get_artifact, check_datetime_between, run_dbt_and_capture +import dbt +from dbt.artifacts.schemas.results import RunStatus +from dbt.artifacts.schemas.run import RunResultsArtifact +from dbt.contracts.graph.manifest import WritableManifest +from dbt.tests.util import ( + check_datetime_between, + get_artifact, + run_dbt, + run_dbt_and_capture, +) from tests.functional.artifacts.expected_manifest import ( - expected_seeded_manifest, expected_references_manifest, + expected_seeded_manifest, expected_versions_manifest, ) from tests.functional.artifacts.expected_run_results import ( - expected_run_results, expected_references_run_results, + expected_run_results, expected_versions_run_results, ) -from dbt.contracts.graph.manifest import WritableManifest -from dbt.contracts.results import RunResultsArtifact, RunStatus - models__schema_yml = """ version: 2 @@ -30,7 +36,7 @@ columns: - name: id description: The user ID number - tests: + data_tests: - unique - not_null - name: first_name @@ -41,7 +47,7 @@ description: The user's IP address - name: updated_at description: The last time this user's email was updated - tests: + data_tests: - test.nothing - name: second_model @@ -368,13 +374,13 @@ meta: color: blue size: large - tests: + data_tests: - unique: column_name: count columns: - name: first_name description: "The first name being summarized" - tests: + data_tests: - unique - name: ct description: "The number of instances of the first name" @@ -387,7 +393,7 @@ materialized: view meta: color: red - tests: [] + 
data_tests: [] columns: - include: '*' exclude: ['ct'] @@ -469,6 +475,8 @@ def verify_manifest(project, expected_manifest, start_time, manifest_schema_path "exposures", "selectors", "semantic_models", + "unit_tests", + "saved_queries", } assert set(manifest.keys()) == manifest_keys @@ -494,7 +502,7 @@ def verify_manifest(project, expected_manifest, start_time, manifest_schema_path for unique_id, node in expected_manifest[key].items(): assert unique_id in manifest[key] assert manifest[key][unique_id] == node, f"{unique_id} did not match" - else: # ['docs', 'parent_map', 'child_map', 'group_map', 'selectors'] + else: # ['docs', 'parent_map', 'child_map', 'group_map', 'selectors', 'semantic_models', 'saved_queries'] assert manifest[key] == expected_manifest[key] diff --git a/tests/functional/artifacts/test_docs_generate_defer.py b/tests/functional/artifacts/test_docs_generate_defer.py index e85949819ac..cbeff63558d 100644 --- a/tests/functional/artifacts/test_docs_generate_defer.py +++ b/tests/functional/artifacts/test_docs_generate_defer.py @@ -1,6 +1,8 @@ import os import shutil + import pytest + from dbt.tests.util import run_dbt model_sql = """ @@ -29,5 +31,12 @@ def test_generate_defer( self.copy_state() # defer test, it succeeds - results = run_dbt(["docs", "generate", "--state", "./state", "--defer"]) - assert results.nodes["model.test.model"] + catalog = run_dbt(["docs", "generate", "--state", "./state", "--defer"]) + assert catalog.nodes["model.test.model"] + + # Check that catalog validates with jsonschema + catalog_dict = catalog.to_dict() + try: + catalog.validate(catalog_dict) + except Exception: + raise pytest.fail("Catalog validation failed") diff --git a/tests/functional/artifacts/test_override.py b/tests/functional/artifacts/test_override.py index a7b689a3670..11f4ec200e1 100644 --- a/tests/functional/artifacts/test_override.py +++ b/tests/functional/artifacts/test_override.py @@ -1,13 +1,14 @@ import pytest -from dbt.tests.util import run_dbt + from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt model_sql = """ select 1 as id """ fail_macros__failure_sql = """ -{% macro get_catalog(information_schema, schemas) %} +{% macro get_catalog_relations(information_schema, relations) %} {% do exceptions.raise_compiler_error('rejected: no catalogs for you') %} {% endmacro %} diff --git a/tests/functional/artifacts/test_previous_version_state.py b/tests/functional/artifacts/test_previous_version_state.py index 7ed5fb61310..449c0004566 100644 --- a/tests/functional/artifacts/test_previous_version_state.py +++ b/tests/functional/artifacts/test_previous_version_state.py @@ -1,9 +1,14 @@ -import pytest +import json import os import shutil -from dbt.tests.util import run_dbt, get_manifest -from dbt.exceptions import IncompatibleSchemaError + +import pytest + +from dbt.artifacts.exceptions import IncompatibleSchemaError +from dbt.artifacts.schemas.base import get_artifact_schema_version +from dbt.artifacts.schemas.run import RunResultsArtifact from dbt.contracts.graph.manifest import WritableManifest +from dbt.tests.util import get_manifest, run_dbt # This project must have one of each kind of node type, plus disabled versions, for # test coverage to be complete. 
@@ -123,12 +128,12 @@ models: - name: my_model description: "Example model" - tests: + data_tests: - check_nothing - disabled_check_nothing columns: - name: id - tests: + data_tests: - not_null semantic_models: @@ -158,8 +163,16 @@ agg_time_dimension: created_at metrics: - - name: my_metric - label: Count records + - name: blue_customers_post_2010 + label: Blue Customers since 2010 + type: simple + filter: "{{ TimeDimension('id__created_at', 'day') }} > '2010-01-01'" + type_params: + measure: + name: customers + filter: "{{ Dimension('id__favorite_color') }} = 'blue'" + - name: customers + label: Customers Metric type: simple type_params: measure: customers @@ -167,9 +180,29 @@ label: Count records config: enabled: False + filter: "{{ Dimension('id__favorite_color') }} = 'blue'" type: simple type_params: measure: customers + - name: ratio_of_blue_customers_to_red_customers + label: Very Important Customer Color Ratio + type: ratio + type_params: + numerator: + name: customers + filter: "{{ Dimension('id__favorite_color')}} = 'blue'" + denominator: + name: customers + filter: "{{ Dimension('id__favorite_color')}} = 'red'" + - name: doubled_blue_customers + type: derived + label: Inflated blue customer numbers + type_params: + expr: 'customers * 2' + metrics: + - name: customers + filter: "{{ Dimension('id__favorite_color')}} = 'blue'" + sources: - name: my_source @@ -229,7 +262,8 @@ class TestPreviousVersionState: - CURRENT_EXPECTED_MANIFEST_VERSION = 10 + CURRENT_EXPECTED_MANIFEST_VERSION = 12 + CURRENT_EXPECTED_RUN_RESULTS_VERSION = 6 @pytest.fixture(scope="class") def models(self): @@ -287,7 +321,7 @@ def test_project(self, project): assert len(manifest.nodes) == 8 assert len(manifest.sources) == 1 assert len(manifest.exposures) == 1 - assert len(manifest.metrics) == 1 + assert len(manifest.metrics) == 4 # disabled model, snapshot, seed, singular test, generic test, analysis, source, exposure, metric assert len(manifest.disabled) == 9 assert "macro.test.do_nothing" in manifest.macros @@ -299,13 +333,27 @@ def generate_latest_manifest( project, current_manifest_version, ): - run_dbt(["list"]) + run_dbt(["parse"]) source_path = os.path.join(project.project_root, "target/manifest.json") state_path = os.path.join(project.test_data_dir, f"state/v{current_manifest_version}") target_path = os.path.join(state_path, "manifest.json") os.makedirs(state_path, exist_ok=True) shutil.copyfile(source_path, target_path) + # Use this method when generating a new run_results version for the first time. + # Once generated, we shouldn't need to re-generate or modify the manifest. + def generate_latest_run_results( + self, + project, + current_run_results_version, + ): + run_dbt(["run"]) + source_path = os.path.join(project.project_root, "target/run_results.json") + state_path = os.path.join(project.test_data_dir, f"results/v{current_run_results_version}") + target_path = os.path.join(state_path, "run_results.json") + os.makedirs(state_path, exist_ok=True) + shutil.copyfile(source_path, target_path) + # The actual test method. Run `dbt list --select state:modified --state ...` # once for each past manifest version. 
They all have the same content, but different # schema/structure, only some of which are forward-compatible with the @@ -315,6 +363,7 @@ def compare_previous_state( project, compare_manifest_version, expect_pass, + num_results, ): state_path = os.path.join(project.test_data_dir, f"state/v{compare_manifest_version}") cli_args = [ @@ -328,26 +377,76 @@ def compare_previous_state( ] if expect_pass: results = run_dbt(cli_args, expect_pass=expect_pass) - assert len(results) == 1 + assert len(results) == num_results + else: + with pytest.raises(IncompatibleSchemaError): + run_dbt(cli_args, expect_pass=expect_pass) + + # The actual test method. Run `dbt retry --state ...` + # once for each past run_results version. They all have the same content, but different + # schema/structure, only some of which are forward-compatible with the + # current WritableManifest class. + def compare_previous_results( + self, + project, + compare_run_results_version, + expect_pass, + num_results, + ): + state_path = os.path.join(project.test_data_dir, f"results/v{compare_run_results_version}") + cli_args = [ + "retry", + "--state", + state_path, + ] + if expect_pass: + results = run_dbt(cli_args, expect_pass=expect_pass) + assert len(results) == num_results else: with pytest.raises(IncompatibleSchemaError): run_dbt(cli_args, expect_pass=expect_pass) def test_compare_state_current(self, project): - current_schema_version = WritableManifest.dbt_schema_version.version + current_manifest_schema_version = WritableManifest.dbt_schema_version.version assert ( - current_schema_version == self.CURRENT_EXPECTED_MANIFEST_VERSION + current_manifest_schema_version == self.CURRENT_EXPECTED_MANIFEST_VERSION ), "Sounds like you've bumped the manifest version and need to update this test!" # If we need a newly generated manifest, uncomment the following line and commit the result - # self.generate_latest_manifest(project, current_schema_version) - self.compare_previous_state(project, current_schema_version, True) + # self.generate_latest_manifest(project, current_manifest_schema_version) + self.compare_previous_state(project, current_manifest_schema_version, True, 0) def test_backwards_compatible_versions(self, project): # manifest schema version 4 and greater should always be forward compatible - for schema_version in range(4, self.CURRENT_EXPECTED_MANIFEST_VERSION): - self.compare_previous_state(project, schema_version, True) + for schema_version in range(4, 10): + self.compare_previous_state(project, schema_version, True, 1) + for schema_version in range(10, self.CURRENT_EXPECTED_MANIFEST_VERSION): + self.compare_previous_state(project, schema_version, True, 0) def test_nonbackwards_compatible_versions(self, project): # schema versions 1, 2, 3 are all not forward compatible for schema_version in range(1, 4): - self.compare_previous_state(project, schema_version, False) + self.compare_previous_state(project, schema_version, False, 0) + + def test_get_manifest_schema_version(self, project): + for schema_version in range(1, self.CURRENT_EXPECTED_MANIFEST_VERSION): + manifest_path = os.path.join( + project.test_data_dir, f"state/v{schema_version}/manifest.json" + ) + manifest = json.load(open(manifest_path)) + + manifest_version = get_artifact_schema_version(manifest) + assert manifest_version == schema_version + + def test_compare_results_current(self, project): + current_run_results_schema_version = RunResultsArtifact.dbt_schema_version.version + assert ( + current_run_results_schema_version == 
self.CURRENT_EXPECTED_RUN_RESULTS_VERSION + ), "Sounds like you've bumped the run_results version and need to update this test!" + # If we need a newly generated run_results, uncomment the following line and commit the result + # self.generate_latest_run_results(project, current_run_results_schema_version) + self.compare_previous_results(project, current_run_results_schema_version, True, 0) + + def test_backwards_compatible_run_results_versions(self, project): + # run_results schema version 4 and greater should always be forward compatible + for schema_version in range(4, self.CURRENT_EXPECTED_RUN_RESULTS_VERSION): + self.compare_previous_results(project, schema_version, True, 0) diff --git a/tests/functional/artifacts/test_run_results.py b/tests/functional/artifacts/test_run_results.py index c03c7abdf8f..dea947f342b 100644 --- a/tests/functional/artifacts/test_run_results.py +++ b/tests/functional/artifacts/test_run_results.py @@ -1,8 +1,9 @@ +import json from multiprocessing import Process from pathlib import Path -import json + import pytest -import platform + from dbt.tests.util import run_dbt good_model_sql = """ @@ -41,7 +42,25 @@ def test_timing_exists(self, project): assert len(results.results[0].timing) > 0 -@pytest.mark.skipif(platform.system() != "Darwin", reason="Fails on linux in github actions") +class TestRunResultsSerializableInContext: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": good_model_sql} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "on-run-end": ["{% for result in results %}{{ log(result.to_dict()) }}{% endfor %}"] + } + + def test_results_serializable(self, project): + results = run_dbt(["run"]) + assert len(results.results) == 1 + + +# This test is failing due to the faulty assumptions that run_results.json would +# be written multiple times. Temporarily disabling. 
+@pytest.mark.skip() class TestRunResultsWritesFileOnSignal: @pytest.fixture(scope="class") def models(self): diff --git a/tests/functional/assertions/test_runner.py b/tests/functional/assertions/test_runner.py new file mode 100644 index 00000000000..01ebc87339e --- /dev/null +++ b/tests/functional/assertions/test_runner.py @@ -0,0 +1,43 @@ +import os +from typing import Callable, List, Optional + +from dbt.cli.main import dbtRunner, dbtRunnerResult +from dbt.contracts.graph.manifest import Manifest +from dbt.tests.util import get_run_results +from dbt_common.events.base_types import EventMsg + + +def assert_run_results_have_compiled_node_attributes( + args: List[str], result: dbtRunnerResult +) -> None: + commands_with_run_results = ["build", "compile", "docs", "run", "test"] + if not [a for a in args if a in commands_with_run_results] or not result.success: + return + + run_results = get_run_results(os.getcwd()) + for r in run_results["results"]: + if r["unique_id"].startswith("model") and r["status"] == "success": + assert "compiled_code" in r + assert "compiled" in r + + +_STANDARD_ASSERTIONS = [assert_run_results_have_compiled_node_attributes] + + +class dbtTestRunner(dbtRunner): + def __init__( + self, + manifest: Optional[Manifest] = None, + callbacks: Optional[List[Callable[[EventMsg], None]]] = None, + exit_assertions: Optional[List[Callable[[List[str], dbtRunnerResult], None]]] = None, + ): + self.exit_assertions = exit_assertions if exit_assertions else _STANDARD_ASSERTIONS + super().__init__(manifest, callbacks) + + def invoke(self, args: List[str], **kwargs) -> dbtRunnerResult: + result = super().invoke(args, **kwargs) + + for assertion in self.exit_assertions: + assertion(args, result) + + return result diff --git a/tests/functional/basic/test_basic.py b/tests/functional/basic/test_basic.py index 836df78f83f..115f5ab206d 100644 --- a/tests/functional/basic/test_basic.py +++ b/tests/functional/basic/test_basic.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest +from dbt.tests.util import get_manifest, run_dbt my_model_sql = """ select 1 as fun diff --git a/tests/functional/basic/test_invalid_reference.py b/tests/functional/basic/test_invalid_reference.py index 1c54d1b906a..9452573c11a 100644 --- a/tests/functional/basic/test_invalid_reference.py +++ b/tests/functional/basic/test_invalid_reference.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt -from dbt.exceptions import CompilationError +from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt descendant_sql = """ -- should be ref('model') diff --git a/tests/functional/basic/test_jaffle_shop.py b/tests/functional/basic/test_jaffle_shop.py index c4ac406d462..c74cbf28298 100644 --- a/tests/functional/basic/test_jaffle_shop.py +++ b/tests/functional/basic/test_jaffle_shop.py @@ -1,6 +1,4 @@ -from dbt.tests.util import run_dbt, get_manifest, run_dbt_and_capture, write_file - - +from dbt.tests.util import get_manifest, run_dbt, run_dbt_and_capture, write_file from tests.fixtures.jaffle_shop import JaffleShopProject diff --git a/tests/functional/basic/test_mixed_case_db.py b/tests/functional/basic/test_mixed_case_db.py index 19b2077cede..8c9cf44fdd5 100644 --- a/tests/functional/basic/test_mixed_case_db.py +++ b/tests/functional/basic/test_mixed_case_db.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest +from dbt.tests.util import get_manifest, run_dbt model_sql = """ select 1 as id @@ -16,7 +16,6 @@ def models(): def 
dbt_profile_data(unique_schema): return { - "config": {"send_anonymous_usage_stats": False}, "test": { "outputs": { "default": { diff --git a/tests/functional/basic/test_project.py b/tests/functional/basic/test_project.py index 10427c5ec3b..7a4cb9fd1da 100644 --- a/tests/functional/basic/test_project.py +++ b/tests/functional/basic/test_project.py @@ -1,7 +1,12 @@ +import os +from pathlib import Path + import pytest -from dbt.tests.util import run_dbt, update_config_file -from dbt.exceptions import ProjectContractError +import yaml +from dbt.cli.main import dbtRunner +from dbt.exceptions import DbtProjectError, ProjectContractError +from dbt.tests.util import run_dbt, update_config_file, write_config_file simple_model_sql = """ select true as my_column @@ -62,3 +67,103 @@ def test_invalid_version(self, project): assert "at path ['version']: 'invalid' is not valid under any of the given schemas" in str( excinfo.value ) + + +class TestProjectDbtCloudConfig: + @pytest.fixture(scope="class") + def models(self): + return {"simple_model.sql": simple_model_sql, "simple_model.yml": simple_model_yml} + + def test_dbt_cloud(self, project): + run_dbt(["parse"], expect_pass=True) + conf = yaml.safe_load( + Path(os.path.join(project.project_root, "dbt_project.yml")).read_text() + ) + assert conf == { + "name": "test", + "profile": "test", + "flags": {"send_anonymous_usage_stats": False}, + } + + config = { + "name": "test", + "profile": "test", + "flags": {"send_anonymous_usage_stats": False}, + "dbt-cloud": { + "account_id": "123", + "application": "test", + "environment": "test", + "api_key": "test", + }, + } + write_config_file(config, project.project_root, "dbt_project.yml") + run_dbt(["parse"], expect_pass=True) + conf = yaml.safe_load( + Path(os.path.join(project.project_root, "dbt_project.yml")).read_text() + ) + assert conf == config + + +class TestProjectDbtCloudConfigString: + @pytest.fixture(scope="class") + def models(self): + return {"simple_model.sql": simple_model_sql, "simple_model.yml": simple_model_yml} + + def test_dbt_cloud_invalid(self, project): + run_dbt() + config = {"name": "test", "profile": "test", "dbt-cloud": "Some string"} + update_config_file(config, "dbt_project.yml") + expected_err = ( + "at path ['dbt-cloud']: 'Some string' is not valid under any of the given schemas" + ) + with pytest.raises(ProjectContractError) as excinfo: + run_dbt() + assert expected_err in str(excinfo.value) + + +class TestVersionSpecifierChecksComeBeforeYamlValidation: + def test_version_specifier_checks_before_yaml_validation(self, project) -> None: + runner = dbtRunner() + + # if no version specifier error, we should get a yaml validation error + config_update = {"this-is-not-a-valid-key": "my-value-for-invalid-key"} + update_config_file(config_update, "dbt_project.yml") + result = runner.invoke(["parse"]) + assert result.exception is not None + assert isinstance(result.exception, ProjectContractError) + assert "Additional properties are not allowed" in str(result.exception) + + # add bad version specifier, and assert we get the error for that + update_config_file({"require-dbt-version": [">0.0.0", "<=0.0.1"]}, "dbt_project.yml") + result = runner.invoke(["parse"]) + assert result.exception is not None + assert isinstance(result.exception, DbtProjectError) + assert "This version of dbt is not supported" in str(result.exception) + + +class TestArchiveNotAllowed: + """At one point in time we supported an 'archive' key in projects, but no longer""" + + def test_archive_not_allowed(self, project): + runner = dbtRunner() +
+ config_update = { + "archive": { + "source_schema": "a", + "target_schema": "b", + "tables": [ + { + "source_table": "seed", + "target_table": "archive_actual", + "updated_at": "updated_at", + "unique_key": """id || '-' || first_name""", + }, + ], + } + } + update_config_file(config_update, "dbt_project.yml") + + result = runner.invoke(["parse"]) + assert result.exception is not None + assert isinstance(result.exception, ProjectContractError) + assert "Additional properties are not allowed" in str(result.exception) diff --git a/tests/functional/basic/test_simple_reference.py b/tests/functional/basic/test_simple_reference.py index 680a81383c5..22ba540bee6 100644 --- a/tests/functional/basic/test_simple_reference.py +++ b/tests/functional/basic/test_simple_reference.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, copy_file, read_file, check_relations_equal +from dbt.tests.util import check_relations_equal, copy_file, read_file, run_dbt ephemeral_copy_sql = """ {{ diff --git a/tests/functional/basic/test_varchar_widening.py b/tests/functional/basic/test_varchar_widening.py index 3e74629adea..76b62b0ef85 100644 --- a/tests/functional/basic/test_varchar_widening.py +++ b/tests/functional/basic/test_varchar_widening.py @@ -1,7 +1,8 @@ -import pytest import os -from dbt.tests.util import run_dbt, check_relations_equal +import pytest + +from dbt.tests.util import check_relations_equal, run_dbt incremental_sql = """ {{ diff --git a/tests/functional/build/fixtures.py b/tests/functional/build_command/fixtures.py similarity index 91% rename from tests/functional/build/fixtures.py rename to tests/functional/build_command/fixtures.py index e6f8dd4f0b3..7f6b17d48b6 100644 --- a/tests/functional/build/fixtures.py +++ b/tests/functional/build_command/fixtures.py @@ -126,17 +126,30 @@ - name: model_0 columns: - name: iso3 - tests: + data_tests: - unique - not_null - name: model_2 columns: - name: iso3 - tests: + data_tests: - unique - not_null """ +unit_tests__yml = """ +unit_tests: + - name: ut_model_3 + model: model_3 + given: + - input: ref('model_1') + rows: + - {iso3: ABW, name: Aruba} + expect: + rows: + - {iso3: ABW, name: Aruba} +""" + models_failing_tests__tests_yml = """ version: 2 @@ -144,16 +157,16 @@ - name: model_0 columns: - name: iso3 - tests: + data_tests: - unique - not_null - name: historical_iso_numeric - tests: + data_tests: - not_null - name: model_2 columns: - name: iso3 - tests: + data_tests: - unique - not_null """ @@ -172,7 +185,7 @@ - name: model_0 columns: - name: iso3 - tests: + data_tests: - relationships: to: ref('model_1') field: iso3 @@ -180,7 +193,7 @@ - name: model_1 columns: - name: iso3 - tests: + data_tests: - relationships: to: ref('model_0') field: iso3 @@ -202,7 +215,7 @@ - name: model_a columns: - name: id - tests: + data_tests: - not_null """ @@ -213,17 +226,17 @@ - name: model_a columns: - name: id - tests: + data_tests: - not_null - name: model_b columns: - name: id - tests: + data_tests: - not_null - name: model_c columns: - name: id - tests: + data_tests: - not_null """ @@ -251,7 +264,7 @@ - name: model_a columns: - name: id - tests: + data_tests: - unique - not_null - relationships: @@ -264,7 +277,7 @@ - name: model_b columns: - name: id - tests: + data_tests: - unique - not_null - relationships: @@ -277,7 +290,7 @@ - name: model_c columns: - name: id - tests: + data_tests: - unique - not_null - relationships: diff --git a/tests/functional/build/test_build.py b/tests/functional/build_command/test_build.py similarity index 92% rename 
from tests/functional/build/test_build.py rename to tests/functional/build_command/test_build.py index fb909d69f4b..f0464f75dd8 100644 --- a/tests/functional/build/test_build.py +++ b/tests/functional/build_command/test_build.py @@ -1,29 +1,30 @@ import pytest from dbt.tests.util import run_dbt -from tests.functional.build.fixtures import ( - seeds__country_csv, - snapshots__snap_0, - snapshots__snap_1, - snapshots__snap_99, - models__test_yml, +from tests.functional.build_command.fixtures import ( models__model_0_sql, models__model_1_sql, models__model_2_sql, models__model_3_sql, models__model_99_sql, - models_failing__model_1_sql, + models__test_yml, models_circular_relationship__test_yml, + models_failing__model_1_sql, models_failing_tests__tests_yml, + models_interdependent__model_a_sql, + models_interdependent__model_b_null_sql, + models_interdependent__model_b_sql, + models_interdependent__model_c_sql, + models_interdependent__test_yml, models_simple_blocking__model_a_sql, models_simple_blocking__model_b_sql, models_simple_blocking__test_yml, models_triple_blocking__test_yml, - models_interdependent__test_yml, - models_interdependent__model_a_sql, - models_interdependent__model_b_sql, - models_interdependent__model_b_null_sql, - models_interdependent__model_c_sql, + seeds__country_csv, + snapshots__snap_0, + snapshots__snap_1, + snapshots__snap_99, + unit_tests__yml, ) @@ -56,8 +57,9 @@ def models(self): "model_0.sql": models__model_0_sql, "model_1.sql": models__model_1_sql, "model_2.sql": models__model_2_sql, + "model_3.sql": models__model_3_sql, "model_99.sql": models__model_99_sql, - "test.yml": models__test_yml, + "test.yml": models__test_yml + unit_tests__yml, } def test_build_happy_path(self, project): @@ -73,14 +75,14 @@ def models(self): "model_2.sql": models__model_2_sql, "model_3.sql": models__model_3_sql, "model_99.sql": models__model_99_sql, - "test.yml": models__test_yml, + "test.yml": models__test_yml + unit_tests__yml, } def test_failing_test_skips_downstream(self, project): results = run_dbt(["build"], expect_pass=False) - assert len(results) == 13 + assert len(results) == 14 actual = [str(r.status) for r in results] - expected = ["error"] * 1 + ["skipped"] * 5 + ["pass"] * 2 + ["success"] * 5 + expected = ["error"] * 1 + ["skipped"] * 6 + ["pass"] * 2 + ["success"] * 5 assert sorted(actual) == sorted(expected) @@ -210,7 +212,9 @@ def models(self): def test_downstream_selection(self, project): """Ensure that selecting test+ does not select model_a's other children""" - results = run_dbt(["build", "--select", "model_a not_null_model_a_id+"], expect_pass=True) + # fails with "Got 1 result, configured to fail if != 0" + # model_a is defined as select null as id + results = run_dbt(["build", "--select", "model_a not_null_model_a_id+"], expect_pass=False) assert len(results) == 2 @@ -226,5 +230,6 @@ def models(self): def test_limited_upstream_selection(self, project): """Ensure that selecting 1+model_c only selects up to model_b (+ tests of both)""" - results = run_dbt(["build", "--select", "1+model_c"], expect_pass=True) + # Fails with "relation "test17005969872609282880_test_build.model_a" does not exist" + results = run_dbt(["build", "--select", "1+model_c"], expect_pass=False) assert len(results) == 4 diff --git a/tests/functional/clean/test_clean.py b/tests/functional/clean/test_clean.py new file mode 100644 index 00000000000..5da8144f48f --- /dev/null +++ b/tests/functional/clean/test_clean.py @@ -0,0 +1,56 @@ +import pytest + +from dbt.exceptions import 
DbtRuntimeError +from dbt.tests.util import run_dbt + + +class TestCleanSourcePath: + @pytest.fixture(scope="class") + def project_config_update(self): + return "clean-targets: ['models']" + + def test_clean_source_path(self, project): + with pytest.raises(DbtRuntimeError, match="dbt will not clean the following source paths"): + run_dbt(["clean"]) + + +class TestCleanPathOutsideProjectRelative: + @pytest.fixture(scope="class") + def project_config_update(self): + return "clean-targets: ['..']" + + def test_clean_path_outside_project(self, project): + with pytest.raises( + DbtRuntimeError, + match="dbt will not clean the following directories outside the project", + ): + run_dbt(["clean"]) + + +class TestCleanPathOutsideProjectAbsolute: + @pytest.fixture(scope="class") + def project_config_update(self): + return "clean-targets: ['/']" + + def test_clean_path_outside_project(self, project): + with pytest.raises( + DbtRuntimeError, + match="dbt will not clean the following directories outside the project", + ): + run_dbt(["clean"]) + + +class TestCleanPathOutsideProjectWithFlag: + @pytest.fixture(scope="class") + def project_config_update(self): + return "clean-targets: ['/tmp/foo']" + + def test_clean_path_outside_project(self, project): + # Doesn't fail because flag is set + run_dbt(["clean", "--no-clean-project-files-only"]) + + with pytest.raises( + DbtRuntimeError, + match="dbt will not clean the following directories outside the project", + ): + run_dbt(["clean", "--clean-project-files-only"]) diff --git a/tests/functional/cli/test_cli_exit_codes.py b/tests/functional/cli/test_cli_exit_codes.py index 71c1097ba6a..2853a5d5c3c 100644 --- a/tests/functional/cli/test_cli_exit_codes.py +++ b/tests/functional/cli/test_cli_exit_codes.py @@ -3,7 +3,6 @@ from dbt.cli.exceptions import ResultExit from dbt.cli.main import cli - good_sql = """ select 1 as fun """ diff --git a/tests/functional/cli/test_env_var_deprecations.py b/tests/functional/cli/test_env_var_deprecations.py index 6880cc6890d..42a5afab88e 100644 --- a/tests/functional/cli/test_env_var_deprecations.py +++ b/tests/functional/cli/test_env_var_deprecations.py @@ -1,8 +1,8 @@ -import pytest import os -from dbt.tests.util import read_file, run_dbt +import pytest +from dbt.tests.util import read_file, run_dbt model_one_sql = """ select 1 as fun diff --git a/tests/functional/cli/test_error_handling.py b/tests/functional/cli/test_error_handling.py index 83c8a6fc47c..1eab78a0418 100644 --- a/tests/functional/cli/test_error_handling.py +++ b/tests/functional/cli/test_error_handling.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - model_one_sql = """ someting bad """ diff --git a/tests/functional/cli/test_multioption.py b/tests/functional/cli/test_multioption.py new file mode 100644 index 00000000000..59b233c5a98 --- /dev/null +++ b/tests/functional/cli/test_multioption.py @@ -0,0 +1,142 @@ +import pytest + +from dbt.tests.util import run_dbt + +model_one_sql = """ +select 1 as fun +""" + +schema_sql = """ +sources: + - name: my_source + description: "My source" + schema: test_schema + tables: + - name: my_table + - name: my_other_table + +exposures: + - name: weekly_jaffle_metrics + label: By the Week + type: dashboard + maturity: high + url: https://bi.tool/dashboards/1 + description: > + Did someone say "exponential growth"? 
+ depends_on: + - ref('model_one') + owner: + name: dbt Labs + email: data@jaffleshop.com +""" + + +class TestResourceType: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": schema_sql, "model_one.sql": model_one_sql} + + def test_resource_type_single(self, project): + result = run_dbt(["-q", "ls", "--resource-types", "model"]) + assert len(result) == 1 + assert result == ["test.model_one"] + + def test_resource_type_quoted(self, project): + result = run_dbt(["-q", "ls", "--resource-types", "model source"]) + assert len(result) == 3 + expected_result = { + "test.model_one", + "source:test.my_source.my_table", + "source:test.my_source.my_other_table", + } + assert set(result) == expected_result + + def test_resource_type_args(self, project): + result = run_dbt( + [ + "-q", + "ls", + "--resource-type", + "model", + "--resource-type", + "source", + "--resource-type", + "exposure", + ] + ) + assert len(result) == 4 + expected_result = { + "test.model_one", + "source:test.my_source.my_table", + "source:test.my_source.my_other_table", + "exposure:test.weekly_jaffle_metrics", + } + assert set(result) == expected_result + + +class TestOutputKeys: + @pytest.fixture(scope="class") + def models(self): + return {"model_one.sql": model_one_sql} + + def test_output_key_single(self, project): + result = run_dbt(["-q", "ls", "--output", "json", "--output-keys", "name"]) + assert len(result) == 1 + assert result == ['{"name": "model_one"}'] + + def test_output_key_quoted(self, project): + result = run_dbt(["-q", "ls", "--output", "json", "--output-keys", "name resource_type"]) + + assert len(result) == 1 + assert result == ['{"name": "model_one", "resource_type": "model"}'] + + def test_output_key_args(self, project): + result = run_dbt( + [ + "-q", + "ls", + "--output", + "json", + "--output-keys", + "name", + "--output-keys", + "resource_type", + ] + ) + + assert len(result) == 1 + assert result == ['{"name": "model_one", "resource_type": "model"}'] + + +class TestSelectExclude: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + "model_two.sql": model_one_sql, + "model_three.sql": model_one_sql, + } + + def test_select_exclude_single(self, project): + result = run_dbt(["-q", "ls", "--select", "model_one"]) + assert len(result) == 1 + assert result == ["test.model_one"] + result = run_dbt(["-q", "ls", "--exclude", "model_one"]) + assert len(result) == 2 + assert "test.model_one" not in result + + def test_select_exclude_quoted(self, project): + result = run_dbt(["-q", "ls", "--select", "model_one model_two"]) + assert len(result) == 2 + assert "test.model_three" not in result + result = run_dbt(["-q", "ls", "--exclude", "model_one model_two"]) + assert len(result) == 1 + assert result == ["test.model_three"] + + def test_select_exclude_args(self, project): + result = run_dbt(["-q", "ls", "--select", "model_one", "--select", "model_two"]) + assert len(result) == 2 + assert "test.model_three" not in result + result = run_dbt(["-q", "ls", "--exclude", "model_one", "--exclude", "model_two"]) + assert len(result) == 1 + assert result == ["test.model_three"] diff --git a/tests/functional/cli/test_resolvers.py b/tests/functional/cli/test_resolvers.py index 2ce8e17ceba..e25b6651aa5 100644 --- a/tests/functional/cli/test_resolvers.py +++ b/tests/functional/cli/test_resolvers.py @@ -1,6 +1,8 @@ +from pathlib import Path + import pytest + from dbt.cli.resolvers import default_log_path -from pathlib import Path class 
TestDefaultLogPathNoProject: diff --git a/tests/functional/colors/test_colors.py b/tests/functional/colors/test_colors.py index f42591c2b6a..3f731108d18 100644 --- a/tests/functional/colors/test_colors.py +++ b/tests/functional/colors/test_colors.py @@ -1,7 +1,8 @@ -import pytest import re -from dbt.tests.util import run_dbt_and_capture +import pytest + +from dbt.tests.util import run_dbt_and_capture models__do_nothing_then_fail_sql = """ select 1, diff --git a/tests/functional/compile/fixtures.py b/tests/functional/compile/fixtures.py index fff9a11bfcb..97ccd6d16b6 100644 --- a/tests/functional/compile/fixtures.py +++ b/tests/functional/compile/fixtures.py @@ -42,6 +42,15 @@ select sum(n) from t; """ +first_ephemeral_model_with_alias_sql = """ +{{ config(materialized = 'ephemeral', alias = 'first_alias') }} +select 1 as fun +""" + +second_ephemeral_model_with_alias_sql = """ +select * from {{ ref('first_ephemeral_model_with_alias') }} +""" + schema_yml = """ version: 2 @@ -50,9 +59,9 @@ description: "The second model" columns: - name: fun - tests: + data_tests: - not_null - name: schema - tests: + data_tests: - unique """ diff --git a/tests/functional/compile/test_compile.py b/tests/functional/compile/test_compile.py index 9f21c8d4538..956169eddf1 100644 --- a/tests/functional/compile/test_compile.py +++ b/tests/functional/compile/test_compile.py @@ -1,20 +1,24 @@ import json import pathlib -import pytest import re -from dbt.cli.main import dbtRunner -from dbt.exceptions import DbtRuntimeError, Exception as DbtException -from dbt.tests.util import run_dbt, run_dbt_and_capture, read_file +import pytest + +from dbt.tests.util import read_file, run_dbt, run_dbt_and_capture +from dbt_common.exceptions import DbtBaseException as DbtException +from dbt_common.exceptions import DbtRuntimeError +from tests.functional.assertions.test_runner import dbtTestRunner from tests.functional.compile.fixtures import ( - first_model_sql, - second_model_sql, first_ephemeral_model_sql, + first_ephemeral_model_with_alias_sql, + first_model_sql, + model_multiline_jinja, + schema_yml, second_ephemeral_model_sql, + second_ephemeral_model_with_alias_sql, + second_model_sql, third_ephemeral_model_sql, with_recursive_model_sql, - schema_yml, - model_multiline_jinja, ) @@ -49,9 +53,8 @@ def test_default(self, project): assert get_lines("first_model") == ["select 1 as fun"] assert any("_test_compile as schema" in line for line in get_lines("second_model")) - @pytest.mark.skip("Investigate flaky test #7179") def test_no_introspect(self, project): - with pytest.raises(DbtRuntimeError): + with pytest.raises(DbtRuntimeError, match="connection never acquired for thread"): run_dbt(["compile", "--no-introspect"]) @@ -127,6 +130,24 @@ def test_with_recursive_cte(self, project): ] +class TestEphemeralModelWithAlias: + @pytest.fixture(scope="class") + def models(self): + return { + "first_ephemeral_model_with_alias.sql": first_ephemeral_model_with_alias_sql, + "second_ephemeral_model_with_alias.sql": second_ephemeral_model_with_alias_sql, + } + + def test_compile(self, project): + run_dbt(["compile"]) + + assert get_lines("second_ephemeral_model_with_alias") == [ + "with __dbt__cte__first_alias as (", + "select 1 as fun", + ") select * from __dbt__cte__first_alias", + ] + + class TestCompile: @pytest.fixture(scope="class") def models(self): @@ -190,11 +211,11 @@ def test_output_json_inline(self, project): assert '"compiled"' in log_output def test_compile_inline_not_add_node(self, project): - dbt = dbtRunner() + dbt = 
dbtTestRunner() parse_result = dbt.invoke(["parse"]) manifest = parse_result.result assert len(manifest.nodes) == 4 - dbt = dbtRunner(manifest=manifest) + dbt = dbtTestRunner(manifest=manifest) dbt.invoke( ["compile", "--inline", "select * from {{ ref('second_model') }}"], populate_cache=False, @@ -219,7 +240,7 @@ def test_graph_summary_output(self, project): """Ensure that the compile command generates a file named graph_summary.json in the target directory, that the file contains valid json, and that the json has the high level structure it should.""" - dbtRunner().invoke(["compile"]) + dbtTestRunner().invoke(["compile"]) summary_path = pathlib.Path(project.project_root, "target/graph_summary.json") with open(summary_path, "r") as summary_file: summary = json.load(summary_file) diff --git a/tests/functional/configs/fixtures.py b/tests/functional/configs/fixtures.py index 20ffa78846b..63490289528 100644 --- a/tests/functional/configs/fixtures.py +++ b/tests/functional/configs/fixtures.py @@ -11,7 +11,7 @@ identifier: "{{ var('seed_name', 'invalid') }}" columns: - name: id - tests: + data_tests: - unique: enabled: "{{ var('enabled_direct', None) | as_native }}" - accepted_values: @@ -23,7 +23,7 @@ - name: model columns: - name: id - tests: + data_tests: - unique - accepted_values: values: [1,2,3,4] diff --git a/tests/functional/configs/test_configs.py b/tests/functional/configs/test_configs.py index 49a3222910a..2bbfac85c5c 100644 --- a/tests/functional/configs/test_configs.py +++ b/tests/functional/configs/test_configs.py @@ -1,9 +1,14 @@ -from hologram import ValidationError -import pytest import os -from dbt.exceptions import ParsingError -from dbt.tests.util import run_dbt, update_config_file, write_file, check_relations_equal +import pytest + +from dbt.tests.util import ( + check_relations_equal, + run_dbt, + update_config_file, + write_file, +) +from dbt_common.dataclass_schema import ValidationError from tests.functional.configs.fixtures import BaseConfigProject, simple_snapshot @@ -71,7 +76,7 @@ def test_alternative_target_paths(self, project): class TestInvalidTestsMaterializationProj(object): def test_tests_materialization_proj_config(self, project): - config_patch = {"tests": {"materialized": "table"}} + config_patch = {"data_tests": {"materialized": "table"}} update_config_file(config_patch, project.project_root, "dbt_project.yml") tests_dir = os.path.join(project.project_root, "tests") write_file("select * from foo", tests_dir, "test.sql") @@ -114,7 +119,7 @@ def test_snapshots_materialization_proj_config(self, project): snapshots_dir = os.path.join(project.project_root, "snapshots") write_file(simple_snapshot, snapshots_dir, "mysnapshot.sql") - with pytest.raises(ParsingError): + with pytest.raises(ValidationError): run_dbt() diff --git a/tests/functional/configs/test_configs_in_schema_files.py b/tests/functional/configs/test_configs_in_schema_files.py index a04b9ed43aa..e0b85686895 100644 --- a/tests/functional/configs/test_configs_in_schema_files.py +++ b/tests/functional/configs/test_configs_in_schema_files.py @@ -1,8 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest, check_relations_equal, write_file - from dbt.exceptions import CompilationError, ParsingError +from dbt.tests.util import check_relations_equal, get_manifest, run_dbt, write_file models_alt__schema_yml = """ version: 2 @@ -27,7 +26,7 @@ columns: - name: id - tests: + data_tests: - not_null: meta: owner: 'Simple Simon' @@ -102,7 +101,7 @@ models: - name: untagged description: "This is a 
model description" - tests: + data_tests: - not_null: error_if: ">2" config: @@ -250,3 +249,23 @@ def test_config_layering( write_file(extra_alt__untagged2_yml, project.project_root, "models", "untagged.yml") with pytest.raises(CompilationError): run_dbt(["run"]) + + +list_schema_yml = """ +- name: my_name +- name: alt_name +""" + + +class TestListSchemaFile: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": "select 1 as id", + "schema.yml": list_schema_yml, + } + + def test_list_schema(self, project): + with pytest.raises(ParsingError) as excinfo: + run_dbt(["run"]) + assert "Dictionary expected" in str(excinfo.value) diff --git a/tests/functional/configs/test_contract_configs.py b/tests/functional/configs/test_contract_configs.py index 092fcd51824..179b0058a8d 100644 --- a/tests/functional/configs/test_contract_configs.py +++ b/tests/functional/configs/test_contract_configs.py @@ -1,7 +1,15 @@ -import pytest import os + +import pytest + from dbt.exceptions import ParsingError, ValidationError -from dbt.tests.util import run_dbt, get_manifest, get_artifact, run_dbt_and_capture, write_file +from dbt.tests.util import ( + get_artifact, + get_manifest, + run_dbt, + run_dbt_and_capture, + write_file, +) my_model_sql = """ {{ @@ -85,7 +93,6 @@ def model(dbt, _): """ model_schema_yml = """ -version: 2 models: - name: my_model config: @@ -101,16 +108,93 @@ def model(dbt, _): - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color - data_type: text + data_type: string + - name: date_day + data_type: date +""" + +model_pk_model_column_schema_yml = """ +models: + - name: my_model + config: + contract: + enforced: true + constraints: + - type: primary_key + columns: [id] + columns: + - name: id + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + data_tests: + - unique + - name: color + data_type: string + - name: date_day + data_type: date +""" + +model_pk_mult_column_schema_yml = """ +models: + - name: my_model + config: + contract: + enforced: true + columns: + - name: id + quote: true + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + data_tests: + - unique + - name: color + data_type: string + constraints: + - type: not_null + - type: primary_key + - name: date_day + data_type: date +""" + +model_schema_alias_types_false_yml = """ +models: + - name: my_model + config: + contract: + enforced: true + alias_types: false + columns: + - name: id + quote: true + data_type: integer + description: hello + constraints: + - type: not_null + - type: primary_key + - type: check + expression: (id > 0) + data_tests: + - unique + - name: color + data_type: string - name: date_day data_type: date """ model_schema_ignore_unsupported_yml = """ -version: 2 models: - name: my_model config: @@ -129,7 +213,7 @@ def model(dbt, _): - type: check warn_unsupported: False expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text @@ -138,7 +222,6 @@ def model(dbt, _): """ model_schema_errors_yml = """ -version: 2 models: - name: my_model config: @@ -153,7 +236,7 @@ def model(dbt, _): - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text @@ -171,7 +254,7 @@ def model(dbt, _): - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text 
@@ -180,7 +263,6 @@ def model(dbt, _): """ model_schema_blank_yml = """ -version: 2 models: - name: my_model config: @@ -189,7 +271,6 @@ def model(dbt, _): """ model_schema_complete_datatypes_yml = """ -version: 2 models: - name: my_model columns: @@ -202,7 +283,7 @@ def model(dbt, _): - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color data_type: text @@ -211,7 +292,6 @@ def model(dbt, _): """ model_schema_incomplete_datatypes_yml = """ -version: 2 models: - name: my_model columns: @@ -224,7 +304,7 @@ def model(dbt, _): - type: primary_key - type: check expression: (id > 0) - tests: + data_tests: - unique - name: color - name: date_day @@ -251,7 +331,7 @@ def test__model_contract_true(self, project): assert contract_actual_config.enforced is True - expected_columns = "{'id': ColumnInfo(name='id', description='hello', meta={}, data_type='integer', constraints=[ColumnLevelConstraint(type=, name=None, expression=None, warn_unenforced=True, warn_unsupported=True), ColumnLevelConstraint(type=, name=None, expression=None, warn_unenforced=True, warn_unsupported=True), ColumnLevelConstraint(type=, name=None, expression='(id > 0)', warn_unenforced=True, warn_unsupported=True)], quote=True, tags=[], _extra={}), 'color': ColumnInfo(name='color', description='', meta={}, data_type='text', constraints=[], quote=None, tags=[], _extra={}), 'date_day': ColumnInfo(name='date_day', description='', meta={}, data_type='date', constraints=[], quote=None, tags=[], _extra={})}" + expected_columns = "{'id': ColumnInfo(name='id', description='hello', meta={}, data_type='integer', constraints=[ColumnLevelConstraint(type=, name=None, expression=None, warn_unenforced=True, warn_unsupported=True, to=None, to_columns=[]), ColumnLevelConstraint(type=, name=None, expression=None, warn_unenforced=True, warn_unsupported=True, to=None, to_columns=[]), ColumnLevelConstraint(type=, name=None, expression='(id > 0)', warn_unenforced=True, warn_unsupported=True, to=None, to_columns=[])], quote=True, tags=[], _extra={}, granularity=None), 'color': ColumnInfo(name='color', description='', meta={}, data_type='string', constraints=[], quote=None, tags=[], _extra={}, granularity=None), 'date_day': ColumnInfo(name='date_day', description='', meta={}, data_type='date', constraints=[], quote=None, tags=[], _extra={}, granularity=None)}" assert expected_columns == str(my_model_columns) @@ -264,6 +344,15 @@ def test__model_contract_true(self, project): == cleaned_code ) + # set alias_types to false (should fail to compile) + write_file( + model_schema_alias_types_false_yml, + project.project_root, + "models", + "constraints_schema.yml", + ) + run_dbt(["run"], expect_pass=False) + class TestProjectContractEnabledConfigs: @pytest.fixture(scope="class") @@ -486,3 +575,35 @@ def test__missing_column_contract_error(self, project): "This model has an enforced contract, and its 'columns' specification is missing" ) assert expected_error in results[0].message + + +# test primary key defined across model and column level constraints, expect error +class TestPrimaryKeysModelAndColumnLevelConstraints: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_pk_model_column_schema_yml, + "my_model.sql": my_model_sql, + } + + def test_model_column_pk_error(self, project): + expected_error = "Primary key constraints defined at the model level and the columns level" + with pytest.raises(ParsingError) as exc_info: + run_dbt(["run"]) + assert expected_error in 
str(exc_info.value) + + +# test primary key defined across multiple columns, expect error +class TestPrimaryKeysMultipleColumns: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_pk_mult_column_schema_yml, + "my_model.sql": my_model_sql, + } + + def test_pk_multiple_columns(self, project): + expected_error = "Found 2 columns (['id', 'color']) with primary key constraints defined" + with pytest.raises(ParsingError) as exc_info: + run_dbt(["run"]) + assert expected_error in str(exc_info.value) diff --git a/tests/functional/configs/test_custom_node_colors_configs.py b/tests/functional/configs/test_custom_node_colors_configs.py index a5c56b3ee0f..7772e3d44ca 100644 --- a/tests/functional/configs/test_custom_node_colors_configs.py +++ b/tests/functional/configs/test_custom_node_colors_configs.py @@ -1,8 +1,8 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest - -from hologram import ValidationError +from dbt.exceptions import ConfigUpdateError +from dbt.tests.util import get_manifest, run_dbt +from dbt_common.dataclass_schema import ValidationError CUSTOM_NODE_COLOR_MODEL_LEVEL = "red" CUSTOM_NODE_COLOR_SCHEMA_LEVEL = "blue" @@ -305,7 +305,7 @@ def test__invalid_color_config_block( self, project, ): - with pytest.raises(ValidationError): + with pytest.raises((ValidationError, ConfigUpdateError)): run_dbt(["compile"]) diff --git a/tests/functional/configs/test_disabled_configs.py b/tests/functional/configs/test_disabled_configs.py index ee56a39a867..f0176788777 100644 --- a/tests/functional/configs/test_disabled_configs.py +++ b/tests/functional/configs/test_disabled_configs.py @@ -1,7 +1,6 @@ import pytest from dbt.tests.util import run_dbt - from tests.functional.configs.fixtures import BaseConfigProject @@ -9,7 +8,6 @@ class TestDisabledConfigs(BaseConfigProject): @pytest.fixture(scope="class") def dbt_profile_data(self, unique_schema): return { - "config": {"send_anonymous_usage_stats": False}, "test": { "outputs": { "default": { @@ -62,7 +60,7 @@ def project_config_update(self): }, }, }, - "tests": { + "data_tests": { "test": { "enabled": "{{ (target.name == 'default') | as_bool }}", "severity": "WARN", @@ -90,3 +88,47 @@ def test_conditional_model(self, project): assert len(results) == 2 results = run_dbt(["test"]) assert len(results) == 5 + + +my_analysis_sql = """ +{{ + config(enabled=False) +}} +select 1 as id +""" + + +schema_yml = """ +models: + - name: my_analysis + description: "A Sample model" + config: + meta: + owner: Joe + +analyses: + - name: my_analysis + description: "A sample analysis" + config: + enabled: false +""" + + +class TestDisabledConfigsSameName: + @pytest.fixture(scope="class") + def models(self): + return { + "my_analysis.sql": my_analysis_sql, + "schema.yml": schema_yml, + } + + @pytest.fixture(scope="class") + def analyses(self): + return { + "my_analysis.sql": my_analysis_sql, + } + + def test_disabled_analysis(self, project): + manifest = run_dbt(["parse"]) + assert len(manifest.disabled) == 2 + assert len(manifest.nodes) == 0 diff --git a/tests/functional/configs/test_disabled_model.py b/tests/functional/configs/test_disabled_model.py index 4b6e74adffd..23cf8fde1e0 100644 --- a/tests/functional/configs/test_disabled_model.py +++ b/tests/functional/configs/test_disabled_model.py @@ -1,22 +1,21 @@ import pytest -from hologram import ValidationError -from dbt.tests.util import run_dbt, get_manifest from dbt.exceptions import CompilationError, ParsingError - +from dbt.tests.util import get_manifest, 
run_dbt +from dbt_common.dataclass_schema import ValidationError from tests.functional.configs.fixtures import ( - schema_all_disabled_yml, - schema_partial_enabled_yml, - schema_partial_disabled_yml, - schema_explicit_enabled_yml, - schema_invalid_enabled_yml, my_model, my_model_2, - my_model_2_enabled, my_model_2_disabled, + my_model_2_enabled, my_model_3, my_model_3_disabled, my_model_3_enabled, + schema_all_disabled_yml, + schema_explicit_enabled_yml, + schema_invalid_enabled_yml, + schema_partial_disabled_yml, + schema_partial_enabled_yml, ) diff --git a/tests/functional/configs/test_dupe_paths.py b/tests/functional/configs/test_dupe_paths.py index 95b76f5858f..7a12385a526 100644 --- a/tests/functional/configs/test_dupe_paths.py +++ b/tests/functional/configs/test_dupe_paths.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - my_model_sql = """ select 1 as fun """ diff --git a/tests/functional/configs/test_get_default.py b/tests/functional/configs/test_get_default.py index 597e88c6d65..245ce4c3242 100644 --- a/tests/functional/configs/test_get_default.py +++ b/tests/functional/configs/test_get_default.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - models_get__any_model_sql = """ -- models/any_model.sql select {{ config.get('made_up_nonexistent_key', 'default_value') }} as col_value diff --git a/tests/functional/configs/test_grant_configs.py b/tests/functional/configs/test_grant_configs.py index 64d3f48d4ca..8b1a4e40126 100644 --- a/tests/functional/configs/test_grant_configs.py +++ b/tests/functional/configs/test_grant_configs.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest, write_file, write_config_file +from dbt.tests.util import get_manifest, run_dbt, write_config_file, write_file dbt_project_yml = """ models: diff --git a/tests/functional/configs/test_indiv_tests.py b/tests/functional/configs/test_indiv_tests.py index 1da1652de27..707d7f66320 100644 --- a/tests/functional/configs/test_indiv_tests.py +++ b/tests/functional/configs/test_indiv_tests.py @@ -1,7 +1,6 @@ import pytest from dbt.tests.util import run_dbt - from tests.functional.configs.fixtures import BaseConfigProject @@ -17,7 +16,7 @@ def project_config_update(self): "seed_name": "seed", } }, - "tests": {"test": {"enabled": True, "severity": "WARN"}}, + "data_tests": {"test": {"enabled": True, "severity": "WARN"}}, } def test_configuring_individual_tests( diff --git a/tests/functional/configs/test_unused_configs.py b/tests/functional/configs/test_unused_configs.py index 1bc887b03f1..62b0fc6b3f0 100644 --- a/tests/functional/configs/test_unused_configs.py +++ b/tests/functional/configs/test_unused_configs.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt seeds__seed_csv = """id,value 4,2 @@ -30,7 +30,7 @@ def project_config_update(self): "enabled": True, } }, - "tests": { + "data_tests": { "test": { "enabled": True, } diff --git a/tests/functional/configs/test_versioned_model_constraint.py b/tests/functional/configs/test_versioned_model_constraint.py new file mode 100644 index 00000000000..3776585b573 --- /dev/null +++ b/tests/functional/configs/test_versioned_model_constraint.py @@ -0,0 +1,226 @@ +import pytest + +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file + +schema_yml = """ +models: + - name: foo + config: + materialized: table + contract: + enforced: true + constraints: + - type: primary_key + columns: 
[id, user_name] + columns: + - name: id + data_type: int + constraints: + - type: not_null + - name: user_name + data_type: text +""" + +foo_sql = """ +select 1 as id, 'alice' as user_name +""" + +foo_v2_sql = """ +select 1 as id, 'alice' as user_name, 2 as another_pk +""" + +versioned_schema_yml = """ +models: + - name: foo + latest_version: 1 + config: + materialized: table + contract: + enforced: true + constraints: + - type: primary_key + columns: [id, user_name] + columns: + - name: id + data_type: int + constraints: + - type: not_null + - name: user_name + data_type: text + versions: + - v: 1 +""" + +versioned_pk_model_column_schema_yml = """ +models: + - name: foo + latest_version: 2 + config: + materialized: table + contract: + enforced: true + constraints: + - type: primary_key + columns: [id] + columns: + - name: id + data_type: int + constraints: + - type: not_null + - name: user_name + data_type: text + versions: + - v: 1 + - v: 2 + columns: + - name: id + data_type: int + constraints: + - type: not_null + - type: primary_key + - name: user_name + data_type: text +""" + +versioned_pk_mult_columns_schema_yml = """ +models: + - name: foo + latest_version: 2 + config: + materialized: table + contract: + enforced: true + columns: + - name: id + data_type: int + constraints: + - type: not_null + - type: primary_key + - name: user_name + data_type: text + versions: + - v: 1 + - v: 2 + columns: + - name: id + data_type: int + constraints: + - type: not_null + - type: primary_key + - name: user_name + data_type: text + constraints: + - type: primary_key + +""" + + +class TestVersionedModelConstraints: + @pytest.fixture(scope="class") + def models(self): + return { + "foo.sql": foo_sql, + "schema.yml": schema_yml, + } + + def test_versioned_model_constraints(self, project): + results = run_dbt(["run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + model_node = manifest.nodes["model.test.foo"] + assert len(model_node.constraints) == 1 + + # remove foo.sql and create foo_v1.sql + rm_file(project.project_root, "models", "foo.sql") + write_file(foo_sql, project.project_root, "models", "foo_v1.sql") + write_file(versioned_schema_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["run"]) + assert len(results) == 1 + + manifest = get_manifest(project.project_root) + model_node = manifest.nodes["model.test.foo.v1"] + assert model_node.contract.enforced is True + assert len(model_node.constraints) == 1 + + +# test primary key defined across model and column level constraints, expect error +class TestPrimaryKeysModelAndColumnLevelConstraints: + @pytest.fixture(scope="class") + def models(self): + return { + "foo.sql": foo_sql, + "schema.yml": schema_yml, + } + + def test_model_column_pk_error(self, project): + results = run_dbt(["run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + model_node = manifest.nodes["model.test.foo"] + assert len(model_node.constraints) == 1 + + # remove foo.sql and create foo_v1.sql + rm_file(project.project_root, "models", "foo.sql") + write_file(foo_sql, project.project_root, "models", "foo_v1.sql") + write_file(versioned_schema_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["run"]) + assert len(results) == 1 + + manifest = get_manifest(project.project_root) + model_node = manifest.nodes["model.test.foo.v1"] + assert model_node.contract.enforced is True + assert len(model_node.constraints) == 1 + + # add foo_v2.sql + write_file(foo_sql, project.project_root, 
"models", "foo_v2.sql") + write_file( + versioned_pk_model_column_schema_yml, project.project_root, "models", "schema.yml" + ) + + expected_error = "Primary key constraints defined at the model level and the columns level" + with pytest.raises(ParsingError) as exc_info: + run_dbt(["run"]) + assert expected_error in str(exc_info.value) + + +# test primary key defined across multiple columns, expect error +class TestPrimaryKeysMultipleColumns: + @pytest.fixture(scope="class") + def models(self): + return { + "foo.sql": foo_sql, + "schema.yml": schema_yml, + } + + def test_pk_multiple_columns(self, project): + results = run_dbt(["run"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + model_node = manifest.nodes["model.test.foo"] + assert len(model_node.constraints) == 1 + + # remove foo.sql and create foo_v1.sql + rm_file(project.project_root, "models", "foo.sql") + write_file(foo_sql, project.project_root, "models", "foo_v1.sql") + write_file(versioned_schema_yml, project.project_root, "models", "schema.yml") + results = run_dbt(["run"]) + assert len(results) == 1 + + manifest = get_manifest(project.project_root) + model_node = manifest.nodes["model.test.foo.v1"] + assert model_node.contract.enforced is True + assert len(model_node.constraints) == 1 + + # add foo_v2.sql + write_file(foo_sql, project.project_root, "models", "foo_v2.sql") + write_file( + versioned_pk_mult_columns_schema_yml, project.project_root, "models", "schema.yml" + ) + + expected_error = ( + "Found 2 columns (['id', 'user_name']) with primary key constraints defined" + ) + with pytest.raises(ParsingError) as exc_info: + run_dbt(["run"]) + assert expected_error in str(exc_info.value) diff --git a/tests/functional/configs/test_warn_error_options.py b/tests/functional/configs/test_warn_error_options.py new file mode 100644 index 00000000000..c35ad5a9881 --- /dev/null +++ b/tests/functional/configs/test_warn_error_options.py @@ -0,0 +1,231 @@ +from typing import Dict, Union + +import pytest + +from dbt.cli.main import dbtRunner, dbtRunnerResult +from dbt.events.types import DeprecatedModel +from dbt.flags import get_flags +from dbt.tests.util import run_dbt, update_config_file +from dbt_common.events.base_types import EventLevel +from tests.utils import EventCatcher + +ModelsDictSpec = Dict[str, Union[str, "ModelsDictSpec"]] + +my_model_sql = """SELECT 1 AS id, 'cats are cute' AS description""" +schema_yml = """ +version: 2 +models: + - name: my_model + deprecation_date: 2020-01-01 +""" + + +@pytest.fixture(scope="class") +def models() -> ModelsDictSpec: + return {"my_model.sql": my_model_sql, "schema.yml": schema_yml} + + +@pytest.fixture(scope="function") +def catcher() -> EventCatcher: + return EventCatcher(event_to_catch=DeprecatedModel) + + +@pytest.fixture(scope="function") +def runner(catcher: EventCatcher) -> dbtRunner: + return dbtRunner(callbacks=[catcher.catch]) + + +def assert_deprecation_warning(result: dbtRunnerResult, catcher: EventCatcher) -> None: + assert result.success + assert result.exception is None + assert len(catcher.caught_events) == 1 + assert catcher.caught_events[0].info.level == EventLevel.WARN.value + + +def assert_deprecation_error(result: dbtRunnerResult) -> None: + assert not result.success + assert result.exception is not None + assert "Model my_model has passed its deprecation date of" in str(result.exception) + + +class TestWarnErrorOptionsFromCLI: + def test_can_silence(self, project, catcher: EventCatcher, runner: dbtRunner) -> None: + result = 
runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + catcher.flush() + result = runner.invoke(["run", "--warn-error-options", "{'silence': ['DeprecatedModel']}"]) + assert result.success + assert len(catcher.caught_events) == 0 + + def test_can_raise_warning_to_error( + self, project, catcher: EventCatcher, runner: dbtRunner + ) -> None: + result = runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + catcher.flush() + result = runner.invoke(["run", "--warn-error-options", "{'include': ['DeprecatedModel']}"]) + assert_deprecation_error(result) + + catcher.flush() + result = runner.invoke(["run", "--warn-error-options", "{'include': 'all'}"]) + assert_deprecation_error(result) + + catcher.flush() + result = runner.invoke(["run", "--warn-error-options", "{'error': ['DeprecatedModel']}"]) + assert_deprecation_error(result) + + catcher.flush() + result = runner.invoke(["run", "--warn-error-options", "{'error': 'all'}"]) + assert_deprecation_error(result) + + def test_can_exclude_specific_event( + self, project, catcher: EventCatcher, runner: dbtRunner + ) -> None: + result = runner.invoke(["run", "--warn-error-options", "{'include': 'all'}"]) + assert_deprecation_error(result) + + catcher.flush() + result = runner.invoke( + ["run", "--warn-error-options", "{'include': 'all', 'exclude': ['DeprecatedModel']}"] + ) + assert_deprecation_warning(result, catcher) + + catcher.flush() + result = runner.invoke( + ["run", "--warn-error-options", "{'include': 'all', 'warn': ['DeprecatedModel']}"] + ) + assert_deprecation_warning(result, catcher) + + def test_cant_set_both_include_and_error(self, project, runner: dbtRunner) -> None: + result = runner.invoke( + ["run", "--warn-error-options", "{'include': 'all', 'error': 'all'}"] + ) + assert not result.success + assert result.exception is not None + assert "Only `error` or `include` can be specified" in str(result.exception) + + def test_cant_set_both_exclude_and_warn(self, project, runner: dbtRunner) -> None: + result = runner.invoke( + [ + "run", + "--warn-error-options", + "{'include': 'all', 'exclude': ['DeprecatedModel'], 'warn': ['DeprecatedModel']}", + ] + ) + assert not result.success + assert result.exception is not None + assert "Only `warn` or `exclude` can be specified" in str(result.exception) + + +class TestWarnErrorOptionsFromProject: + @pytest.fixture(scope="function") + def clear_project_flags(self, project_root) -> None: + flags = {"flags": {}} + update_config_file(flags, project_root, "dbt_project.yml") + + def test_can_silence( + self, project, clear_project_flags, project_root, catcher: EventCatcher, runner: dbtRunner + ) -> None: + result = runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + silence_options = {"flags": {"warn_error_options": {"silence": ["DeprecatedModel"]}}} + update_config_file(silence_options, project_root, "dbt_project.yml") + + catcher.flush() + result = runner.invoke(["run"]) + assert result.success + assert len(catcher.caught_events) == 0 + + def test_can_raise_warning_to_error( + self, project, clear_project_flags, project_root, catcher: EventCatcher, runner: dbtRunner + ) -> None: + result = runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + include_options = {"flags": {"warn_error_options": {"include": ["DeprecatedModel"]}}} + update_config_file(include_options, project_root, "dbt_project.yml") + + catcher.flush() + result = runner.invoke(["run"]) + assert_deprecation_error(result) + + include_options = {"flags": 
{"warn_error_options": {"include": "all"}}} + update_config_file(include_options, project_root, "dbt_project.yml") + + catcher.flush() + result = runner.invoke(["run"]) + assert_deprecation_error(result) + + def test_can_exclude_specific_event( + self, project, clear_project_flags, project_root, catcher: EventCatcher, runner: dbtRunner + ) -> None: + include_options = {"flags": {"warn_error_options": {"include": "all"}}} + update_config_file(include_options, project_root, "dbt_project.yml") + result = runner.invoke(["run"]) + assert_deprecation_error(result) + + exclude_options = { + "flags": {"warn_error_options": {"include": "all", "exclude": ["DeprecatedModel"]}} + } + update_config_file(exclude_options, project_root, "dbt_project.yml") + + catcher.flush() + result = runner.invoke(["run"]) + assert_deprecation_warning(result, catcher) + + def test_cant_set_both_include_and_error( + self, project, clear_project_flags, project_root, runner: dbtRunner + ) -> None: + exclude_options = {"flags": {"warn_error_options": {"include": "all", "error": "all"}}} + update_config_file(exclude_options, project_root, "dbt_project.yml") + result = runner.invoke(["run"]) + assert not result.success + assert result.exception is not None + assert "Only `error` or `include` can be specified" in str(result.exception) + + def test_cant_set_both_exclude_and_warn( + self, project, clear_project_flags, project_root, runner: dbtRunner + ) -> None: + exclude_options = { + "flags": { + "warn_error_options": { + "include": "all", + "exclude": ["DeprecatedModel"], + "warn": ["DeprecatedModel"], + } + } + } + update_config_file(exclude_options, project_root, "dbt_project.yml") + result = runner.invoke(["run"]) + assert not result.success + assert result.exception is not None + assert "Only `warn` or `exclude` can be specified" in str(result.exception) + + +class TestEmptyWarnError: + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": my_model_sql, "schema.yml": schema_yml} + + # This tests for a bug in creating WarnErrorOptions when warn or + # error are set to None (in yaml = warn:) + def test_project_flags(self, project): + project_flags = { + "flags": { + "send_anonymous_usage_stats": False, + "warn_error_options": { + "warn": None, + "error": None, + "silence": ["TestsConfigDeprecation"], + }, + } + } + update_config_file(project_flags, project.project_root, "dbt_project.yml") + run_dbt(["run"]) + flags = get_flags() + # Note: WarnErrorOptions is not a dataclass, so you won't get "silence" + # from to_dict or stringifying. 
+ assert flags.warn_error_options.silence == ["TestsConfigDeprecation"] diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py new file mode 100644 index 00000000000..1e5ab4fa9f8 --- /dev/null +++ b/tests/functional/conftest.py @@ -0,0 +1,4 @@ +from tests.functional.fixtures.happy_path_fixture import ( # noqa:D + happy_path_project, + happy_path_project_files, +) diff --git a/tests/functional/constraints/fixtures.py b/tests/functional/constraints/fixtures.py new file mode 100644 index 00000000000..de60963bfec --- /dev/null +++ b/tests/functional/constraints/fixtures.py @@ -0,0 +1,115 @@ +model_foreign_key_model_schema_yml = """ +models: + - name: my_model + constraints: + - type: foreign_key + columns: [id] + to: ref('my_model_to') + to_columns: [id] + columns: + - name: id + data_type: integer +""" + + +model_foreign_key_source_schema_yml = """ +sources: + - name: test_source + tables: + - name: test_table + +models: + - name: my_model + constraints: + - type: foreign_key + columns: [id] + to: source('test_source', 'test_table') + to_columns: [id] + columns: + - name: id + data_type: integer +""" + + +model_foreign_key_model_node_not_found_schema_yml = """ +models: + - name: my_model + constraints: + - type: foreign_key + columns: [id] + to: ref('doesnt_exist') + to_columns: [id] + columns: + - name: id + data_type: integer +""" + + +model_foreign_key_model_invalid_syntax_schema_yml = """ +models: + - name: my_model + constraints: + - type: foreign_key + columns: [id] + to: invalid + to_columns: [id] + columns: + - name: id + data_type: integer +""" + + +model_foreign_key_model_column_schema_yml = """ +models: + - name: my_model + columns: + - name: id + data_type: integer + constraints: + - type: foreign_key + to: ref('my_model_to') + to_columns: [id] +""" + + +model_foreign_key_column_invalid_syntax_schema_yml = """ +models: + - name: my_model + columns: + - name: id + data_type: integer + constraints: + - type: foreign_key + to: invalid + to_columns: [id] +""" + + +model_foreign_key_column_node_not_found_schema_yml = """ +models: + - name: my_model + columns: + - name: id + data_type: integer + constraints: + - type: foreign_key + to: ref('doesnt_exist') + to_columns: [id] +""" + +model_column_level_foreign_key_source_schema_yml = """ +sources: + - name: test_source + tables: + - name: test_table + +models: + - name: my_model + columns: + - name: id + data_type: integer + constraints: + - type: foreign_key + to: source('test_source', 'test_table') + to_columns: [id] +""" diff --git a/tests/functional/constraints/test_foreign_key_constraints.py b/tests/functional/constraints/test_foreign_key_constraints.py new file mode 100644 index 00000000000..2c02cfe7ad7 --- /dev/null +++ b/tests/functional/constraints/test_foreign_key_constraints.py @@ -0,0 +1,241 @@ +import pytest + +from dbt.artifacts.resources import RefArgs +from dbt.exceptions import CompilationError, ParsingError +from dbt.tests.util import get_artifact, run_dbt +from dbt_common.contracts.constraints import ( + ColumnLevelConstraint, + ConstraintType, + ModelLevelConstraint, +) +from tests.functional.constraints.fixtures import ( + model_column_level_foreign_key_source_schema_yml, + model_foreign_key_column_invalid_syntax_schema_yml, + model_foreign_key_column_node_not_found_schema_yml, + model_foreign_key_model_column_schema_yml, + model_foreign_key_model_invalid_syntax_schema_yml, + model_foreign_key_model_node_not_found_schema_yml, + model_foreign_key_model_schema_yml, + 
model_foreign_key_source_schema_yml, +) + + +class TestModelLevelForeignKeyConstraintToRef: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_foreign_key_model_schema_yml, + "my_model.sql": "select 1 as id", + "my_model_to.sql": "select 1 as id", + } + + def test_model_level_fk_to(self, project, unique_schema): + manifest = run_dbt(["parse"]) + node_with_fk_constraint = manifest.nodes["model.test.my_model"] + assert len(node_with_fk_constraint.constraints) == 1 + + parsed_constraint = node_with_fk_constraint.constraints[0] + assert parsed_constraint == ModelLevelConstraint( + type=ConstraintType.foreign_key, + columns=["id"], + to="ref('my_model_to')", + to_columns=["id"], + ) + # Assert column-level constraint source included in node.depends_on + assert node_with_fk_constraint.refs == [RefArgs("my_model_to")] + assert node_with_fk_constraint.depends_on.nodes == ["model.test.my_model_to"] + assert node_with_fk_constraint.sources == [] + + # Assert compilation renders to from 'ref' to relation identifer + run_dbt(["compile"]) + manifest = get_artifact(project.project_root, "target", "manifest.json") + assert len(manifest["nodes"]["model.test.my_model"]["constraints"]) == 1 + + compiled_constraint = manifest["nodes"]["model.test.my_model"]["constraints"][0] + assert compiled_constraint["to"] == f'"dbt"."{unique_schema}"."my_model_to"' + # Other constraint fields should remain as parsed + assert compiled_constraint["to_columns"] == parsed_constraint.to_columns + assert compiled_constraint["columns"] == parsed_constraint.columns + assert compiled_constraint["type"] == parsed_constraint.type + + +class TestModelLevelForeignKeyConstraintToSource: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_foreign_key_source_schema_yml, + "my_model.sql": "select 1 as id", + "my_model_to.sql": "select 1 as id", + } + + def test_model_level_fk_to(self, project, unique_schema): + manifest = run_dbt(["parse"]) + node_with_fk_constraint = manifest.nodes["model.test.my_model"] + assert len(node_with_fk_constraint.constraints) == 1 + + parsed_constraint = node_with_fk_constraint.constraints[0] + assert parsed_constraint == ModelLevelConstraint( + type=ConstraintType.foreign_key, + columns=["id"], + to="source('test_source', 'test_table')", + to_columns=["id"], + ) + # Assert column-level constraint source included in node.depends_on + assert node_with_fk_constraint.refs == [] + assert node_with_fk_constraint.depends_on.nodes == ["source.test.test_source.test_table"] + assert node_with_fk_constraint.sources == [["test_source", "test_table"]] + + # Assert compilation renders to from 'ref' to relation identifer + run_dbt(["compile"]) + manifest = get_artifact(project.project_root, "target", "manifest.json") + assert len(manifest["nodes"]["model.test.my_model"]["constraints"]) == 1 + + compiled_constraint = manifest["nodes"]["model.test.my_model"]["constraints"][0] + assert compiled_constraint["to"] == '"dbt"."test_source"."test_table"' + # Other constraint fields should remain as parsed + assert compiled_constraint["to_columns"] == parsed_constraint.to_columns + assert compiled_constraint["columns"] == parsed_constraint.columns + assert compiled_constraint["type"] == parsed_constraint.type + + +class TestModelLevelForeignKeyConstraintRefNotFoundError: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_foreign_key_model_node_not_found_schema_yml, + "my_model.sql": "select 1 as 
id", + "my_model_to.sql": "select 1 as id", + } + + def test_model_level_fk_to_doesnt_exist(self, project): + with pytest.raises( + CompilationError, match="depends on a node named 'doesnt_exist' which was not found" + ): + run_dbt(["parse"]) + + +class TestModelLevelForeignKeyConstraintRefSyntaxError: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_foreign_key_model_invalid_syntax_schema_yml, + "my_model.sql": "select 1 as id", + "my_model_to.sql": "select 1 as id", + } + + def test_model_level_fk_to(self, project): + with pytest.raises( + ParsingError, + match="Invalid 'ref' or 'source' syntax on foreign key constraint 'to' on model my_model: invalid", + ): + run_dbt(["parse"]) + + +class TestColumnLevelForeignKeyConstraintToRef: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_foreign_key_model_column_schema_yml, + "my_model.sql": "select 1 as id", + "my_model_to.sql": "select 1 as id", + } + + def test_column_level_fk_to(self, project, unique_schema): + manifest = run_dbt(["parse"]) + node_with_fk_constraint = manifest.nodes["model.test.my_model"] + assert len(node_with_fk_constraint.columns["id"].constraints) == 1 + + parsed_constraint = node_with_fk_constraint.columns["id"].constraints[0] + # Assert column-level constraint parsed + assert parsed_constraint == ColumnLevelConstraint( + type=ConstraintType.foreign_key, to="ref('my_model_to')", to_columns=["id"] + ) + # Assert column-level constraint ref included in node.depends_on + assert node_with_fk_constraint.refs == [RefArgs(name="my_model_to")] + assert node_with_fk_constraint.sources == [] + assert node_with_fk_constraint.depends_on.nodes == ["model.test.my_model_to"] + + # Assert compilation renders to from 'ref' to relation identifer + run_dbt(["compile"]) + manifest = get_artifact(project.project_root, "target", "manifest.json") + assert len(manifest["nodes"]["model.test.my_model"]["columns"]["id"]["constraints"]) == 1 + + compiled_constraint = manifest["nodes"]["model.test.my_model"]["columns"]["id"][ + "constraints" + ][0] + assert compiled_constraint["to"] == f'"dbt"."{unique_schema}"."my_model_to"' + # Other constraint fields should remain as parsed + assert compiled_constraint["to_columns"] == parsed_constraint.to_columns + assert compiled_constraint["type"] == parsed_constraint.type + + +class TestColumnLevelForeignKeyConstraintToSource: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_column_level_foreign_key_source_schema_yml, + "my_model.sql": "select 1 as id", + "my_model_to.sql": "select 1 as id", + } + + def test_model_level_fk_to(self, project, unique_schema): + manifest = run_dbt(["parse"]) + node_with_fk_constraint = manifest.nodes["model.test.my_model"] + assert len(node_with_fk_constraint.columns["id"].constraints) == 1 + + parsed_constraint = node_with_fk_constraint.columns["id"].constraints[0] + assert parsed_constraint == ColumnLevelConstraint( + type=ConstraintType.foreign_key, + to="source('test_source', 'test_table')", + to_columns=["id"], + ) + # Assert column-level constraint source included in node.depends_on + assert node_with_fk_constraint.refs == [] + assert node_with_fk_constraint.depends_on.nodes == ["source.test.test_source.test_table"] + assert node_with_fk_constraint.sources == [["test_source", "test_table"]] + + # Assert compilation renders to from 'ref' to relation identifer + run_dbt(["compile"]) + manifest = get_artifact(project.project_root, 
"target", "manifest.json") + assert len(manifest["nodes"]["model.test.my_model"]["columns"]["id"]["constraints"]) == 1 + + compiled_constraint = manifest["nodes"]["model.test.my_model"]["columns"]["id"][ + "constraints" + ][0] + assert compiled_constraint["to"] == '"dbt"."test_source"."test_table"' + # # Other constraint fields should remain as parsed + assert compiled_constraint["to_columns"] == parsed_constraint.to_columns + assert compiled_constraint["type"] == parsed_constraint.type + + +class TestColumnLevelForeignKeyConstraintRefNotFoundError: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_foreign_key_column_node_not_found_schema_yml, + "my_model.sql": "select 1 as id", + "my_model_to.sql": "select 1 as id", + } + + def test_model_level_fk_to_doesnt_exist(self, project): + with pytest.raises( + CompilationError, match="depends on a node named 'doesnt_exist' which was not found" + ): + run_dbt(["parse"]) + + +class TestColumnLevelForeignKeyConstraintRefSyntaxError: + @pytest.fixture(scope="class") + def models(self): + return { + "constraints_schema.yml": model_foreign_key_column_invalid_syntax_schema_yml, + "my_model.sql": "select 1 as id", + "my_model_to.sql": "select 1 as id", + } + + def test_model_level_fk_to(self, project): + with pytest.raises( + ParsingError, + match="Invalid 'ref' or 'source' syntax on foreign key constraint 'to' on model my_model: invalid.", + ): + run_dbt(["parse"]) diff --git a/tests/functional/context_methods/test_builtin_functions.py b/tests/functional/context_methods/test_builtin_functions.py index 3acaf25170c..727df1b63bc 100644 --- a/tests/functional/context_methods/test_builtin_functions.py +++ b/tests/functional/context_methods/test_builtin_functions.py @@ -1,9 +1,10 @@ -import pytest import json import os -from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file +import pytest + from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file macros__validate_set_sql = """ {% macro validate_set() %} diff --git a/tests/functional/context_methods/test_cli_var_override.py b/tests/functional/context_methods/test_cli_var_override.py index 22b26697bbf..b171c880451 100644 --- a/tests/functional/context_methods/test_cli_var_override.py +++ b/tests/functional/context_methods/test_cli_var_override.py @@ -2,14 +2,13 @@ from dbt.tests.util import run_dbt - models_override__schema_yml = """ version: 2 models: - name: test_vars columns: - name: field - tests: + data_tests: - accepted_values: values: - override diff --git a/tests/functional/context_methods/test_cli_vars.py b/tests/functional/context_methods/test_cli_vars.py index 5f5b222f5da..8c563335ed0 100644 --- a/tests/functional/context_methods/test_cli_vars.py +++ b/tests/functional/context_methods/test_cli_vars.py @@ -1,12 +1,16 @@ import pytest import yaml -from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 - -from dbt.tests.util import run_dbt, get_artifact, write_config_file +from dbt.exceptions import CompilationError, DbtRuntimeError from dbt.tests.fixtures.project import write_project_files -from dbt.exceptions import DbtRuntimeError, CompilationError - +from dbt.tests.util import ( + get_artifact, + get_logging_events, + run_dbt, + run_dbt_and_capture, + write_config_file, +) +from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 models_complex__schema_yml = """ version: 2 @@ -14,17 +18,17 @@ - name: complex_model 
columns: - name: var_1 - tests: + data_tests: - accepted_values: values: - abc - name: var_2 - tests: + data_tests: - accepted_values: values: - def - name: var_3 - tests: + data_tests: - accepted_values: values: - jkl @@ -43,7 +47,7 @@ - name: simple_model columns: - name: simple - tests: + data_tests: - accepted_values: values: - abc @@ -206,3 +210,76 @@ def test_vars_in_selectors(self, project): # Var in cli_vars works results = run_dbt(["run", "--vars", "snapshot_target: dev"]) assert len(results) == 1 + + +models_scrubbing__schema_yml = """ +version: 2 +models: +- name: simple_model + columns: + - name: simple + data_tests: + - accepted_values: + values: + - abc +""" + +models_scrubbing__simple_model_sql = """ +select + '{{ var("DBT_ENV_SECRET_simple") }}'::varchar as simple +""" + + +class TestCLIVarsScrubbing: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": models_scrubbing__schema_yml, + "simple_model.sql": models_scrubbing__simple_model_sql, + } + + def test__run_results_scrubbing(self, project): + results, output = run_dbt_and_capture( + [ + "--debug", + "--log-format", + "json", + "run", + "--vars", + "{DBT_ENV_SECRET_simple: abc, unused: def}", + ] + ) + assert len(results) == 1 + + run_results = get_artifact(project.project_root, "target", "run_results.json") + assert run_results["args"]["vars"] == { + "DBT_ENV_SECRET_simple": "*****", + "unused": "def", + } + + log_events = get_logging_events(log_output=output, event_name="StateCheckVarsHash") + assert len(log_events) == 1 + assert ( + log_events[0]["data"]["vars"] == "{'DBT_ENV_SECRET_simple': '*****', 'unused': 'def'}" + ) + + def test__exception_scrubbing(self, project): + results, output = run_dbt_and_capture( + [ + "--debug", + "--log-format", + "json", + "run", + "--vars", + "{DBT_ENV_SECRET_unused: abc, unused: def}", + ], + False, + ) + assert len(results) == 1 + + log_events = get_logging_events(log_output=output, event_name="CatchableExceptionOnRun") + assert len(log_events) == 1 + assert ( + '{\n "DBT_ENV_SECRET_unused": "*****",\n "unused": "def"\n }' + in log_events[0]["info"]["msg"] + ) diff --git a/tests/functional/context_methods/test_custom_env_vars.py b/tests/functional/context_methods/test_custom_env_vars.py index e74a5dcee09..bf93d826fcd 100644 --- a/tests/functional/context_methods/test_custom_env_vars.py +++ b/tests/functional/context_methods/test_custom_env_vars.py @@ -1,7 +1,8 @@ -import pytest import json import os +import pytest + from dbt.tests.util import run_dbt_and_capture diff --git a/tests/functional/context_methods/test_env_vars.py b/tests/functional/context_methods/test_env_vars.py index 506ed40d31c..d1441dcb7e2 100644 --- a/tests/functional/context_methods/test_env_vars.py +++ b/tests/functional/context_methods/test_env_vars.py @@ -1,9 +1,10 @@ -import pytest import os -from dbt.constants import SECRET_ENV_PREFIX, DEFAULT_ENV_PLACEHOLDER -from dbt.tests.util import run_dbt, get_manifest, run_dbt_and_capture +import pytest +from dbt.constants import DEFAULT_ENV_PLACEHOLDER +from dbt.tests.util import get_manifest, run_dbt, run_dbt_and_capture +from dbt_common.constants import SECRET_ENV_PREFIX context_sql = """ @@ -55,13 +56,13 @@ def setup(self): os.environ["DBT_TEST_ENV_VAR"] = "1" os.environ["DBT_TEST_USER"] = "root" os.environ["DBT_TEST_PASS"] = "password" - os.environ[SECRET_ENV_PREFIX + "SECRET"] = "secret_variable" + os.environ[SECRET_ENV_PREFIX + "_SECRET"] = "secret_variable" os.environ["DBT_TEST_NOT_SECRET"] = "regular_variable" 
os.environ["DBT_TEST_IGNORE_DEFAULT"] = "ignored_default" yield del os.environ["DBT_TEST_ENV_VAR"] del os.environ["DBT_TEST_USER"] - del os.environ[SECRET_ENV_PREFIX + "SECRET"] + del os.environ[SECRET_ENV_PREFIX + "_SECRET"] del os.environ["DBT_TEST_NOT_SECRET"] del os.environ["DBT_TEST_IGNORE_DEFAULT"] @@ -191,3 +192,37 @@ def test_env_vars_secrets(self, project): assert not ("secret_variable" in log_output) assert "regular_variable" in log_output + del os.environ["DBT_DEBUG"] + + +class TestEnvVarInCreateSchema: + """Test that the env_var() method works in overrides of the create_schema + macro, which is called during a different phase of execution than most + macros, causing problems.""" + + @pytest.fixture(scope="class", autouse=True) + def setup(self): + os.environ["DBT_TEST_ENV_VAR"] = "1" + + @pytest.fixture(scope="class") + def macros(self): + return { + "macros.sql": """ + {% macro create_schema(relation) %} + {%- call statement('create_schema') -%} + SELECT {{ env_var('DBT_TEST_ENV_VAR') }} as TEST + {% endcall %} + {% endmacro %}% + """ + } + + @pytest.fixture(scope="class") + def models(self): + return { + "mymodel.sql": """ + SELECT 1 as TEST -- {%- do adapter.create_schema(this) -%} + """ + } + + def test_env_var_in_create_schema(self, project): + run_dbt(["run"]) diff --git a/tests/functional/context_methods/test_secret_env_vars.py b/tests/functional/context_methods/test_secret_env_vars.py index a95c20b7f91..68ece050ff4 100644 --- a/tests/functional/context_methods/test_secret_env_vars.py +++ b/tests/functional/context_methods/test_secret_env_vars.py @@ -1,11 +1,11 @@ -import pytest import os -from dbt.constants import SECRET_ENV_PREFIX -from dbt.exceptions import ParsingError, DbtInternalError -from tests.functional.context_methods.first_dependency import FirstDependencyProject -from dbt.tests.util import run_dbt, run_dbt_and_capture +import pytest +from dbt.exceptions import DbtInternalError, ParsingError +from dbt.tests.util import read_file, run_dbt, run_dbt_and_capture +from dbt_common.constants import SECRET_ENV_PREFIX +from tests.functional.context_methods.first_dependency import FirstDependencyProject secret_bad__context_sql = """ @@ -73,13 +73,15 @@ def test_disallow_secret(self, project): class TestAllowSecretProfilePackage(FirstDependencyProject): @pytest.fixture(scope="class", autouse=True) def setup(self): - os.environ[SECRET_ENV_PREFIX + "USER"] = "root" - os.environ[SECRET_ENV_PREFIX + "PASS"] = "password" - os.environ[SECRET_ENV_PREFIX + "PACKAGE"] = "first_dependency" + os.environ[SECRET_ENV_PREFIX + "_USER"] = "root" + os.environ[SECRET_ENV_PREFIX + "_PASS"] = "password" + os.environ[SECRET_ENV_PREFIX + "_PACKAGE"] = "first_dependency" + os.environ[SECRET_ENV_PREFIX + "_GIT_TOKEN"] = "abc123" yield - del os.environ[SECRET_ENV_PREFIX + "USER"] - del os.environ[SECRET_ENV_PREFIX + "PASS"] - del os.environ[SECRET_ENV_PREFIX + "PACKAGE"] + del os.environ[SECRET_ENV_PREFIX + "_USER"] + del os.environ[SECRET_ENV_PREFIX + "_PASS"] + del os.environ[SECRET_ENV_PREFIX + "_PACKAGE"] + del os.environ[SECRET_ENV_PREFIX + "_GIT_TOKEN"] @pytest.fixture(scope="class") def models(self): @@ -89,7 +91,19 @@ def models(self): def packages(self): return { "packages": [ - {"local": "{{ env_var('DBT_ENV_SECRET_PACKAGE') }}"}, + { + # the raw value of this secret *will* be written to lock file + "local": "{{ env_var('DBT_ENV_SECRET_PACKAGE') }}" + }, + { + # this secret env var will *not* be written to lock file + "git": "https://{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') 
}}@github.com/dbt-labs/dbt-external-tables.git" + }, + { + # this secret env var will *not* be written to lock file + "tarball": "https://{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}@github.com/dbt-labs/dbt-utils/archive/refs/tags/1.1.1.tar.gz", + "name": "dbt_utils", + }, ] } @@ -108,13 +122,22 @@ def profile_target(self): def test_allow_secrets(self, project, first_dependency): _, log_output = run_dbt_and_capture(["deps"]) + lock_file_contents = read_file("package-lock.yml") + + # this will not be written to logs or lock file + assert not ("abc123" in log_output) + assert not ("abc123" in lock_file_contents) + assert "{{ env_var('DBT_ENV_SECRET_GIT_TOKEN') }}" in lock_file_contents + + # this will be scrubbed from logs, but not from the lock file assert not ("first_dependency" in log_output) + assert "first_dependency" in lock_file_contents class TestCloneFailSecretScrubbed: @pytest.fixture(scope="class", autouse=True) def setup(self): - os.environ[SECRET_ENV_PREFIX + "GIT_TOKEN"] = "abc123" + os.environ[SECRET_ENV_PREFIX + "_GIT_TOKEN"] = "abc123" @pytest.fixture(scope="class") def models(self): diff --git a/tests/functional/context_methods/test_var_dependency.py b/tests/functional/context_methods/test_var_dependency.py index 9755c8c9ab8..5822091b3b3 100644 --- a/tests/functional/context_methods/test_var_dependency.py +++ b/tests/functional/context_methods/test_var_dependency.py @@ -1,9 +1,9 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.context_methods.first_dependency import ( - FirstDependencyProject, FirstDependencyConfigProject, + FirstDependencyProject, ) dependency_seeds__root_model_expected_csv = """first_dep_global,from_root diff --git a/tests/functional/context_methods/test_var_in_generate_name.py b/tests/functional/context_methods/test_var_in_generate_name.py index 2bbba457e58..d4c4e81d29a 100644 --- a/tests/functional/context_methods/test_var_in_generate_name.py +++ b/tests/functional/context_methods/test_var_in_generate_name.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt, update_config_file from dbt.exceptions import CompilationError +from dbt.tests.util import run_dbt, update_config_file model_sql = """ select 1 as id diff --git a/tests/functional/context_methods/test_yaml_functions.py b/tests/functional/context_methods/test_yaml_functions.py index d07fea670d9..e90da5f7254 100644 --- a/tests/functional/context_methods/test_yaml_functions.py +++ b/tests/functional/context_methods/test_yaml_functions.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - tests__from_yaml_sql = """ {% set simplest = (fromyaml('a: 1') == {'a': 1}) %} {% set nested_data %} diff --git a/tests/functional/contracts/test_contract_enforcement.py b/tests/functional/contracts/test_contract_enforcement.py new file mode 100644 index 00000000000..1d069f204d4 --- /dev/null +++ b/tests/functional/contracts/test_contract_enforcement.py @@ -0,0 +1,44 @@ +import pytest + +from dbt.tests.util import run_dbt, write_file + +my_model_sql = """ +select 'some string' as string_column +""" + +my_model_int_sql = """ +select 123 as int_column +""" + +model_schema_yml = """ +models: + - name: my_model + config: + materialized: incremental + on_schema_change: append_new_columns + contract: {enforced: true} + columns: + - name: string_column + data_type: text +""" + + +class TestIncrementalModelContractEnforcement: + @pytest.fixture(scope="class") + def models(self): + return { + 
"my_model.sql": my_model_sql, + "schema.yml": model_schema_yml, + } + + def test_contracted_incremental(self, project): + results = run_dbt() + assert len(results) == 1 + # now update the column type in the model to break the contract + write_file(my_model_int_sql, project.project_root, "models", "my_model.sql") + + expected_msg = "This model has an enforced contract that failed." + results = run_dbt(expect_pass=False) + assert len(results) == 1 + msg = results[0].message + assert expected_msg in msg diff --git a/tests/functional/contracts/test_contract_precision.py b/tests/functional/contracts/test_contract_precision.py new file mode 100644 index 00000000000..a4df2c8ace9 --- /dev/null +++ b/tests/functional/contracts/test_contract_precision.py @@ -0,0 +1,63 @@ +import pytest + +from dbt.tests.util import run_dbt_and_capture + +my_numeric_model_sql = """ +select + 1.234 as non_integer +""" + +model_schema_numerics_yml = """ +version: 2 +models: + - name: my_numeric_model + config: + contract: + enforced: true + columns: + - name: non_integer + data_type: numeric +""" + +model_schema_numerics_precision_yml = """ +version: 2 +models: + - name: my_numeric_model + config: + contract: + enforced: true + columns: + - name: non_integer + data_type: numeric(38,3) +""" + + +class TestModelContractNumericNoPrecision: + @pytest.fixture(scope="class") + def models(self): + return { + "my_numeric_model.sql": my_numeric_model_sql, + "schema.yml": model_schema_numerics_yml, + } + + def test_contracted_numeric_without_precision(self, project): + expected_msg = "Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: ['non_integer']" + _, logs = run_dbt_and_capture(["run"], expect_pass=True) + assert expected_msg in logs + _, logs = run_dbt_and_capture(["--warn-error", "run"], expect_pass=False) + assert "Compilation Error in model my_numeric_model" in logs + assert expected_msg in logs + + +class TestModelContractNumericPrecision: + @pytest.fixture(scope="class") + def models(self): + return { + "my_numeric_model.sql": my_numeric_model_sql, + "schema.yml": model_schema_numerics_precision_yml, + } + + def test_contracted_numeric_with_precision(self, project): + expected_msg = "Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: ['non_integer']" + _, logs = run_dbt_and_capture(["run"], expect_pass=True) + assert expected_msg not in logs diff --git a/tests/functional/contracts/test_nonstandard_data_type.py b/tests/functional/contracts/test_nonstandard_data_type.py new file mode 100644 index 00000000000..1bcb5e8bb65 --- /dev/null +++ b/tests/functional/contracts/test_nonstandard_data_type.py @@ -0,0 +1,76 @@ +import pytest + +from dbt.tests.util import run_dbt, run_dbt_and_capture + +my_numeric_model_sql = """ +select + 12.34 as price +""" + +my_money_model_sql = """ +select + cast('12.34' as money) as price +""" + +model_schema_money_yml = """ +models: + - name: my_model + config: + contract: + enforced: true + columns: + - name: price + data_type: money +""" + +model_schema_numeric_yml = """ +models: + - name: my_model + config: + contract: + enforced: true + columns: + - name: price + data_type: numeric +""" + + +class TestModelContractUnrecognizedTypeCode1: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_money_model_sql, + "schema.yml": model_schema_money_yml, + } + + def test_nonstandard_data_type(self, project): + run_dbt(["run"], expect_pass=True) + + +class 
TestModelContractUnrecognizedTypeCodeActualMismatch: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_money_model_sql, + "schema.yml": model_schema_numeric_yml, + } + + def test_nonstandard_data_type(self, project): + expected_msg = "unknown type_code 790 | DECIMAL | data type mismatch" + _, logs = run_dbt_and_capture(["run"], expect_pass=False) + assert expected_msg in logs + + +class TestModelContractUnrecognizedTypeCodeExpectedMismatch: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_numeric_model_sql, + "schema.yml": model_schema_money_yml, + } + + def test_nonstandard_data_type(self, project): + expected_msg = "DECIMAL | unknown type_code 790 | data type mismatch" + _, logs = run_dbt_and_capture(["run"], expect_pass=False) + print(logs) + assert expected_msg in logs diff --git a/tests/functional/custom_aliases/fixtures.py b/tests/functional/custom_aliases/fixtures.py index 6324e1249e4..dfa4d7a54cc 100644 --- a/tests/functional/custom_aliases/fixtures.py +++ b/tests/functional/custom_aliases/fixtures.py @@ -55,13 +55,13 @@ - name: model1 columns: - name: model_name - tests: + data_tests: - accepted_values: values: ['custom_alias'] - name: model2 columns: - name: model_name - tests: + data_tests: - accepted_values: values: ['model2'] diff --git a/tests/functional/custom_aliases/test_custom_aliases.py b/tests/functional/custom_aliases/test_custom_aliases.py index 86b44c3b3f0..22ff536c175 100644 --- a/tests/functional/custom_aliases/test_custom_aliases.py +++ b/tests/functional/custom_aliases/test_custom_aliases.py @@ -1,12 +1,11 @@ import pytest from dbt.tests.util import run_dbt - from tests.functional.custom_aliases.fixtures import ( + macros_config_sql, + macros_sql, model1_sql, model2_sql, - macros_sql, - macros_config_sql, schema_yml, ) diff --git a/tests/functional/custom_singular_tests/test_custom_singular_tests.py b/tests/functional/custom_singular_tests/test_custom_singular_tests.py index aec0586b873..4dc638f3827 100644 --- a/tests/functional/custom_singular_tests/test_custom_singular_tests.py +++ b/tests/functional/custom_singular_tests/test_custom_singular_tests.py @@ -1,7 +1,7 @@ -import pytest - from pathlib import Path +import pytest + from dbt.tests.util import run_dbt # from `test/integration/009_data_test` diff --git a/tests/functional/data_tests/test_hooks.py b/tests/functional/data_tests/test_hooks.py new file mode 100644 index 00000000000..60eee2f543f --- /dev/null +++ b/tests/functional/data_tests/test_hooks.py @@ -0,0 +1,111 @@ +from unittest import mock + +import pytest + +from dbt.tests.util import run_dbt, run_dbt_and_capture +from dbt_common.exceptions import CompilationError + +orders_csv = """order_id,order_date,customer_id +1,2024-06-01,1001 +2,2024-06-02,1002 +3,2024-06-03,1003 +4,2024-06-04,1004 +""" + + +orders_model_sql = """ +with source as ( + select + order_id, + order_date, + customer_id + from {{ ref('seed_orders') }} +), +final as ( + select + order_id, + order_date, + customer_id + from source +) +select * from final +""" + + +orders_test_sql = """ +select * +from {{ ref('orders') }} +where order_id is null +""" + + +class BaseSingularTestHooks: + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_orders.csv": orders_csv} + + @pytest.fixture(scope="class") + def models(self): + return {"orders.sql": orders_model_sql} + + @pytest.fixture(scope="class") + def tests(self): + return {"orders_test.sql": orders_test_sql} + + +class 
TestSingularTestPreHook(BaseSingularTestHooks): + def test_data_test_runs_adapter_pre_hook_pass(self, project): + results = run_dbt(["seed"]) + assert len(results) == 1 + + results = run_dbt(["run"]) + assert len(results) == 1 + + mock_pre_model_hook = mock.Mock() + with mock.patch.object(type(project.adapter), "pre_model_hook", mock_pre_model_hook): + results = run_dbt(["test"], expect_pass=True) + assert len(results) == 1 + mock_pre_model_hook.assert_called_once() + + def test_data_test_runs_adapter_pre_hook_fails(self, project): + results = run_dbt(["seed"]) + assert len(results) == 1 + + results = run_dbt(["run"]) + assert len(results) == 1 + + mock_pre_model_hook = mock.Mock() + mock_pre_model_hook.side_effect = CompilationError("exception from adapter.pre_model_hook") + with mock.patch.object(type(project.adapter), "pre_model_hook", mock_pre_model_hook): + (_, log_output) = run_dbt_and_capture(["test"], expect_pass=False) + assert "exception from adapter.pre_model_hook" in log_output + + +class TestSingularTestPostHook(BaseSingularTestHooks): + def test_data_test_runs_adapter_post_hook_pass(self, project): + results = run_dbt(["seed"]) + assert len(results) == 1 + + results = run_dbt(["run"]) + assert len(results) == 1 + + mock_post_model_hook = mock.Mock() + with mock.patch.object(type(project.adapter), "post_model_hook", mock_post_model_hook): + results = run_dbt(["test"], expect_pass=True) + assert len(results) == 1 + mock_post_model_hook.assert_called_once() + + def test_data_test_runs_adapter_post_hook_fails(self, project): + results = run_dbt(["seed"]) + assert len(results) == 1 + + results = run_dbt(["run"]) + assert len(results) == 1 + + mock_post_model_hook = mock.Mock() + mock_post_model_hook.side_effect = CompilationError( + "exception from adapter.post_model_hook" + ) + with mock.patch.object(type(project.adapter), "post_model_hook", mock_post_model_hook): + (_, log_output) = run_dbt_and_capture(["test"], expect_pass=False) + assert "exception from adapter.post_model_hook" in log_output diff --git a/tests/functional/dbt_runner/test_dbt_runner.py b/tests/functional/dbt_runner/test_dbt_runner.py index 20041f05952..b2e52d1237b 100644 --- a/tests/functional/dbt_runner/test_dbt_runner.py +++ b/tests/functional/dbt_runner/test_dbt_runner.py @@ -1,10 +1,15 @@ +import os from unittest import mock import pytest +from dbt.adapters.factory import FACTORY, reset_adapters from dbt.cli.exceptions import DbtUsageException from dbt.cli.main import dbtRunner from dbt.exceptions import DbtProjectError +from dbt.tests.util import read_file, write_file +from dbt.version import __version__ as dbt_version +from dbt_common.events.contextvars import get_node_info class TestDbtRunner: @@ -12,6 +17,12 @@ class TestDbtRunner: def dbt(self) -> dbtRunner: return dbtRunner() + @pytest.fixture(scope="class") + def models(self): + return { + "models.sql": "select 1 as id", + } + def test_group_invalid_option(self, dbt: dbtRunner) -> None: res = dbt.invoke(["--invalid-option"]) assert type(res.exception) == DbtUsageException @@ -23,6 +34,11 @@ def test_command_invalid_option(self, dbt: dbtRunner) -> None: def test_command_mutually_exclusive_option(self, dbt: dbtRunner) -> None: res = dbt.invoke(["--warn-error", "--warn-error-options", '{"include": "all"}', "deps"]) assert type(res.exception) == DbtUsageException + res = dbt.invoke(["deps", "--warn-error", "--warn-error-options", '{"include": "all"}']) + assert type(res.exception) == DbtUsageException + + res = dbt.invoke(["compile", "--select", "models", 
"--inline", "select 1 as id"]) + assert type(res.exception) == DbtUsageException def test_invalid_command(self, dbt: dbtRunner) -> None: res = dbt.invoke(["invalid-command"]) @@ -70,3 +86,80 @@ def test_invoke_kwargs_profiles_dir(self, project, dbt): def test_invoke_kwargs_and_flags(self, project, dbt): res = dbt.invoke(["--log-format=text", "run"], log_format="json") assert res.result.args["log_format"] == "json" + + def test_pass_in_manifest(self, project, dbt): + result = dbt.invoke(["parse"]) + manifest = result.result + + reset_adapters() + assert len(FACTORY.adapters) == 0 + result = dbtRunner(manifest=manifest).invoke(["run"]) + # Check that the adapters are registered again. + assert result.success + assert len(FACTORY.adapters) == 1 + + def test_pass_in_args_variable(self, dbt): + args = ["--log-format", "text"] + args_before = args.copy() + dbt.invoke(args) + assert args == args_before + + def test_directory_does_not_change(self, project, dbt: dbtRunner) -> None: + project_dir = os.getcwd() # The directory where dbt_project.yml exists. + os.chdir("../") + cmd_execution_dir = os.getcwd() # The directory where dbt command will be run + + commands = ["init", "deps", "clean"] + for command in commands: + args = [command, "--project-dir", project_dir] + if command == "init": + args.append("--skip-profile-setup") + res = dbt.invoke(args) + after_dir = os.getcwd() + assert res.success is True + assert cmd_execution_dir == after_dir + + +class TestDbtRunnerQueryComments: + @pytest.fixture(scope="class") + def models(self): + return { + "models.sql": "select 1 as id", + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "query-comment": { + "comment": f"comment: {dbt_version}", + "append": True, + } + } + + def test_query_comment_saved_manifest(self, project, logs_dir): + dbt = dbtRunner() + dbt.invoke(["build", "--select", "models"]) + result = dbt.invoke(["parse"]) + write_file("", logs_dir, "dbt.log") + # pass in manifest from parse command + dbt = dbtRunner(result.result) + dbt.invoke(["build", "--select", "models"]) + log_file = read_file(logs_dir, "dbt.log") + assert f"comment: {dbt_version}" in log_file + + +class TestDbtRunnerHooks: + @pytest.fixture(scope="class") + def models(self): + return { + "models.sql": "select 1 as id", + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"on-run-end": ["select 1;"]} + + def test_node_info_non_persistence(self, project): + dbt = dbtRunner() + dbt.invoke(["run", "--select", "models"]) + assert get_node_info() == {} diff --git a/tests/functional/defer_state/fixtures.py b/tests/functional/defer_state/fixtures.py index 7bf6885f85a..832bf258f56 100644 --- a/tests/functional/defer_state/fixtures.py +++ b/tests/functional/defer_state/fixtures.py @@ -67,7 +67,7 @@ - name: view_model columns: - name: id - tests: + data_tests: - unique: severity: error - not_null @@ -82,7 +82,7 @@ columns: - name: id data_type: integer - tests: + data_tests: - unique: severity: error - not_null @@ -100,7 +100,26 @@ columns: - name: id data_type: integer - tests: + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +disabled_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: True + enabled: False + columns: + - name: id + data_type: integer + data_tests: - unique: severity: error - not_null @@ -118,7 +137,7 @@ columns: - name: id data_type: integer - tests: + data_tests: - unique: severity: error - not_null 
@@ -126,7 +145,7 @@ data_type: text """ -disabled_contract_schema_yml = """ +unenforced_contract_schema_yml = """ version: 2 models: - name: table_model @@ -136,7 +155,26 @@ columns: - name: id data_type: integer - tests: + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +disabled_unenforced_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: False + enabled: False + columns: + - name: id + data_type: integer + data_tests: - unique: severity: error - not_null @@ -154,7 +192,7 @@ columns: - name: id data_type: integer - tests: + data_tests: - unique: severity: error - not_null @@ -174,7 +212,28 @@ columns: - name: id data_type: integer - tests: + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +disabled_versioned_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: True + enabled: False + versions: + - v: 1 + columns: + - name: id + data_type: integer + data_tests: - unique: severity: error - not_null @@ -194,7 +253,7 @@ columns: - name: id data_type: integer - tests: + data_tests: - unique: severity: error - not_null @@ -202,19 +261,40 @@ data_type: text """ -versioned_disabled_contract_schema_yml = """ +disabled_versioned_unenforced_contract_schema_yml = """ version: 2 models: - name: table_model config: contract: enforced: False + enabled: False versions: - v: 1 columns: - name: id data_type: integer - tests: + data_tests: + - unique: + severity: error + - not_null + - name: name + data_type: text +""" + +versioned_unenforced_contract_schema_yml = """ +version: 2 +models: + - name: table_model + config: + contract: + enforced: False + versions: + - v: 1 + columns: + - name: id + data_type: integer + data_tests: - unique: severity: error - not_null @@ -228,7 +308,7 @@ - name: view_model columns: - name: id - tests: + data_tests: - unique: severity: error - not_null @@ -245,7 +325,7 @@ constraints: - type: not_null data_type: integer - tests: + data_tests: - unique: severity: error - not_null @@ -259,7 +339,7 @@ - name: view_model columns: - name: id - tests: + data_tests: - unique: severity: error - not_null @@ -274,7 +354,7 @@ columns: - name: id data_type: integer - tests: + data_tests: - unique: severity: error - not_null @@ -288,7 +368,7 @@ - name: view_model columns: - name: id - tests: + data_tests: - unique: severity: error - not_null @@ -302,7 +382,7 @@ constraints: - type: not_null data_type: integer - tests: + data_tests: - unique: severity: error - not_null @@ -359,3 +439,104 @@ {% endsnapshot %} """ + + +semantic_model_schema_yml = """ +models: + - name: view_model + columns: + - name: id + data_tests: + - unique: + severity: error + - not_null + - name: name + +semantic_models: + - name: my_sm + model: ref('view_model') +""" + +modified_semantic_model_schema_yml = """ +models: + - name: view_model + columns: + - name: id + data_tests: + - unique: + severity: error + - not_null + - name: name + +semantic_models: + - name: my_sm + model: ref('view_model') + description: modified description +""" + +model_1_sql = """ +select * from {{ ref('seed') }} +""" + +modified_model_1_sql = """ +select * from {{ ref('seed') }} +order by 1 +""" + +model_2_sql = """ +select id from {{ ref('model_1') }} +""" + +modified_model_2_sql = """ +select * from {{ ref('model_1') }} +order by 1 +""" + + +group_schema_yml = """ +groups: + - name: finance + owner: + email: finance@jaffleshop.com + +models: + - 
name: model_1 + config: + group: finance + - name: model_2 + config: + group: finance +""" + + +group_modified_schema_yml = """ +groups: + - name: accounting + owner: + email: finance@jaffleshop.com +models: + - name: model_1 + config: + group: accounting + - name: model_2 + config: + group: accounting +""" + +group_modified_fail_schema_yml = """ +groups: + - name: finance + owner: + email: finance@jaffleshop.com +models: + - name: model_1 + config: + group: accounting + - name: model_2 + config: + group: finance +""" + +metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day +""" diff --git a/tests/functional/defer_state/test_defer_state.py b/tests/functional/defer_state/test_defer_state.py index f8b062e1076..994ece5aa61 100644 --- a/tests/functional/defer_state/test_defer_state.py +++ b/tests/functional/defer_state/test_defer_state.py @@ -1,28 +1,26 @@ -import json import os import shutil from copy import deepcopy import pytest -from dbt.cli.exceptions import DbtUsageException from dbt.contracts.results import RunStatus from dbt.exceptions import DbtRuntimeError -from dbt.tests.util import run_dbt, write_file, rm_file +from dbt.tests.util import rm_file, run_dbt, write_file from tests.functional.defer_state.fixtures import ( - seed_csv, - table_model_sql, + changed_ephemeral_model_sql, changed_table_model_sql, - view_model_sql, changed_view_model_sql, ephemeral_model_sql, - changed_ephemeral_model_sql, - schema_yml, exposures_yml, - macros_sql, infinite_macros_sql, + macros_sql, + schema_yml, + seed_csv, snapshot_sql, + table_model_sql, view_model_now_table_sql, + view_model_sql, ) @@ -88,28 +86,20 @@ def copy_state(self, project_root): def run_and_save_state(self, project_root, with_snapshot=False): results = run_dbt(["seed"]) assert len(results) == 1 - assert not any(r.node.deferred for r in results) results = run_dbt(["run"]) assert len(results) == 2 - assert not any(r.node.deferred for r in results) results = run_dbt(["test"]) assert len(results) == 2 if with_snapshot: results = run_dbt(["snapshot"]) assert len(results) == 1 - assert not any(r.node.deferred for r in results) # copy files self.copy_state(project_root) class TestDeferStateUnsupportedCommands(BaseDeferState): - def test_unsupported_commands(self, project): - # make sure these commands don"t work with --defer - with pytest.raises(DbtUsageException): - run_dbt(["seed", "--defer"]) - def test_no_state(self, project): # no "state" files present, snapshot fails with pytest.raises(DbtRuntimeError): @@ -178,8 +168,7 @@ def test_run_and_defer(self, project, unique_schema, other_schema): "otherschema", ] ) - assert other_schema not in catalog.nodes["seed.test.seed"].metadata.schema - assert unique_schema in catalog.nodes["seed.test.seed"].metadata.schema + assert "seed.test.seed" not in catalog.nodes # with state it should work though results = run_dbt( @@ -188,10 +177,6 @@ def test_run_and_defer(self, project, unique_schema, other_schema): assert other_schema not in results[0].node.compiled_code assert unique_schema in results[0].node.compiled_code - with open("target/manifest.json") as fp: - data = json.load(fp) - assert data["nodes"]["seed.test.seed"]["deferred"] - assert len(results) == 1 @@ -245,6 +230,26 @@ def test_run_defer_iff_not_exists(self, project, unique_schema, other_schema): assert len(results) == 2 assert other_schema not in results[0].node.compiled_code + # again with --favor-state, but this time select both the seed and the view + # because the seed is also selected, the view 
should select from the seed in our schema ('other_schema') + results = run_dbt( + [ + "build", + "--state", + "state", + "--select", + "seed view_model", + "--resource-type", + "seed model", + "--defer", + "--favor-state", + "--target", + "otherschema", + ] + ) + assert len(results) == 2 + assert other_schema in results[1].node.compiled_code + class TestDeferStateDeletedUpstream(BaseDeferState): def test_run_defer_deleted_upstream(self, project, unique_schema, other_schema): @@ -312,6 +317,9 @@ def test_defer_state_flag(self, project, unique_schema, other_schema): expect_pass=False, ) + # Test that retry of a defer command works + run_dbt(["retry"], expect_pass=False) + # this will fail because we haven't passed in --state with pytest.raises( DbtRuntimeError, match="Got a state selector method, but no comparison manifest" diff --git a/tests/functional/defer_state/test_group_updates.py b/tests/functional/defer_state/test_group_updates.py new file mode 100644 index 00000000000..ab1cf1f2460 --- /dev/null +++ b/tests/functional/defer_state/test_group_updates.py @@ -0,0 +1,119 @@ +import os + +import pytest + +from dbt.exceptions import ParsingError +from dbt.tests.util import copy_file, run_dbt, write_file +from tests.functional.defer_state.fixtures import ( + group_modified_fail_schema_yml, + group_modified_schema_yml, + group_schema_yml, + model_1_sql, + model_2_sql, + modified_model_1_sql, + modified_model_2_sql, + seed_csv, +) + + +class GroupSetup: + @pytest.fixture(scope="class") + def models(self): + return { + "model_1.sql": model_1_sql, + "model_2.sql": model_2_sql, + "schema.yml": group_schema_yml, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "seed.csv": seed_csv, + } + + def group_setup(self): + # save initial state + run_dbt(["seed"]) + results = run_dbt(["compile"]) + + # add sanity checks for first result + assert len(results) == 3 + seed_result = results[0].node + assert seed_result.unique_id == "seed.test.seed" + model_1_result = results[1].node + assert model_1_result.unique_id == "model.test.model_1" + assert model_1_result.group == "finance" + model_2_result = results[2].node + assert model_2_result.unique_id == "model.test.model_2" + assert model_2_result.group == "finance" + + +class TestFullyModifiedGroups(GroupSetup): + def test_changed_groups(self, project): + self.group_setup() + + # copy manifest.json to "state" directory + os.makedirs("state") + target_path = os.path.join(project.project_root, "target") + copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) + + # update group name, modify model so it gets picked up + write_file(modified_model_1_sql, "models", "model_1.sql") + write_file(modified_model_2_sql, "models", "model_2.sql") + write_file(group_modified_schema_yml, "models", "schema.yml") + + # this test is flaky if you don't clean first before the build + run_dbt(["clean"]) + # only thing in results should be model_1 + results = run_dbt(["build", "-s", "state:modified", "--defer", "--state", "./state"]) + + assert len(results) == 2 + model_1_result = results[0].node + assert model_1_result.unique_id == "model.test.model_1" + assert model_1_result.group == "accounting" # new group name! + model_2_result = results[1].node + assert model_2_result.unique_id == "model.test.model_2" + assert model_2_result.group == "accounting" # new group name! 
+ + +class TestPartiallyModifiedGroups(GroupSetup): + def test_changed_groups(self, project): + self.group_setup() + + # copy manifest.json to "state" directory + os.makedirs("state") + target_path = os.path.join(project.project_root, "target") + copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) + + # update group name, modify model so it gets picked up + write_file(modified_model_1_sql, "models", "model_1.sql") + write_file(group_modified_schema_yml, "models", "schema.yml") + + # this test is flaky if you don't clean first before the build + run_dbt(["clean"]) + # only thing in results should be model_1 + results = run_dbt(["build", "-s", "state:modified", "--defer", "--state", "./state"]) + + assert len(results) == 1 + model_1_result = results[0].node + assert model_1_result.unique_id == "model.test.model_1" + assert model_1_result.group == "accounting" # new group name! + + +class TestBadGroups(GroupSetup): + def test_changed_groups(self, project): + self.group_setup() + + # copy manifest.json to "state" directory + os.makedirs("state") + target_path = os.path.join(project.project_root, "target") + copy_file(target_path, "manifest.json", project.project_root, ["state", "manifest.json"]) + + # update group with invalid name, modify model so it gets picked up + write_file(modified_model_1_sql, "models", "model_1.sql") + write_file(group_modified_fail_schema_yml, "models", "schema.yml") + + # this test is flaky if you don't clean first before the build + run_dbt(["clean"]) + with pytest.raises(ParsingError, match="Invalid group 'accounting'"): + run_dbt(["build", "-s", "state:modified", "--defer", "--state", "./state"]) diff --git a/tests/functional/defer_state/test_modified_state.py b/tests/functional/defer_state/test_modified_state.py index 1aa28473b78..2ded38e742b 100644 --- a/tests/functional/defer_state/test_modified_state.py +++ b/tests/functional/defer_state/test_modified_state.py @@ -5,33 +5,45 @@ import pytest -from dbt.tests.util import run_dbt, update_config_file, write_file, get_manifest - from dbt.exceptions import CompilationError, ContractBreakingChangeError - +from dbt.tests.util import ( + get_manifest, + rm_file, + run_dbt, + run_dbt_and_capture, + update_config_file, + write_file, +) from tests.functional.defer_state.fixtures import ( - seed_csv, - table_model_sql, - view_model_sql, + constraint_schema_yml, + contract_schema_yml, + disabled_contract_schema_yml, + disabled_unenforced_contract_schema_yml, + disabled_versioned_contract_schema_yml, + disabled_versioned_unenforced_contract_schema_yml, ephemeral_model_sql, - schema_yml, exposures_yml, - macros_sql, infinite_macros_sql, - no_contract_schema_yml, - contract_schema_yml, - modified_contract_schema_yml, - disabled_contract_schema_yml, - constraint_schema_yml, - versioned_no_contract_schema_yml, - versioned_contract_schema_yml, - versioned_disabled_contract_schema_yml, - versioned_modified_contract_schema_yml, + macros_sql, + metricflow_time_spine_sql, modified_column_constraint_schema_yml, + modified_contract_schema_yml, modified_model_constraint_schema_yml, - table_model_now_view_sql, + modified_semantic_model_schema_yml, + no_contract_schema_yml, + schema_yml, + seed_csv, + semantic_model_schema_yml, table_model_now_incremental_sql, + table_model_now_view_sql, + table_model_sql, + unenforced_contract_schema_yml, + versioned_contract_schema_yml, + versioned_modified_contract_schema_yml, + versioned_no_contract_schema_yml, + versioned_unenforced_contract_schema_yml, 
view_model_now_table_sql, + view_model_sql, ) @@ -496,11 +508,11 @@ def test_changed_exposure(self, project): assert len(results) == 0 -class TestChangedContract(BaseModifiedState): +class TestChangedContractUnversioned(BaseModifiedState): MODEL_UNIQUE_ID = "model.test.table_model" CONTRACT_SCHEMA_YML = contract_schema_yml MODIFIED_SCHEMA_YML = modified_contract_schema_yml - DISABLED_SCHEMA_YML = disabled_contract_schema_yml + UNENFORCED_SCHEMA_YML = unenforced_contract_schema_yml NO_CONTRACT_SCHEMA_YML = no_contract_schema_yml def test_changed_contract(self, project): @@ -535,6 +547,90 @@ def test_changed_contract(self, project): # save a new state self.copy_state() + # This should raise because a column name has changed + write_file(self.MODIFIED_SCHEMA_YML, "models", "schema.yml") + results = run_dbt(["run"], expect_pass=False) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + second_contract_checksum = model.contract.checksum + # double check different contract_checksums + assert first_contract_checksum != second_contract_checksum + + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"], expect_pass=False + ) + expected_error = "This model has an enforced contract that failed." + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Please ensure the name, data_type, and number of columns in your contract match the columns in your model's definition" + assert expected_error in logs + assert expected_warning in logs + assert expected_change in logs + + # Go back to schema file without contract. Should throw a warning. + write_file(self.NO_CONTRACT_SCHEMA_YML, "models", "schema.yml") + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Contract enforcement was removed" + + # Now unenforce the contract. Should throw a warning - force warning into an error. 
+ write_file(self.UNENFORCED_SCHEMA_YML, "models", "schema.yml") + with pytest.raises(CompilationError): + _, logs = run_dbt_and_capture( + [ + "--warn-error", + "run", + "--models", + "state:modified.contract", + "--state", + "./state", + ] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Contract enforcement was removed" + + +class TestChangedContractVersioned(BaseModifiedState): + MODEL_UNIQUE_ID = "model.test.table_model.v1" + CONTRACT_SCHEMA_YML = versioned_contract_schema_yml + MODIFIED_SCHEMA_YML = versioned_modified_contract_schema_yml + UNENFORCED_SCHEMA_YML = versioned_unenforced_contract_schema_yml + NO_CONTRACT_SCHEMA_YML = versioned_no_contract_schema_yml + + def test_changed_contract_versioned(self, project): + self.run_and_save_state() + + # update contract for table_model + write_file(self.CONTRACT_SCHEMA_YML, "models", "schema.yml") + + # This will find the table_model node modified both through a config change + # and by a non-breaking change to contract: true + results = run_dbt(["run", "--models", "state:modified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + results = run_dbt(["run", "--exclude", "state:unmodified", "--state", "./state"]) + assert len(results) == 1 + assert results[0].node.name == "table_model" + + manifest = get_manifest(project.project_root) + model_unique_id = self.MODEL_UNIQUE_ID + model = manifest.nodes[model_unique_id] + expected_unrendered_config = {"contract": {"enforced": True}, "materialized": "table"} + assert model.unrendered_config == expected_unrendered_config + + # Run it again with "state:modified:contract", still finds modified due to contract: true + results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + model = manifest.nodes[model_unique_id] + first_contract_checksum = model.contract.checksum + assert first_contract_checksum + # save a new state + self.copy_state() + # This should raise because a column name has changed write_file(self.MODIFIED_SCHEMA_YML, "models", "schema.yml") results = run_dbt(["run"], expect_pass=False) @@ -552,21 +648,139 @@ def test_changed_contract(self, project): with pytest.raises(ContractBreakingChangeError): results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) - # Now disable the contract. Should raise an error. - write_file(self.DISABLED_SCHEMA_YML, "models", "schema.yml") + # Now unenforce the contract. Should raise an error. 
+ write_file(self.UNENFORCED_SCHEMA_YML, "models", "schema.yml") with pytest.raises(ContractBreakingChangeError): results = run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) -class TestChangedContractVersioned(TestChangedContract): +class TestDeleteUnversionedContractedModel(BaseModifiedState): + MODEL_UNIQUE_ID = "model.test.table_model" + CONTRACT_SCHEMA_YML = contract_schema_yml + + def test_delete_unversioned_contracted_model(self, project): + # ensure table_model is contracted + write_file(self.CONTRACT_SCHEMA_YML, "models", "schema.yml") + self.run_and_save_state() + + # delete versioned contracted model + rm_file(project.project_root, "models", "table_model.sql") + + # since the models are unversioned, they raise a warning but not an error + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Contracted model 'model.test.table_model' was deleted or renamed" + assert expected_warning in logs + assert expected_change in logs + + +class TestDeleteVersionedContractedModel(BaseModifiedState): MODEL_UNIQUE_ID = "model.test.table_model.v1" CONTRACT_SCHEMA_YML = versioned_contract_schema_yml - MODIFIED_SCHEMA_YML = versioned_modified_contract_schema_yml - DISABLED_SCHEMA_YML = versioned_disabled_contract_schema_yml - NO_CONTRACT_SCHEMA_YML = versioned_no_contract_schema_yml + def test_delete_versioned_contracted_model(self, project): + # ensure table_model is versioned + contracted + write_file(self.CONTRACT_SCHEMA_YML, "models", "schema.yml") + self.run_and_save_state() + + # delete versioned contracted model + rm_file(project.project_root, "models", "table_model.sql") + + # since the models are versioned, they raise an error + with pytest.raises(ContractBreakingChangeError) as e: + run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + + assert "Contracted model 'model.test.table_model.v1' was deleted or renamed." 
in str( + e.value + ) + + +class TestDisableUnversionedContractedModel(BaseModifiedState): + MODEL_UNIQUE_ID = "model.test.table_model" + CONTRACT_SCHEMA_YML = contract_schema_yml + DISABLED_CONTRACT_SCHEMA_YML = disabled_contract_schema_yml + + def test_disable_unversioned_contracted_model(self, project): + # ensure table_model is contracted and enabled + write_file(self.CONTRACT_SCHEMA_YML, "models", "schema.yml") + self.run_and_save_state() + + # disable unversioned + contracted model + write_file(self.DISABLED_CONTRACT_SCHEMA_YML, "models", "schema.yml") + + # since the models are unversioned, they raise a warning but not an error + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Contracted model 'model.test.table_model' was disabled" + assert expected_warning in logs + assert expected_change in logs + + +class TestDisableVersionedContractedModel(BaseModifiedState): + MODEL_UNIQUE_ID = "model.test.table_model.v1" + CONTRACT_SCHEMA_YML = versioned_contract_schema_yml + DISABLED_CONTRACT_SCHEMA_YML = disabled_versioned_contract_schema_yml + + def test_disable_versioned_contracted_model(self, project): + # ensure table_model is versioned + contracted + write_file(self.CONTRACT_SCHEMA_YML, "models", "schema.yml") + self.run_and_save_state() + + # disable versioned + contracted model + write_file(self.DISABLED_CONTRACT_SCHEMA_YML, "models", "schema.yml") + + # since the models are versioned, they raise an error + with pytest.raises(ContractBreakingChangeError) as e: + run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + + assert "Contracted model 'model.test.table_model.v1' was disabled." 
in str(e.value) + + +class TestDisableUnversionedUncontractedModel(BaseModifiedState): + MODEL_UNIQUE_ID = "model.test.table_model" + NO_CONTRACT_SCHEMA_YML = unenforced_contract_schema_yml + DISABLED_NO_CONTRACT_SCHEMA_YML = disabled_unenforced_contract_schema_yml + + def test_delete_versioned_contracted_model(self, project): + # ensure table_model is not contracted + write_file(self.NO_CONTRACT_SCHEMA_YML, "models", "schema.yml") + self.run_and_save_state() + + # disable uncontracted model + write_file(self.DISABLED_NO_CONTRACT_SCHEMA_YML, "models", "schema.yml") + + # since the models are unversioned, no warning or error is raised + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + + assert "breaking change" not in logs.lower() + + +class TestDisableVersionedUncontractedModel(BaseModifiedState): + MODEL_UNIQUE_ID = "model.test.table_model.v1" + NO_CONTRACT_SCHEMA_YML = versioned_unenforced_contract_schema_yml + DISABLED_NO_CONTRACT_SCHEMA_YML = disabled_versioned_unenforced_contract_schema_yml + + def test_delete_versioned_contracted_model(self, project): + # ensure table_model is not contracted + write_file(self.NO_CONTRACT_SCHEMA_YML, "models", "schema.yml") + self.run_and_save_state() + + # disable uncontracted model + write_file(self.DISABLED_NO_CONTRACT_SCHEMA_YML, "models", "schema.yml") -class TestChangedConstraint(BaseModifiedState): + # since the models are unversioned, no warning or error is raised + run_dbt_and_capture(["run", "--models", "state:modified.contract", "--state", "./state"]) + + +class TestChangedConstraintUnversioned(BaseModifiedState): def test_changed_constraint(self, project): self.run_and_save_state() @@ -609,8 +823,14 @@ def test_changed_constraint(self, project): second_contract_checksum = model.contract.checksum # double check different contract_checksums assert first_contract_checksum != second_contract_checksum - with pytest.raises(ContractBreakingChangeError): - run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + # since the models are unversioned, they raise a warning but not an error + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Enforced column level constraints were removed" + assert expected_warning in logs + assert expected_change in logs # This should raise because a model level constraint was removed (primary_key on id) write_file(modified_model_constraint_schema_yml, "models", "schema.yml") @@ -622,8 +842,13 @@ def test_changed_constraint(self, project): second_contract_checksum = model.contract.checksum # double check different contract_checksums assert first_contract_checksum != second_contract_checksum - with pytest.raises(ContractBreakingChangeError): - run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Enforced model level constraints were removed" + assert expected_warning in logs + assert expected_change in logs class TestChangedMaterializationConstraint(BaseModifiedState): @@ -669,8 +894,13 @@ def test_changed_materialization(self, project): second_contract_checksum = 
model.contract.checksum # double check different contract_checksums assert first_contract_checksum != second_contract_checksum - with pytest.raises(ContractBreakingChangeError): - run_dbt(["run", "--models", "state:modified.contract", "--state", "./state"]) + _, logs = run_dbt_and_capture( + ["run", "--models", "state:modified.contract", "--state", "./state"] + ) + expected_warning = "While comparing to previous project state, dbt detected a breaking change to an unversioned model" + expected_change = "Materialization changed with enforced constraints" + assert expected_warning in logs + assert expected_change in logs # This should not raise because materialization changed from table to incremental, both enforce constraints write_file(table_model_now_incremental_sql, "models", "table_model.sql") @@ -709,28 +939,35 @@ def test_changed_materialization(self, project): my_model_yml = """ models: - name: my_model + latest_version: 1 config: contract: enforced: true columns: - name: id data_type: int + versions: + - v: 1 """ modified_my_model_yml = """ models: - name: my_model + latest_version: 1 config: contract: enforced: true columns: - name: id data_type: text + versions: + - v: 1 """ modified_my_model_non_breaking_yml = """ models: - name: my_model + latest_version: 1 config: contract: enforced: true @@ -739,6 +976,8 @@ def test_changed_materialization(self, project): data_type: int - name: color data_type: text + versions: + - v: 1 """ @@ -780,4 +1019,128 @@ def test_modified_body_and_contract(self, project): # The model's contract has changed, even if non-breaking, so it should be selected by 'state:modified.contract' results = run_dbt(["list", "-s", "state:modified.contract", "--state", "./state"]) - assert results == ["test.my_model"] + assert results == ["test.my_model.v1"] + + +modified_table_model_access_yml = """ +version: 2 +models: + - name: table_model + access: public +""" + + +class TestModifiedAccess(BaseModifiedState): + def test_changed_access(self, project): + self.run_and_save_state() + + # No access change + assert not run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + + # Modify access (protected -> public) + write_file(modified_table_model_access_yml, "models", "schema.yml") + assert run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + assert results == ["test.table_model"] + + +modified_table_model_access_yml = """ +version: 2 +models: + - name: table_model + deprecation_date: 2020-01-01 +""" + + +class TestModifiedDeprecationDate(BaseModifiedState): + def test_changed_access(self, project): + self.run_and_save_state() + + # No access change + assert not run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + + # Modify deprecation_date (None -> 2020-01-01) + write_file(modified_table_model_access_yml, "models", "schema.yml") + assert run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + assert results == ["test.table_model"] + + +modified_table_model_version_yml = """ +version: 2 +models: + - name: table_model + versions: + - v: 1 + defined_in: table_model +""" + + +class TestModifiedVersion(BaseModifiedState): + def test_changed_access(self, project): + self.run_and_save_state() + + # Change version (null -> v1) + write_file(modified_table_model_version_yml, "models", "schema.yml") + + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + 
assert results == ["test.table_model.v1"] + + +table_model_latest_version_yml = """ +version: 2 +models: + - name: table_model + latest_version: 1 + versions: + - v: 1 + defined_in: table_model +""" + + +modified_table_model_latest_version_yml = """ +version: 2 +models: + - name: table_model + latest_version: 2 + versions: + - v: 1 + defined_in: table_model + - v: 2 +""" + + +class TestModifiedLatestVersion(BaseModifiedState): + def test_changed_access(self, project): + # Setup initial latest_version: 1 + write_file(table_model_latest_version_yml, "models", "schema.yml") + + self.run_and_save_state() + + # Bump latest version + write_file(table_model_sql, "models", "table_model_v2.sql") + write_file(modified_table_model_latest_version_yml, "models", "schema.yml") + + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + assert results == ["test.table_model.v1", "test.table_model.v2"] + + +class TestChangedSemanticModelContents(BaseModifiedState): + @pytest.fixture(scope="class") + def models(self): + return { + "view_model.sql": view_model_sql, + "schema.yml": semantic_model_schema_yml, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + } + + def test_changed_semantic_model_contents(self, project): + self.run_and_save_state() + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + assert len(results) == 0 + + write_file(modified_semantic_model_schema_yml, "models", "schema.yml") + results = run_dbt(["list", "-s", "state:modified", "--state", "./state"]) + assert len(results) == 1 diff --git a/tests/functional/defer_state/test_run_results_state.py b/tests/functional/defer_state/test_run_results_state.py index 69dc77a1dd3..e4b467d8e37 100644 --- a/tests/functional/defer_state/test_run_results_state.py +++ b/tests/functional/defer_state/test_run_results_state.py @@ -4,16 +4,15 @@ import pytest from dbt.tests.util import run_dbt, write_file - from tests.functional.defer_state.fixtures import ( - seed_csv, - table_model_sql, - view_model_sql, ephemeral_model_sql, - schema_yml, exposures_yml, - macros_sql, infinite_macros_sql, + macros_sql, + schema_yml, + seed_csv, + table_model_sql, + view_model_sql, ) diff --git a/tests/functional/dependencies/test_dependency_options.py b/tests/functional/dependencies/test_dependency_options.py new file mode 100644 index 00000000000..66fcc96109a --- /dev/null +++ b/tests/functional/dependencies/test_dependency_options.py @@ -0,0 +1,102 @@ +import os +import shutil + +import pytest + +from dbt.tests.util import run_dbt + + +class TestDepsOptions(object): + # this revision of dbt-integration-project requires dbt-utils.git@0.5.0, which the + # package config handling should detect + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "package": "fivetran/fivetran_utils", + "version": "0.4.7", + }, + ] + } + + @pytest.fixture + def clean_start(self, project): + if os.path.exists("dbt_packages"): + shutil.rmtree("dbt_packages") + if os.path.exists("package-lock.yml"): + os.remove("package-lock.yml") + + def test_deps_lock(self, clean_start): + run_dbt(["deps", "--lock"]) + assert not os.path.exists("dbt_packages") + assert os.path.exists("package-lock.yml") + with open("package-lock.yml") as fp: + contents = fp.read() + + fivetran_package = "- package: fivetran/fivetran_utils\n version: 0.4.7" + # dbt-utils is a dep in fivetran so we can't check for a specific version or this test fails everytime a new dbt-utils version comes out + dbt_labs_package = "- package: 
dbt-labs/dbt_utils" + package_sha = "sha1_hash: 71304bca2138cf8004070b3573a1e17183c0c1a8" + assert fivetran_package in contents + assert dbt_labs_package in contents + assert package_sha in contents + + def test_deps_default(self, clean_start): + run_dbt(["deps"]) + assert len(os.listdir("dbt_packages")) == 2 + assert os.path.exists("package-lock.yml") + with open("package-lock.yml") as fp: + contents = fp.read() + fivetran_package = "- package: fivetran/fivetran_utils\n version: 0.4.7" + # dbt-utils is a dep in fivetran so we can't check for a specific version or this test fails everytime a new dbt-utils version comes out + dbt_labs_package = "- package: dbt-labs/dbt_utils" + package_sha = "sha1_hash: 71304bca2138cf8004070b3573a1e17183c0c1a8" + assert fivetran_package in contents + assert dbt_labs_package in contents + assert package_sha in contents + + def test_deps_add(self, clean_start): + run_dbt(["deps", "--add-package", "dbt-labs/audit_helper@0.9.0"]) + with open("packages.yml") as fp: + contents = fp.read() + assert ( + contents + == """packages: + - package: fivetran/fivetran_utils + version: 0.4.7 + - package: dbt-labs/audit_helper + version: 0.9.0 +""" + ) + assert len(os.listdir("dbt_packages")) == 3 + + def test_deps_add_without_install(self, clean_start): + os.rename("packages.yml", "dependencies.yml") + run_dbt( + [ + "deps", + "--add-package", + "dbt-labs/audit_helper@0.9.0", + "--lock", + ] + ) + assert not os.path.exists("dbt_packages") + assert not os.path.exists("packages.yml") + with open("dependencies.yml") as fp: + contents = fp.read() + assert ( + contents + == """packages: + - package: fivetran/fivetran_utils + version: 0.4.7 + - package: dbt-labs/audit_helper + version: 0.9.0 +""" + ) + + def test_deps_upgrade(self, clean_start, mocker): + run_dbt(["deps", "--lock"]) + patched_lock = mocker.patch("dbt.task.deps.DepsTask.lock") + run_dbt(["deps", "--upgrade"]) + assert patched_lock.call_count == 1 diff --git a/tests/functional/dependencies/test_dependency_secrets.py b/tests/functional/dependencies/test_dependency_secrets.py new file mode 100644 index 00000000000..02dc4f0f8f1 --- /dev/null +++ b/tests/functional/dependencies/test_dependency_secrets.py @@ -0,0 +1,32 @@ +import os + +import pytest + +from dbt.tests.util import run_dbt_and_capture +from dbt_common.constants import SECRET_ENV_PREFIX + + +class TestSecretInPackage: + @pytest.fixture(scope="class", autouse=True) + def setUp(self): + os.environ[SECRET_ENV_PREFIX + "_FOR_LOGGING"] = "super secret" + yield + del os.environ[SECRET_ENV_PREFIX + "_FOR_LOGGING"] + + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "package": "dbt-labs/dbt_utils{{ log(env_var('DBT_ENV_SECRET_FOR_LOGGING'), info = true) }}", + "version": "1.0.0", + } + ] + } + + def test_mask_secrets(self, project): + _, log_output = run_dbt_and_capture(["deps"]) + # this will not be written to logs + assert not ("super secret" in log_output) + assert "*****" in log_output + assert not ("DBT_ENV_SECRET_FOR_LOGGING" in log_output) diff --git a/tests/functional/dependencies/test_local_dependency.py b/tests/functional/dependencies/test_local_dependency.py index c7a9f01cc0a..d26345f2da6 100644 --- a/tests/functional/dependencies/test_local_dependency.py +++ b/tests/functional/dependencies/test_local_dependency.py @@ -1,20 +1,27 @@ -import os -import pytest import json +import os import shutil -import yaml - -# todo: make self.unique_schema to fixture - from pathlib import Path from unittest import mock -from 
contextlib import contextmanager -import dbt.semver +import pytest +import yaml + import dbt.config import dbt.exceptions +import dbt_common.exceptions +import dbt_common.semver as semver +from dbt import deprecations +from dbt.tests.util import ( + check_relations_equal, + run_dbt, + run_dbt_and_capture, + write_file, +) +from tests.functional.utils import up_one + +# todo: make self.unique_schema to fixture -from dbt.tests.util import check_relations_equal, run_dbt, run_dbt_and_capture models__dep_source = """ {# If our dependency source didn't exist, this would be an errror #} @@ -83,16 +90,6 @@ """ -@contextmanager -def up_one(): - current_path = Path.cwd() - os.chdir("../") - try: - yield - finally: - os.chdir(current_path) - - class BaseDependencyTest(object): @pytest.fixture(scope="class") def macros(self): @@ -207,7 +204,7 @@ def models(self): def test_missing_dependency(self, project): # dbt should raise a runtime exception - with pytest.raises(dbt.exceptions.DbtRuntimeError): + with pytest.raises(dbt_common.exceptions.DbtRuntimeError): run_dbt(["compile"]) @@ -229,7 +226,7 @@ def project_config(self): @mock.patch("dbt.config.project.get_installed_version") def test_local_dependency_out_of_date(self, mock_get, project): - mock_get.return_value = dbt.semver.VersionSpecifier.from_version_string("0.0.1") + mock_get.return_value = semver.VersionSpecifier.from_version_string("0.0.1") run_dbt(["deps"] + self.dbt_vargs(project.test_schema)) # check seed with pytest.raises(dbt.exceptions.DbtProjectError) as exc: @@ -242,7 +239,7 @@ def test_local_dependency_out_of_date(self, mock_get, project): @mock.patch("dbt.config.project.get_installed_version") def test_local_dependency_out_of_date_no_check(self, mock_get): - mock_get.return_value = dbt.semver.VersionSpecifier.from_version_string("0.0.1") + mock_get.return_value = semver.VersionSpecifier.from_version_string("0.0.1") run_dbt(["deps"]) run_dbt(["seed", "--no-version-check"]) results = run_dbt(["run", "--no-version-check"]) @@ -253,6 +250,10 @@ class TestSimpleDependencyNoVersionCheckConfig(BaseDependencyTest): @pytest.fixture(scope="class") def project_config_update(self): return { + "flags": { + "send_anonymous_usage_stats": False, + "version_check": False, + }, "models": { "schema": "dbt_test", }, @@ -261,15 +262,6 @@ def project_config_update(self): }, } - @pytest.fixture(scope="class") - def profiles_config_update(self): - return { - "config": { - "send_anonymous_usage_stats": False, - "version_check": False, - } - } - @pytest.fixture(scope="class") def macros(self): return {"macro.sql": macros__macro_override_schema_sql} @@ -284,7 +276,7 @@ def test_local_dependency_out_of_date_no_check(self, mock_get, project): } ) - mock_get.return_value = dbt.semver.VersionSpecifier.from_version_string("0.0.1") + mock_get.return_value = semver.VersionSpecifier.from_version_string("0.0.1") run_dbt(["deps", "--vars", vars_arg]) run_dbt(["seed", "--vars", vars_arg]) results = run_dbt(["run", "--vars", vars_arg]) @@ -362,8 +354,40 @@ def test_local_dependency_same_name(self, prepare_dependencies, project): def test_local_dependency_same_name_sneaky(self, prepare_dependencies, project): shutil.copytree("duplicate_dependency", "./dbt_packages/duplicate_dependency") - with pytest.raises(dbt.exceptions.CompilationError): + with pytest.raises(dbt_common.exceptions.CompilationError): run_dbt(["compile"]) # needed to avoid compilation errors from duplicate package names in test autocleanup run_dbt(["clean"]) + + +source_with_tests = """ +sources: + - 
name: my_source + schema: invalid_schema + tables: + - name: my_table + - name: seed_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + identifier: "seed_subpackage_generate_alias_name" + columns: + - name: id + tests: + - unique + - not_null +""" + + +class TestDependencyTestsConfig(BaseDependencyTest): + def test_dependency_tests_config(self, project): + run_dbt(["deps"]) + # Write a file to local_dependency with a "tests" config + write_file( + source_with_tests, project.project_root, "local_dependency", "models", "schema.yml" + ) + run_dbt(["parse"]) + # Check that project-test-config is NOT in active deprecations, since "tests" is only + # in a dependent project. + assert "project-test-config" not in deprecations.active_deprecations diff --git a/tests/functional/dependencies/test_simple_dependency.py b/tests/functional/dependencies/test_simple_dependency.py index 18003fa7ee1..e63a648e0be 100644 --- a/tests/functional/dependencies/test_simple_dependency.py +++ b/tests/functional/dependencies/test_simple_dependency.py @@ -1,14 +1,11 @@ import os -import pytest import tempfile - from pathlib import Path -from dbt.tests.util import ( - check_relations_equal, - run_dbt, -) +import pytest +from dbt.exceptions import DbtProjectError +from dbt.tests.util import check_relations_equal, run_dbt, write_config_file models__disabled_one = """ {{config(enabled=False)}} @@ -123,6 +120,7 @@ def dependencies(self): def test_dependency_with_dependencies_file(self, run_deps, project): # Tests that "packages" defined in a dependencies.yml file works + run_dbt(["deps"]) results = run_dbt() assert len(results) == 4 @@ -215,6 +213,43 @@ def test_simple_dependency_deps(self, project): run_dbt(["deps"]) +class TestSimpleDependencyWithSubdirs(object): + # dbt should convert these into a single dependency internally + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-multipe-packages.git", + "subdirectory": "dbt-utils-main", + "revision": "v0.1.0", + }, + { + "git": "https://github.com/dbt-labs/dbt-multipe-packages.git", + "subdirectory": "dbt-date-main", + "revision": "v0.1.0", + }, + ] + } + + def test_git_with_multiple_subdir(self, project): + run_dbt(["deps"]) + assert os.path.exists("package-lock.yml") + expected = """packages: + - git: https://github.com/dbt-labs/dbt-multipe-packages.git + revision: 53782f3ede8fdf307ee1d8e418aa65733a4b72fa + subdirectory: dbt-utils-main + - git: https://github.com/dbt-labs/dbt-multipe-packages.git + revision: 53782f3ede8fdf307ee1d8e418aa65733a4b72fa + subdirectory: dbt-date-main +sha1_hash: b9c8042f29446c55a33f9f211737f445a640c7a1 +""" + with open("package-lock.yml") as fp: + contents = fp.read() + assert contents == expected + assert len(os.listdir("dbt_packages")) == 2 + + class TestRekeyedDependencyWithSubduplicates(object): # this revision of dbt-integration-project requires dbt-utils.git@0.5.0, which the # package config handling should detect @@ -238,6 +273,25 @@ def test_simple_dependency_deps(self, project): assert len(os.listdir("dbt_packages")) == 2 +class TestTarballNestedDependencies(object): + # this version of dbt_expectations has a dependency on dbt_date, which the + # package config handling should detect + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "tarball": "https://github.com/calogica/dbt-expectations/archive/refs/tags/0.9.0.tar.gz", + "name": "dbt_expectations", + }, + ] + } + + def 
test_simple_dependency_deps(self, project): + run_dbt(["deps"]) + assert set(os.listdir("dbt_packages")) == set(["dbt_expectations", "dbt_date"]) + + class DependencyBranchBase(object): @pytest.fixture(scope="class", autouse=True) def setUp(self, project): @@ -336,3 +390,83 @@ def test_deps_bad_profile(self, project): del os.environ["PROFILE_TEST_HOST"] run_dbt(["deps"]) run_dbt(["clean"]) + + +class TestSimpleDependcyTarball(object): + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "tarball": "https://codeload.github.com/dbt-labs/dbt-utils/tar.gz/0.9.6", + "name": "dbt_utils", + } + ] + } + + def test_deps_simple_tarball_doesnt_error_out(self, project): + run_dbt(["deps"]) + assert len(os.listdir("dbt_packages")) == 1 + + +class TestBadTarballDependency(object): + def test_malformed_tarball_package_causes_exception(self, project): + # We have to specify the bad formatted package here because if we do it + # in a `packages` fixture, the test will blow up in the setup phase, meaning + # we can't appropriately catch it with a `pytest.raises` + bad_tarball_package_spec = { + "packages": [ + { + "tarball": "https://codeload.github.com/dbt-labs/dbt-utils/tar.gz/0.9.6", + "version": "dbt_utils", + } + ] + } + write_config_file(bad_tarball_package_spec, "packages.yml") + + with pytest.raises( + DbtProjectError, match=r"The packages.yml file in this project is malformed" + ) as e: + run_dbt(["deps"]) + assert e is not None + + +class TestEmptyDependency: + def test_empty_package(self, project): + # We have to specify the bad formatted package here because if we do it + # in a `packages` fixture, the test will blow up in the setup phase, meaning + # we can't appropriately catch it with a `pytest.raises` + empty_hub_package = { + "packages": [ + { + "package": "", + "version": "1.0.0", + } + ] + } + write_config_file(empty_hub_package, "packages.yml") + with pytest.raises(DbtProjectError, match="A hub package is missing the value"): + run_dbt(["deps"]) + + empty_git_package = { + "packages": [ + { + "git": "", + "revision": "1.0.0", + } + ] + } + write_config_file(empty_git_package, "packages.yml") + with pytest.raises(DbtProjectError, match="A git package is missing the value"): + run_dbt(["deps"]) + + empty_local_package = { + "packages": [ + { + "local": "", + } + ] + } + write_config_file(empty_local_package, "packages.yml") + with pytest.raises(DbtProjectError, match="A local package is missing the value"): + run_dbt(["deps"]) diff --git a/tests/functional/dependencies/test_simple_dependency_with_configs.py b/tests/functional/dependencies/test_simple_dependency_with_configs.py index 86ab911a2b1..40642d4e81a 100644 --- a/tests/functional/dependencies/test_simple_dependency_with_configs.py +++ b/tests/functional/dependencies/test_simple_dependency_with_configs.py @@ -1,12 +1,8 @@ -import pytest - from pathlib import Path -from dbt.tests.util import ( - check_relations_equal, - run_dbt, -) +import pytest +from dbt.tests.util import check_relations_equal, run_dbt models__view_summary = """ {{ diff --git a/tests/functional/deprecations/fixtures.py b/tests/functional/deprecations/fixtures.py new file mode 100644 index 00000000000..d9ae97603db --- /dev/null +++ b/tests/functional/deprecations/fixtures.py @@ -0,0 +1,123 @@ +models__already_exists_sql = """ +select 1 as id + +{% if adapter.already_exists(this.schema, this.identifier) and not should_full_refresh() %} + where id > (select max(id) from {{this}}) +{% endif %} +""" + +models_trivial__model_sql = """ 
+select 1 as id +""" + +macros__custom_test_sql = """ +{% test custom(model) %} + select * from {{ model }} + limit 0 +{% endtest %} +""" + + +bad_name_yaml = """ +version: 2 + +exposures: + - name: simple exposure spaced!! + type: dashboard + depends_on: + - ref('model') + owner: + email: something@example.com +""" + +# deprecated test config fixtures +data_tests_yaml = """ +models: + - name: model + columns: + - name: id + data_tests: + - not_null +""" + +test_type_mixed_yaml = """ +models: + - name: model + columns: + - name: id + data_tests: + - not_null + tests: + - unique +""" + +old_tests_yml = """ +models: + - name: model + tests: + - custom + columns: + - name: id + tests: + - not_null + + - name: versioned_model + tests: + - custom + versions: + - v: 1 + tests: + columns: + - name: id + tests: + - not_null +""" + +sources_old_tests_yaml = """ +sources: + - name: seed_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + tests: + - custom + columns: + - name: id + tests: + - unique +""" + +seed_csv = """id,name +1,Mary +2,Sam +3,John +""" + + +local_dependency__dbt_project_yml = """ + +name: 'local_dep' +version: '1.0' + +seeds: + quote_columns: False + +""" + +local_dependency__schema_yml = """ +sources: + - name: seed_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + columns: + - name: id + tests: + - unique +""" + +local_dependency__seed_csv = """id,name +1,Mary +2,Sam +3,John +""" diff --git a/tests/functional/deprecations/model_deprecations.py b/tests/functional/deprecations/model_deprecations.py index 94a67c3bfe2..03e38b1220e 100644 --- a/tests/functional/deprecations/model_deprecations.py +++ b/tests/functional/deprecations/model_deprecations.py @@ -1,10 +1,9 @@ import pytest -from dbt.exceptions import EventCompilationError from dbt.cli.main import dbtRunner +from dbt.exceptions import EventCompilationError from dbt.tests.util import run_dbt - deprecated_model__yml = """ version: 2 @@ -53,7 +52,7 @@ def test_deprecation_warning_error_options(self, project): run_dbt(["--warn-error-options", '{"include": ["DeprecatedModel"]}', "parse"]) -class TestReferenceDeprecatingWarning: +class TestUpcomingReferenceDeprecatingWarning: @pytest.fixture(scope="class") def models(self): return { @@ -81,7 +80,7 @@ def test_deprecation_warning_error_options(self, project): ) -class TestReferenceDeprecatedWarning: +class TestDeprecatedReferenceWarning: @pytest.fixture(scope="class") def models(self): return { diff --git a/tests/functional/deprecations/test_config_deprecations.py b/tests/functional/deprecations/test_config_deprecations.py new file mode 100644 index 00000000000..077dd7da103 --- /dev/null +++ b/tests/functional/deprecations/test_config_deprecations.py @@ -0,0 +1,165 @@ +import pytest + +from dbt import deprecations +from dbt.exceptions import CompilationError, ProjectContractError, YamlParseDictError +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import run_dbt, update_config_file +from tests.functional.deprecations.fixtures import ( + data_tests_yaml, + local_dependency__dbt_project_yml, + local_dependency__schema_yml, + local_dependency__seed_csv, + macros__custom_test_sql, + models_trivial__model_sql, + old_tests_yml, + seed_csv, + sources_old_tests_yaml, + test_type_mixed_yaml, +) + + +# test deprecation messages +class TestTestsConfigDeprecation: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": models_trivial__model_sql} + + 
@pytest.fixture(scope="class") + def project_config_update(self, unique_schema): + return {"tests": {"enabled": "true"}} + + def test_project_tests_config(self, project): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + run_dbt(["parse"]) + expected = set() + assert expected == deprecations.active_deprecations + + def test_project_tests_config_fail(self, project): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + with pytest.raises(CompilationError) as exc: + run_dbt(["--warn-error", "--no-partial-parse", "parse"]) + exc_str = " ".join(str(exc.value).split()) # flatten all whitespace + expected_msg = "Configuration paths exist in your dbt_project.yml file which do not apply to any resources. There are 1 unused configuration paths: - data_tests" + assert expected_msg in exc_str + + +class TestSchemaTestDeprecation: + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": models_trivial__model_sql, + "versioned_model.sql": models_trivial__model_sql, + "schema.yml": old_tests_yml, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"custom.sql": macros__custom_test_sql} + + def test_generic_tests_config(self, project): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + run_dbt(["parse"]) + expected = set() + assert expected == deprecations.active_deprecations + + def test_generic_tests_fail(self, project): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + run_dbt(["--warn-error", "--no-partial-parse", "parse"]) + + def test_generic_data_test_parsing(self, project): + results = run_dbt(["list", "--resource-type", "test"]) + assert len(results) == 4 + + +class TestSourceSchemaTestDeprecation: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": sources_old_tests_yaml} + + @pytest.fixture(scope="class") + def macros(self): + return {"custom.sql": macros__custom_test_sql} + + @pytest.fixture(scope="class") + def seeds(self): + return { + "seed.csv": seed_csv, + } + + def test_source_tests_config(self, project): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + run_dbt(["parse"]) + expected = set() + assert expected == deprecations.active_deprecations + + def test_generic_data_tests(self, project): + run_dbt(["seed"]) + results = run_dbt(["test"]) + assert len(results) == 2 + + +# test for failure with test and data_tests in the same file +class TestBothSchemaTestDeprecation: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": models_trivial__model_sql, "schema.yml": test_type_mixed_yaml} + + def test_schema(self, project): + expected_msg = "Invalid test config: cannot have both 'tests' and 'data_tests' defined" + with pytest.raises(YamlParseDictError) as excinfo: + run_dbt(["parse"]) + assert expected_msg in str(excinfo.value) + + +# test for failure with test and data_tests in the same dbt_project.yml +class TestBothProjectTestDeprecation: + @pytest.fixture(scope="class") + def models(self): + return {"model.sql": models_trivial__model_sql} + + def test_tests_config(self, project): + config_patch = {"tests": {"+enabled": "true"}, "data_tests": {"+tags": "super"}} + update_config_file(config_patch, project.project_root, "dbt_project.yml") + + expected_msg = "Invalid project config: cannot have both 'tests' and 'data_tests' defined" + with pytest.raises(ProjectContractError) as excinfo: + run_dbt(["parse"]) + assert 
expected_msg in str(excinfo.value) + + +# test a local dependency can have tests while the rest of the project uses data_tests +class TestTestConfigInDependency: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": local_dependency__dbt_project_yml, + "models": { + "schema.yml": local_dependency__schema_yml, + }, + "seeds": {"seed.csv": local_dependency__seed_csv}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + @pytest.fixture(scope="class") + def models(self): + return { + "model.sql": models_trivial__model_sql, + "schema.yml": data_tests_yaml, + } + + def test_test_dep(self, project): + run_dbt(["deps"]) + run_dbt(["seed"]) + run_dbt(["run"]) + results = run_dbt(["test"]) + # 1 data_test in the dep and 1 in the project + assert len(results) == 2 diff --git a/tests/functional/deprecations/test_deprecations.py b/tests/functional/deprecations/test_deprecations.py index 6c2678433b0..95779338d8a 100644 --- a/tests/functional/deprecations/test_deprecations.py +++ b/tests/functional/deprecations/test_deprecations.py @@ -1,34 +1,14 @@ import pytest +import yaml +import dbt_common from dbt import deprecations -import dbt.exceptions -from dbt.tests.util import run_dbt - - -models__already_exists_sql = """ -select 1 as id - -{% if adapter.already_exists(this.schema, this.identifier) and not should_full_refresh() %} - where id > (select max(id) from {{this}}) -{% endif %} -""" - -models_trivial__model_sql = """ -select 1 as id -""" - - -bad_name_yaml = """ -version: 2 - -exposures: - - name: simple exposure spaced!! - type: dashboard - depends_on: - - ref('model') - owner: - email: something@example.com -""" +from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file +from dbt_common.exceptions import EventCompilationError +from tests.functional.deprecations.fixtures import ( + bad_name_yaml, + models_trivial__model_sql, +) class TestConfigPathDeprecation: @@ -59,31 +39,13 @@ def test_data_path(self, project): def test_data_path_fail(self, project): deprecations.reset_deprecations() assert deprecations.active_deprecations == set() - with pytest.raises(dbt.exceptions.CompilationError) as exc: + with pytest.raises(dbt_common.exceptions.CompilationError) as exc: run_dbt(["--warn-error", "debug"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace expected_msg = "The `data-paths` config has been renamed" assert expected_msg in exc_str -class TestAdapterDeprecations: - @pytest.fixture(scope="class") - def models(self): - return {"already_exists.sql": models__already_exists_sql} - - def test_adapter(self, project): - deprecations.reset_deprecations() - assert deprecations.active_deprecations == set() - run_dbt(["run"]) - expected = {"adapter:already_exists"} - assert expected == deprecations.active_deprecations - - def test_adapter_fail(self, project): - deprecations.reset_deprecations() - assert deprecations.active_deprecations == set() - run_dbt(["--warn-error", "run"], expect_pass=False) - - class TestPackageInstallPathDeprecation: @pytest.fixture(scope="class") def models_trivial(self): @@ -103,7 +65,7 @@ def test_package_path(self, project): def test_package_path_not_set(self, project): deprecations.reset_deprecations() assert deprecations.active_deprecations == set() - with pytest.raises(dbt.exceptions.CompilationError) as exc: + with 
pytest.raises(dbt_common.exceptions.CompilationError) as exc: run_dbt(["--warn-error", "clean"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace expected_msg = "path has changed from `dbt_modules` to `dbt_packages`." @@ -130,7 +92,7 @@ def test_package_redirect(self, project): def test_package_redirect_fail(self, project): deprecations.reset_deprecations() assert deprecations.active_deprecations == set() - with pytest.raises(dbt.exceptions.CompilationError) as exc: + with pytest.raises(dbt_common.exceptions.CompilationError) as exc: run_dbt(["--warn-error", "deps"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace expected_msg = "The `fishtown-analytics/dbt_utils` package is deprecated in favor of `dbt-labs/dbt_utils`" @@ -152,8 +114,75 @@ def test_exposure_name(self, project): def test_exposure_name_fail(self, project): deprecations.reset_deprecations() assert deprecations.active_deprecations == set() - with pytest.raises(dbt.exceptions.CompilationError) as exc: + with pytest.raises(dbt_common.exceptions.CompilationError) as exc: run_dbt(["--warn-error", "--no-partial-parse", "parse"]) exc_str = " ".join(str(exc.value).split()) # flatten all whitespace expected_msg = "Starting in v1.3, the 'name' of an exposure should contain only letters, numbers, and underscores." assert expected_msg in exc_str + + +class TestProjectFlagsMovedDeprecation: + @pytest.fixture(scope="class") + def profiles_config_update(self): + return { + "config": {"send_anonymous_usage_stats": False}, + } + + @pytest.fixture(scope="class") + def dbt_project_yml(self, project_root, project_config_update): + project_config = { + "name": "test", + "profile": "test", + } + write_file(yaml.safe_dump(project_config), project_root, "dbt_project.yml") + return project_config + + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": "select 1 as fun"} + + def test_profile_config_deprecation(self, project): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + + _, logs = run_dbt_and_capture(["parse"]) + + assert ( + "User config should be moved from the 'config' key in profiles.yml to the 'flags' key in dbt_project.yml." + in logs + ) + assert deprecations.active_deprecations == {"project-flags-moved"} + + +class TestProjectFlagsMovedDeprecationQuiet(TestProjectFlagsMovedDeprecation): + def test_profile_config_deprecation(self, project): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + + _, logs = run_dbt_and_capture(["--quiet", "parse"]) + + assert ( + "User config should be moved from the 'config' key in profiles.yml to the 'flags' key in dbt_project.yml." + not in logs + ) + assert deprecations.active_deprecations == {"project-flags-moved"} + + +class TestProjectFlagsMovedDeprecationWarnErrorOptions(TestProjectFlagsMovedDeprecation): + def test_profile_config_deprecation(self, project): + deprecations.reset_deprecations() + with pytest.raises(EventCompilationError): + run_dbt(["--warn-error-options", "{'include': 'all'}", "parse"]) + + with pytest.raises(EventCompilationError): + run_dbt( + ["--warn-error-options", "{'include': ['ProjectFlagsMovedDeprecation']}", "parse"] + ) + + _, logs = run_dbt_and_capture( + ["--warn-error-options", "{'silence': ['ProjectFlagsMovedDeprecation']}", "parse"] + ) + assert ( + "User config should be moved from the 'config' key in profiles.yml to the 'flags' key in dbt_project.yml." 
+ not in logs + ) diff --git a/tests/functional/docs/test_duplicate_docs_block.py b/tests/functional/docs/test_duplicate_docs_block.py index 2ff9459e4b3..95262bcb734 100644 --- a/tests/functional/docs/test_duplicate_docs_block.py +++ b/tests/functional/docs/test_duplicate_docs_block.py @@ -1,8 +1,7 @@ import pytest +import dbt_common.exceptions from dbt.tests.util import run_dbt -import dbt.exceptions - duplicate_doc_blocks_model_sql = "select 1 as id, 'joe' as first_name" @@ -31,5 +30,5 @@ def models(self): } def test_duplicate_doc_ref(self, project): - with pytest.raises(dbt.exceptions.CompilationError): + with pytest.raises(dbt_common.exceptions.CompilationError): run_dbt(expect_pass=False) diff --git a/tests/functional/docs/test_generate.py b/tests/functional/docs/test_generate.py index d28a084ee59..8e5873e602d 100644 --- a/tests/functional/docs/test_generate.py +++ b/tests/functional/docs/test_generate.py @@ -1,14 +1,52 @@ +from unittest import mock + import pytest -from dbt.tests.util import run_dbt, get_manifest -import json +from dbt.plugins.manifest import ModelNodeArgs, PluginNodes +from dbt.tests.util import get_manifest, run_dbt + +sample_seed = """sample_num,sample_bool +1,true +2,false +3,true +""" + +second_seed = """sample_num,sample_bool +4,true +5,false +6,true +""" +sample_config = """ +sources: + - name: my_source_schema + schema: "{{ target.schema }}" + tables: + - name: sample_source + - name: second_source + - name: non_existent_source + - name: source_from_seed +""" -class TestGenerate: + +class TestBaseGenerate: @pytest.fixture(scope="class") def models(self): - return {"my_model.sql": "select 1 as fun"} + return { + "my_model.sql": "select 1 as fun", + "alt_model.sql": "select 1 as notfun", + "sample_config.yml": sample_config, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "sample_seed.csv": sample_seed, + "second_seed.csv": sample_seed, + } + +class TestGenerateManifestNotCompiled(TestBaseGenerate): def test_manifest_not_compiled(self, project): run_dbt(["docs", "generate", "--no-compile"]) # manifest.json is written out in parsing now, but it @@ -18,10 +56,176 @@ def test_manifest_not_compiled(self, project): assert model_id in manifest.nodes assert manifest.nodes[model_id].compiled is False + +class TestGenerateEmptyCatalog(TestBaseGenerate): def test_generate_empty_catalog(self, project): - run_dbt(["docs", "generate", "--empty-catalog"]) - with open("./target/catalog.json") as file: - catalog = json.load(file) - assert catalog["nodes"] == {}, "nodes should be empty" - assert catalog["sources"] == {}, "sources should be empty" - assert catalog["errors"] is None, "errors should be null" + catalog = run_dbt(["docs", "generate", "--empty-catalog"]) + assert catalog.nodes == {}, "nodes should be empty" + assert catalog.sources == {}, "sources should be empty" + assert catalog.errors is None, "errors should be null" + + +class TestGenerateSelectLimitsCatalog(TestBaseGenerate): + def test_select_limits_catalog(self, project): + run_dbt(["run"]) + catalog = run_dbt(["docs", "generate", "--select", "my_model"]) + assert len(catalog.nodes) == 1 + assert "model.test.my_model" in catalog.nodes + + +class TestGenerateSelectLimitsNoMatch(TestBaseGenerate): + def test_select_limits_no_match(self, project): + run_dbt(["run"]) + catalog = run_dbt(["docs", "generate", "--select", "my_missing_model"]) + assert len(catalog.nodes) == 0 + assert len(catalog.sources) == 0 + + +class TestGenerateCatalogWithSources(TestBaseGenerate): + def 
test_catalog_with_sources(self, project): + # populate sources other than non_existent_source + project.run_sql("create table {}.sample_source (id int)".format(project.test_schema)) + project.run_sql("create table {}.second_source (id int)".format(project.test_schema)) + + # build nodes + run_dbt(["build"]) + + catalog = run_dbt(["docs", "generate"]) + + # 2 seeds + 2 models + assert len(catalog.nodes) == 4 + # 2 sources (only ones that exist) + assert len(catalog.sources) == 2 + + +class TestGenerateCatalogWithExternalNodes(TestBaseGenerate): + @mock.patch("dbt.plugins.get_plugin_manager") + def test_catalog_with_external_node(self, get_plugin_manager, project): + project.run_sql("create table {}.external_model (id int)".format(project.test_schema)) + + run_dbt(["build"]) + + external_nodes = PluginNodes() + external_model_node = ModelNodeArgs( + name="external_model", + package_name="external_package", + identifier="external_model", + schema=project.test_schema, + database="dbt", + ) + external_nodes.add_model(external_model_node) + get_plugin_manager.return_value.get_nodes.return_value = external_nodes + catalog = run_dbt(["docs", "generate"]) + + assert "model.external_package.external_model" in catalog.nodes + + +class TestGenerateSelectSource(TestBaseGenerate): + @pytest.fixture(scope="class") + def seeds(self): + return { + "sample_seed.csv": sample_seed, + "second_seed.csv": sample_seed, + "source_from_seed.csv": sample_seed, + } + + def test_select_source(self, project): + run_dbt(["build"]) + + project.run_sql("create table {}.sample_source (id int)".format(project.test_schema)) + project.run_sql("create table {}.second_source (id int)".format(project.test_schema)) + + # 2 existing sources, 1 selected + catalog = run_dbt( + ["docs", "generate", "--select", "source:test.my_source_schema.sample_source"] + ) + assert len(catalog.sources) == 1 + assert "source.test.my_source_schema.sample_source" in catalog.sources + # no nodes selected + assert len(catalog.nodes) == 0 + + # 2 existing sources sources, 1 selected that has relation as a seed + catalog = run_dbt( + ["docs", "generate", "--select", "source:test.my_source_schema.source_from_seed"] + ) + assert len(catalog.sources) == 1 + assert "source.test.my_source_schema.source_from_seed" in catalog.sources + # seed with same relation that was not selected not in catalog + assert len(catalog.nodes) == 0 + + +class TestGenerateSelectOverMaxSchemaMetadataRelations(TestBaseGenerate): + @pytest.fixture(scope="class") + def seeds(self): + return { + "sample_seed.csv": sample_seed, + "second_seed.csv": sample_seed, + "source_from_seed.csv": sample_seed, + } + + def test_select_source(self, project): + run_dbt(["build"]) + + project.run_sql("create table {}.sample_source (id int)".format(project.test_schema)) + project.run_sql("create table {}.second_source (id int)".format(project.test_schema)) + + with mock.patch.object(type(project.adapter), "MAX_SCHEMA_METADATA_RELATIONS", 1): + # more relations than MAX_SCHEMA_METADATA_RELATIONS -> all sources and nodes correctly returned + catalog = run_dbt(["docs", "generate"]) + assert len(catalog.sources) == 3 + assert len(catalog.nodes) == 5 + + # full source selection respected + catalog = run_dbt(["docs", "generate", "--select", "source:*"]) + assert len(catalog.sources) == 3 + assert len(catalog.nodes) == 0 + + # full node selection respected + catalog = run_dbt(["docs", "generate", "--exclude", "source:*"]) + assert len(catalog.sources) == 0 + assert len(catalog.nodes) == 5 + + # granular 
source selection respected (> MAX_SCHEMA_METADATA_RELATIONS selected sources) + catalog = run_dbt( + [ + "docs", + "generate", + "--select", + "source:test.my_source_schema.sample_source", + "source:test.my_source_schema.second_source", + ] + ) + assert len(catalog.sources) == 2 + assert len(catalog.nodes) == 0 + + # granular node selection respected (> MAX_SCHEMA_METADATA_RELATIONS selected nodes) + catalog = run_dbt(["docs", "generate", "--select", "my_model", "alt_model"]) + assert len(catalog.sources) == 0 + assert len(catalog.nodes) == 2 + + +class TestGenerateSelectSeed(TestBaseGenerate): + @pytest.fixture(scope="class") + def seeds(self): + return { + "sample_seed.csv": sample_seed, + "second_seed.csv": sample_seed, + "source_from_seed.csv": sample_seed, + } + + def test_select_seed(self, project): + run_dbt(["build"]) + + # 3 seeds, 1 selected + catalog = run_dbt(["docs", "generate", "--select", "sample_seed"]) + assert len(catalog.nodes) == 1 + assert "seed.test.sample_seed" in catalog.nodes + # no sources selected + assert len(catalog.sources) == 0 + + # 3 seeds, 1 selected that has same relation as a source + catalog = run_dbt(["docs", "generate", "--select", "source_from_seed"]) + assert len(catalog.nodes) == 1 + assert "seed.test.source_from_seed" in catalog.nodes + # source with same relation that was not selected not in catalog + assert len(catalog.sources) == 0 diff --git a/tests/functional/docs/test_good_docs_blocks.py b/tests/functional/docs/test_good_docs_blocks.py index 782489854f5..e1ed96c5eb7 100644 --- a/tests/functional/docs/test_good_docs_blocks.py +++ b/tests/functional/docs/test_good_docs_blocks.py @@ -1,11 +1,11 @@ import json import os from pathlib import Path + import pytest from dbt.tests.util import run_dbt, update_config_file, write_file - good_docs_blocks_model_sql = "select 1 as id, 'joe' as first_name" good_docs_blocks_docs_md = """{% docs my_model_doc %} @@ -91,6 +91,7 @@ def test_valid_doc_ref(self, project): "meta": {}, "quote": None, "tags": [], + "granularity": None, } == model_data["columns"]["id"] assert { @@ -101,6 +102,7 @@ def test_valid_doc_ref(self, project): "meta": {}, "quote": None, "tags": [], + "granularity": None, } == model_data["columns"]["first_name"] assert { @@ -111,6 +113,7 @@ def test_valid_doc_ref(self, project): "meta": {}, "quote": None, "tags": [], + "granularity": None, } == model_data["columns"]["last_name"] assert len(model_data["columns"]) == 3 @@ -152,6 +155,7 @@ def test_alternative_docs_path(self, project): "meta": {}, "quote": None, "tags": [], + "granularity": None, } == model_data["columns"]["id"] assert { @@ -162,6 +166,7 @@ def test_alternative_docs_path(self, project): "meta": {}, "quote": None, "tags": [], + "granularity": None, } == model_data["columns"]["first_name"] assert { @@ -172,6 +177,7 @@ def test_alternative_docs_path(self, project): "meta": {}, "quote": None, "tags": [], + "granularity": None, } == model_data["columns"]["last_name"] assert len(model_data["columns"]) == 3 diff --git a/tests/functional/docs/test_invalid_doc_ref.py b/tests/functional/docs/test_invalid_doc_ref.py index 7c486938124..cfcd65da6e0 100644 --- a/tests/functional/docs/test_invalid_doc_ref.py +++ b/tests/functional/docs/test_invalid_doc_ref.py @@ -1,8 +1,7 @@ import pytest +import dbt_common.exceptions from dbt.tests.util import run_dbt -import dbt.exceptions - invalid_doc_ref_model_sql = "select 1 as id, 'joe' as first_name" @@ -43,5 +42,5 @@ def models(self): def test_invalid_doc_ref(self, project): # The run should fail since 
we could not find the docs reference. - with pytest.raises(dbt.exceptions.CompilationError): + with pytest.raises(dbt_common.exceptions.CompilationError): run_dbt(expect_pass=False) diff --git a/tests/functional/docs/test_missing_docs_blocks.py b/tests/functional/docs/test_missing_docs_blocks.py index 3b6f4e540b9..193644eca76 100644 --- a/tests/functional/docs/test_missing_docs_blocks.py +++ b/tests/functional/docs/test_missing_docs_blocks.py @@ -1,8 +1,7 @@ import pytest +import dbt_common.exceptions from dbt.tests.util import run_dbt -import dbt.exceptions - missing_docs_blocks_model_sql = "select 1 as id, 'joe' as first_name" @@ -39,5 +38,5 @@ def models(self): def test_missing_doc_ref(self, project): # The run should fail since we could not find the docs reference. - with pytest.raises(dbt.exceptions.CompilationError): + with pytest.raises(dbt_common.exceptions.CompilationError): run_dbt() diff --git a/tests/functional/docs/test_model_version_docs_blocks.py b/tests/functional/docs/test_model_version_docs_blocks.py new file mode 100644 index 00000000000..335ef8e8937 --- /dev/null +++ b/tests/functional/docs/test_model_version_docs_blocks.py @@ -0,0 +1,74 @@ +import pytest + +from dbt.tests.util import run_dbt + +model_1 = """ +select 1 as id, 'joe' as first_name +""" + +model_versioned = """ +select 1 as id, 'joe' as first_name +""" + +docs_md = """ +{% docs model_description %} +unversioned model +{% enddocs %} + +{% docs column_id_doc %} +column id for some thing +{% enddocs %} + +{% docs versioned_model_description %} +versioned model +{% enddocs %} + +""" + +schema_yml = """ +models: + - name: model_1 + description: '{{ doc("model_description") }}' + columns: + - name: id + description: '{{ doc("column_id_doc") }}' + + - name: model_versioned + description: '{{ doc("versioned_model_description") }}' + latest_version: 1 + versions: + - v: 1 + config: + alias: my_alias + columns: + - name: id + description: '{{ doc("column_id_doc") }}' + - name: first_name + description: 'plain text' + - v: 2 + columns: + - name: other_id +""" + + +class TestVersionedModelDocsBlock: + @pytest.fixture(scope="class") + def models(self): + return { + "model_1.sql": model_1, + "model_versioned.sql": model_versioned, + "schema.yml": schema_yml, + "docs.md": docs_md, + } + + def test_versioned_doc_ref(self, project): + manifest = run_dbt(["parse"]) + model_1 = manifest.nodes["model.test.model_1"] + model_v1 = manifest.nodes["model.test.model_versioned.v1"] + + assert model_1.description == "unversioned model" + assert model_v1.description == "versioned model" + + assert model_1.columns["id"].description == "column id for some thing" + assert model_v1.columns["id"].description == "column id for some thing" + assert model_v1.columns["first_name"].description == "plain text" diff --git a/tests/functional/docs/test_static.py b/tests/functional/docs/test_static.py new file mode 100644 index 00000000000..05f3a1ef7ca --- /dev/null +++ b/tests/functional/docs/test_static.py @@ -0,0 +1,51 @@ +import os + +import pytest + +from dbt.task.docs import DOCS_INDEX_FILE_PATH +from dbt.tests.util import run_dbt +from dbt_common.clients.system import load_file_contents + + +class TestStaticGenerate: + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": "select 1 as fun"} + + def test_static_generated(self, project): + run_dbt(["docs", "generate", "--static"]) + + source_index_html = load_file_contents(DOCS_INDEX_FILE_PATH) + + target_index_html = load_file_contents( + 
os.path.join(project.project_root, "target", "index.html") + ) + + # Validate index.html was copied correctly + assert len(target_index_html) == len(source_index_html) + assert hash(target_index_html) == hash(source_index_html) + + manifest_data = load_file_contents( + os.path.join(project.project_root, "target", "manifest.json") + ) + + catalog_data = load_file_contents( + os.path.join(project.project_root, "target", "catalog.json") + ) + + static_index_html = load_file_contents( + os.path.join(project.project_root, "target", "static_index.html") + ) + + # Calculate expected static_index.html + expected_static_index_html = source_index_html + expected_static_index_html = expected_static_index_html.replace( + '"MANIFEST.JSON INLINE DATA"', manifest_data + ) + expected_static_index_html = expected_static_index_html.replace( + '"CATALOG.JSON INLINE DATA"', catalog_data + ) + + # Validate static_index.html was generated correctly + assert len(expected_static_index_html) == len(static_index_html) + assert hash(expected_static_index_html) == hash(static_index_html) diff --git a/tests/functional/duplicates/test_duplicate_analysis.py b/tests/functional/duplicates/test_duplicate_analysis.py index 44dc4c6f167..126f6ae6907 100644 --- a/tests/functional/duplicates/test_duplicate_analysis.py +++ b/tests/functional/duplicates/test_duplicate_analysis.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - my_model_sql = """ select 1 as id """ diff --git a/tests/functional/duplicates/test_duplicate_exposure.py b/tests/functional/duplicates/test_duplicate_exposure.py index 140db21cd07..349c6b78cc7 100644 --- a/tests/functional/duplicates/test_duplicate_exposure.py +++ b/tests/functional/duplicates/test_duplicate_exposure.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - exposure_dupes_schema_yml = """ version: 2 exposures: diff --git a/tests/functional/duplicates/test_duplicate_macro.py b/tests/functional/duplicates/test_duplicate_macro.py index 35b843f5891..0c1ba3d76d3 100644 --- a/tests/functional/duplicates/test_duplicate_macro.py +++ b/tests/functional/duplicates/test_duplicate_macro.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - bad_same_macros_sql = """ {% macro some_macro() %} {% endmacro %} diff --git a/tests/functional/duplicates/test_duplicate_metric.py b/tests/functional/duplicates/test_duplicate_metric.py index a5f6b60e8f3..1b172b09caf 100644 --- a/tests/functional/duplicates/test_duplicate_metric.py +++ b/tests/functional/duplicates/test_duplicate_metric.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - metric_dupes_schema_yml = """ version: 2 diff --git a/tests/functional/duplicates/test_duplicate_model.py b/tests/functional/duplicates/test_duplicate_model.py index 17be1ff20b9..01fad6e949d 100644 --- a/tests/functional/duplicates/test_duplicate_model.py +++ b/tests/functional/duplicates/test_duplicate_model.py @@ -1,9 +1,8 @@ import pytest -from dbt.exceptions import CompilationError, AmbiguousAliasError +from dbt.exceptions import AmbiguousAliasError, CompilationError from dbt.tests.fixtures.project import write_project_files -from dbt.tests.util import run_dbt, get_manifest - +from dbt.tests.util import get_manifest, run_dbt disabled_model_sql = """ {{ @@ -43,6 +42,26 @@ """ +local_dep_schema_yml = """ +models: + - name: table_model + config: + alias: table_model_local_dep + columns: + - name: id 
+ data_tests: + - unique +""" + +local_dep_versions_schema_yml = """ +models: + - name: table_model + config: + alias: table_model_local_dep + versions: + - v: 1 +""" + class TestDuplicateModelEnabled: @pytest.fixture(scope="class") @@ -142,6 +161,72 @@ def test_duplicate_model_disabled_across_packages(self, project): assert model_id in manifest.disabled +class TestDuplicateModelNameWithTestAcrossPackages: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": dbt_project_yml, + "models": {"table_model.sql": enabled_model_sql, "schema.yml": local_dep_schema_yml}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def models(self): + return {"table_model.sql": enabled_model_sql} + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + def test_duplicate_model_name_with_test_across_packages(self, project): + run_dbt(["deps"]) + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 3 + + # model nodes with duplicate names exist + local_dep_model_node_id = "model.local_dep.table_model" + root_model_node_id = "model.test.table_model" + assert local_dep_model_node_id in manifest.nodes + assert root_model_node_id in manifest.nodes + + # test node exists and is attached to correct node + test_node_id = "test.local_dep.unique_table_model_id.1da9e464d9" + assert test_node_id in manifest.nodes + assert manifest.nodes[test_node_id].attached_node == local_dep_model_node_id + + +class TestDuplicateModelNameWithVersionAcrossPackages: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": dbt_project_yml, + "models": { + "table_model.sql": enabled_model_sql, + "schema.yml": local_dep_versions_schema_yml, + }, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def models(self): + return {"table_model.sql": enabled_model_sql} + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + def test_duplicate_model_name_with_test_across_packages(self, project): + run_dbt(["deps"]) + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 2 + + # model nodes with duplicate names exist + local_dep_model_node_id = "model.local_dep.table_model.v1" + root_model_node_id = "model.test.table_model" + assert local_dep_model_node_id in manifest.nodes + assert root_model_node_id in manifest.nodes + + class TestModelTestOverlap: @pytest.fixture(scope="class") def models(self): diff --git a/tests/functional/duplicates/test_duplicate_resource.py b/tests/functional/duplicates/test_duplicate_resource.py index 0bc070ff39f..87c52cf5712 100644 --- a/tests/functional/duplicates/test_duplicate_resource.py +++ b/tests/functional/duplicates/test_duplicate_resource.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - models_naming_dupes_schema_yml = """ version: 2 models: diff --git a/tests/functional/duplicates/test_duplicate_source.py b/tests/functional/duplicates/test_duplicate_source.py index 1100345aabc..4b9b6c4a08e 100644 --- a/tests/functional/duplicates/test_duplicate_source.py +++ b/tests/functional/duplicates/test_duplicate_source.py @@ -3,7 +3,6 @@ from dbt.exceptions import CompilationError from dbt.tests.util import run_dbt - source_dupes_schema_yml = """ version: 2 sources: diff --git 
a/tests/functional/events/events.py b/tests/functional/events/events.py new file mode 100644 index 00000000000..e43743ed16a --- /dev/null +++ b/tests/functional/events/events.py @@ -0,0 +1,33 @@ +import os + +from dbt.cli.main import dbtRunner +from dbt_common.events.base_types import EventLevel + + +def test_performance_report(project): + + resource_report_level = None + + def check_for_report(e): + # If we see a ResourceReport event, record its level + if e.info.name == "ResourceReport": + nonlocal resource_report_level + resource_report_level = e.info.level + + runner = dbtRunner(callbacks=[check_for_report]) + + runner.invoke(["run"]) + + # With not cli flag or env var set, ResourceReport should be debug level. + assert resource_report_level == EventLevel.DEBUG + + try: + os.environ["DBT_SHOW_RESOURCE_REPORT"] = "1" + runner.invoke(["run"]) + + # With the appropriate env var set, ResourceReport should be info level. + # This allows this fairly technical log line to be omitted by default + # but still available in production scenarios. + assert resource_report_level == EventLevel.INFO + finally: + del os.environ["DBT_SHOW_RESOURCE_REPORT"] diff --git a/tests/functional/exit_codes/fixtures.py b/tests/functional/exit_codes/fixtures.py index 296e1a3f6c0..90a9bb2514d 100644 --- a/tests/functional/exit_codes/fixtures.py +++ b/tests/functional/exit_codes/fixtures.py @@ -44,17 +44,17 @@ - name: good columns: - name: updated_at - tests: + data_tests: - not_null - name: bad columns: - name: updated_at - tests: + data_tests: - not_null - name: dupe columns: - name: updated_at - tests: + data_tests: - unique """ diff --git a/tests/functional/exit_codes/test_exit_codes.py b/tests/functional/exit_codes/test_exit_codes.py index 44672beecae..5013d6fc070 100644 --- a/tests/functional/exit_codes/test_exit_codes.py +++ b/tests/functional/exit_codes/test_exit_codes.py @@ -4,10 +4,10 @@ from dbt.tests.util import check_table_does_exist, check_table_does_not_exist, run_dbt from tests.functional.exit_codes.fixtures import ( BaseConfigProject, - snapshots_bad_sql, - snapshots_good_sql, data_seed_bad_csv, data_seed_good_csv, + snapshots_bad_sql, + snapshots_good_sql, ) diff --git a/tests/functional/experimental_parser/test_all_experimental_parser.py b/tests/functional/experimental_parser/test_all_experimental_parser.py index b426c308bb5..ff481cd0c04 100644 --- a/tests/functional/experimental_parser/test_all_experimental_parser.py +++ b/tests/functional/experimental_parser/test_all_experimental_parser.py @@ -1,10 +1,10 @@ -import pytest +import os -from dbt.tests.util import run_dbt, run_dbt_and_capture +import pytest +from dbt.artifacts.resources import RefArgs from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import RefArgs -import os +from dbt.tests.util import run_dbt def get_manifest(): @@ -49,49 +49,6 @@ def get_manifest(): """ -ref_macro__schema_yml = """ -version: 2 - -""" - -ref_macro__models__model_a_sql = """ -select 1 as id - -""" - -source_macro__macros__source_sql = """ -{% macro source(source_name, table_name) %} - -{% endmacro %} -""" - -source_macro__schema_yml = """ -version: 2 - -""" - -source_macro__models__model_a_sql = """ -select 1 as id - -""" - -config_macro__macros__config_sql = """ -{% macro config() %} - -{% endmacro %} -""" - -config_macro__schema_yml = """ -version: 2 - -""" - -config_macro__models__model_a_sql = """ -select 1 as id - -""" - - class BasicExperimentalParser: @pytest.fixture(scope="class") def models(self): @@ -102,54 +59,6 @@ def 
models(self): } -class TestBasicExperimentalParserFlag(BasicExperimentalParser): - @pytest.fixture(scope="class", autouse=True) - def setup(self, project): - os.environ["DBT_USE_EXPERIMENTAL_PARSER"] = "true" - yield - del os.environ["DBT_USE_EXPERIMENTAL_PARSER"] - - def test_env_use_experimental_parser(self, project): - _, log_output = run_dbt_and_capture(["--debug", "parse"]) - - # successful stable static parsing - assert not ("1699: " in log_output) - # successful experimental static parsing - assert "1698: " in log_output - # experimental parser failed - assert not ("1604: " in log_output) - # static parser failed - assert not ("1603: " in log_output) - # jinja rendering - assert not ("1602: " in log_output) - - -class TestBasicStaticParserFlag(BasicExperimentalParser): - @pytest.fixture(scope="class", autouse=True) - def setup(self, project): - os.environ["DBT_STATIC_PARSER"] = "false" - yield - del os.environ["DBT_STATIC_PARSER"] - - def test_env_static_parser(self, project): - _, log_output = run_dbt_and_capture(["--debug", "parse"]) - - print(log_output) - - # jinja rendering because of --no-static-parser - assert "1605: " in log_output - # successful stable static parsing - assert not ("1699: " in log_output) - # successful experimental static parsing - assert not ("1698: " in log_output) - # experimental parser failed - assert not ("1604: " in log_output) - # static parser failed - assert not ("1603: " in log_output) - # fallback jinja rendering - assert not ("1602: " in log_output) - - class TestBasicExperimentalParser(BasicExperimentalParser): # test that the experimental parser extracts some basic ref, source, and config calls. def test_experimental_parser_basic( @@ -169,18 +78,7 @@ class TestBasicStaticParser(BasicExperimentalParser): # test that the static parser extracts some basic ref, source, and config calls by default # without the experimental flag and without rendering jinja def test_static_parser_basic(self, project): - _, log_output = run_dbt_and_capture(["--debug", "parse"]) - - # successful stable static parsing - assert "1699: " in log_output - # successful experimental static parsing - assert not ("1698: " in log_output) - # experimental parser failed - assert not ("1604: " in log_output) - # static parser failed - assert not ("1603: " in log_output) - # jinja rendering - assert not ("1602: " in log_output) + run_dbt(["--debug", "parse"]) manifest = get_manifest() node = manifest.nodes["model.test.model_a"] @@ -193,112 +91,11 @@ def test_static_parser_basic(self, project): class TestBasicNoStaticParser(BasicExperimentalParser): # test that the static parser doesn't run when the flag is set def test_static_parser_is_disabled(self, project): - _, log_output = run_dbt_and_capture(["--debug", "--no-static-parser", "parse"]) - - # jinja rendering because of --no-static-parser - assert "1605: " in log_output - # successful stable static parsing - assert not ("1699: " in log_output) - # successful experimental static parsing - assert not ("1698: " in log_output) - # experimental parser failed - assert not ("1604: " in log_output) - # static parser failed - assert not ("1603: " in log_output) - # fallback jinja rendering - assert not ("1602: " in log_output) - - -class TestRefOverrideExperimentalParser: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": ref_macro__models__model_a_sql, - "schema.yml": ref_macro__schema_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "source.sql": 
source_macro__macros__source_sql, - } - - # test that the experimental parser doesn't run if the ref built-in is overriden with a macro - def test_experimental_parser_ref_override( - self, - project, - ): - _, log_output = run_dbt_and_capture(["--debug", "--use-experimental-parser", "parse"]) - - print(log_output) - - # successful experimental static parsing - assert not ("1698: " in log_output) - # fallback to jinja rendering - assert "1602: " in log_output - # experimental parser failed - assert not ("1604: " in log_output) - # didn't run static parser because dbt detected a built-in macro override - assert "1601: " in log_output + run_dbt(["--debug", "--no-static-parser", "parse"]) - -class TestSourceOverrideExperimentalParser: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": source_macro__models__model_a_sql, - "schema.yml": source_macro__schema_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "source.sql": source_macro__macros__source_sql, - } - - # test that the experimental parser doesn't run if the source built-in is overriden with a macro - def test_experimental_parser_source_override( - self, - project, - ): - _, log_output = run_dbt_and_capture(["--debug", "--use-experimental-parser", "parse"]) - - # successful experimental static parsing - assert not ("1698: " in log_output) - # fallback to jinja rendering - assert "1602: " in log_output - # experimental parser failed - assert not ("1604: " in log_output) - # didn't run static parser because dbt detected a built-in macro override - assert "1601: " in log_output - - -class TestConfigOverrideExperimentalParser: - @pytest.fixture(scope="class") - def models(self): - return { - "model_a.sql": config_macro__models__model_a_sql, - "schema.yml": config_macro__schema_yml, - } - - @pytest.fixture(scope="class") - def macros(self): - return { - "config.sql": config_macro__macros__config_sql, - } - - # test that the experimental parser doesn't run if the config built-in is overriden with a macro - def test_experimental_parser_config_override( - self, - project, - ): - _, log_output = run_dbt_and_capture(["--debug", "--use-experimental-parser", "parse"]) - - # successful experimental static parsing - assert not ("1698: " in log_output) - # fallback to jinja rendering - assert "1602: " in log_output - # experimental parser failed - assert not ("1604: " in log_output) - # didn't run static parser because dbt detected a built-in macro override - assert "1601: " in log_output + manifest = get_manifest() + node = manifest.nodes["model.test.model_a"] + assert node.refs == [RefArgs(name="model_b")] + assert node.sources == [["my_src", "my_tbl"]] + assert node.config._extra == {"x": True} + assert node.config.tags == ["hello", "world"] diff --git a/tests/functional/exposures/test_exposure_configs.py b/tests/functional/exposures/test_exposure_configs.py index 34c5570a84e..2ec309623a7 100644 --- a/tests/functional/exposures/test_exposure_configs.py +++ b/tests/functional/exposures/test_exposure_configs.py @@ -1,20 +1,19 @@ import pytest -from hologram import ValidationError -from dbt.contracts.graph.model_config import ExposureConfig - -from dbt.tests.util import run_dbt, update_config_file, get_manifest +from dbt.artifacts.resources import ExposureConfig +from dbt.tests.util import get_manifest, run_dbt, update_config_file +from dbt_common.dataclass_schema import ValidationError from tests.functional.exposures.fixtures import ( - models_sql, - second_model_sql, - simple_exposure_yml, 
disabled_models_exposure_yml, enabled_yaml_level_exposure_yml, invalid_config_exposure_yml, - source_schema_yml, metricflow_time_spine_sql, - semantic_models_schema_yml, metrics_schema_yml, + models_sql, + second_model_sql, + semantic_models_schema_yml, + simple_exposure_yml, + source_schema_yml, ) diff --git a/tests/functional/exposures/test_exposures.py b/tests/functional/exposures/test_exposures.py index 1988dd976b3..be42ffd26c0 100644 --- a/tests/functional/exposures/test_exposures.py +++ b/tests/functional/exposures/test_exposures.py @@ -1,14 +1,14 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest +from dbt.tests.util import get_manifest, run_dbt from tests.functional.exposures.fixtures import ( + metricflow_time_spine_sql, + metrics_schema_yml, models_sql, second_model_sql, + semantic_models_schema_yml, simple_exposure_yml, source_schema_yml, - metrics_schema_yml, - semantic_models_schema_yml, - metricflow_time_spine_sql, ) diff --git a/tests/functional/external_reference/test_external_reference.py b/tests/functional/external_reference/test_external_reference.py index 8b5294155d8..7ac561ce862 100644 --- a/tests/functional/external_reference/test_external_reference.py +++ b/tests/functional/external_reference/test_external_reference.py @@ -2,7 +2,6 @@ from dbt.tests.util import run_dbt - external_model_sql = """ {{ config( diff --git a/tests/functional/fail_fast/test_fail_fast_run.py b/tests/functional/fail_fast/test_fail_fast_run.py index ea956a2d540..457d620cd8d 100644 --- a/tests/functional/fail_fast/test_fail_fast_run.py +++ b/tests/functional/fail_fast/test_fail_fast_run.py @@ -44,15 +44,15 @@ def test_fail_fast_run( class TestFailFastFromConfig(FailFastBase): @pytest.fixture(scope="class") - def profiles_config_update(self): + def project_config_update(self): return { - "config": { + "flags": { "send_anonymous_usage_stats": False, "fail_fast": True, } } - def test_fail_fast_run_user_config( + def test_fail_fast_run_project_flags( self, project, models, # noqa: F811 diff --git a/tests/functional/fixtures/__init__.py b/tests/functional/fixtures/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/fixtures/happy_path_fixture.py b/tests/functional/fixtures/happy_path_fixture.py new file mode 100644 index 00000000000..3f365f36413 --- /dev/null +++ b/tests/functional/fixtures/happy_path_fixture.py @@ -0,0 +1,32 @@ +import os +from distutils.dir_util import copy_tree + +import pytest + + +def delete_files_in_directory(directory_path): + try: + with os.scandir(directory_path) as entries: + for entry in entries: + if entry.is_file(): + os.unlink(entry.path) + print("All files deleted successfully.") + except OSError: + print("Error occurred while deleting files.") + + +@pytest.fixture(scope="class") +def happy_path_project_files(project_root): + # copy fixture files to the project root + delete_files_in_directory(project_root) + copy_tree( + os.path.dirname(os.path.realpath(__file__)) + "/happy_path_project", str(project_root) + ) + + +# We do project_setup first because it will write out a dbt_project.yml. +# This file will be overwritten by the files in happy_path_project later on. 
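The `happy_path_project_files` fixture above copies the fixture tree with `distutils.dir_util.copy_tree`. Since `distutils` is removed from the standard library in Python 3.12, a `shutil`-based equivalent is sketched below as an alternative; it is not what this change uses.

```python
import shutil


def copy_fixture_tree(src: str, dst: str) -> None:
    # shutil.copytree with dirs_exist_ok=True (Python 3.8+) copies into an
    # existing directory, mirroring distutils.dir_util.copy_tree behavior.
    shutil.copytree(src, dst, dirs_exist_ok=True)
```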
+@pytest.fixture(scope="class") +def happy_path_project(project_setup, happy_path_project_files): + # A fixture that gives functional test the project living in happy_path_project + return project_setup diff --git a/tests/functional/fixtures/happy_path_project/analyses/a.sql b/tests/functional/fixtures/happy_path_project/analyses/a.sql new file mode 100644 index 00000000000..1f7d87c55a9 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/analyses/a.sql @@ -0,0 +1 @@ +select 4 as id diff --git a/tests/functional/fixtures/happy_path_project/dbt_project.yml b/tests/functional/fixtures/happy_path_project/dbt_project.yml new file mode 100644 index 00000000000..716ab611ef3 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/dbt_project.yml @@ -0,0 +1,17 @@ +analysis-paths: +- analyses +config-version: 2 +flags: + send_anonymous_usage_stats: false +macro-paths: +- macros +name: test +profile: test +seed-paths: +- seeds +seeds: + quote_columns: false +snapshot-paths: +- snapshots +test-paths: +- tests diff --git a/tests/functional/fixtures/happy_path_project/macros/macro_stuff.sql b/tests/functional/fixtures/happy_path_project/macros/macro_stuff.sql new file mode 100644 index 00000000000..2ac8c3b572f --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/macros/macro_stuff.sql @@ -0,0 +1,7 @@ +{% macro cool_macro() %} + wow! +{% endmacro %} + +{% macro other_cool_macro(a, b) %} + cool! +{% endmacro %} diff --git a/tests/functional/fixtures/happy_path_project/models/docs.md b/tests/functional/fixtures/happy_path_project/models/docs.md new file mode 100644 index 00000000000..3f547f37fd9 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/docs.md @@ -0,0 +1,3 @@ +{% docs my_docs %} + some docs +{% enddocs %} diff --git a/tests/functional/fixtures/happy_path_project/models/ephemeral.sql b/tests/functional/fixtures/happy_path_project/models/ephemeral.sql new file mode 100644 index 00000000000..4f9208f28a2 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/ephemeral.sql @@ -0,0 +1,5 @@ +{{ config(materialized='ephemeral') }} + +select + 1 as id, + {{ dbt.date_trunc('day', dbt.current_timestamp()) }} as created_at diff --git a/tests/functional/fixtures/happy_path_project/models/incremental.sql b/tests/functional/fixtures/happy_path_project/models/incremental.sql new file mode 100644 index 00000000000..2bb15542da9 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/incremental.sql @@ -0,0 +1,12 @@ +{{ + config( + materialized = "incremental", + incremental_strategy = "delete+insert", + ) +}} + +select * from {{ ref('seed') }} + +{% if is_incremental() %} + where a > (select max(a) from {{this}}) +{% endif %} diff --git a/tests/functional/fixtures/happy_path_project/models/m.yml b/tests/functional/fixtures/happy_path_project/models/m.yml new file mode 100644 index 00000000000..38e5be96af2 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/m.yml @@ -0,0 +1,7 @@ +metrics: + - name: total_outer + type: simple + description: The total count of outer + label: Total Outer + type_params: + measure: total_outer_count diff --git a/tests/functional/fixtures/happy_path_project/models/metricflow_time_spine.sql b/tests/functional/fixtures/happy_path_project/models/metricflow_time_spine.sql new file mode 100644 index 00000000000..b672fabcad2 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/metricflow_time_spine.sql @@ -0,0 +1,2 @@ +select + {{ dbt.date_trunc('day', 
dbt.current_timestamp()) }} as date_day diff --git a/tests/functional/fixtures/happy_path_project/models/metricflow_time_spine_second.sql b/tests/functional/fixtures/happy_path_project/models/metricflow_time_spine_second.sql new file mode 100644 index 00000000000..656724dbe97 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/metricflow_time_spine_second.sql @@ -0,0 +1,2 @@ +select + {{ dbt.date_trunc('second', dbt.current_timestamp()) }} as ts_second diff --git a/tests/functional/fixtures/happy_path_project/models/outer.sql b/tests/functional/fixtures/happy_path_project/models/outer.sql new file mode 100644 index 00000000000..85bbabe21e3 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/outer.sql @@ -0,0 +1 @@ +select * from {{ ref('ephemeral') }} diff --git a/tests/functional/fixtures/happy_path_project/models/schema.yml b/tests/functional/fixtures/happy_path_project/models/schema.yml new file mode 100644 index 00000000000..3d75adab62d --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/schema.yml @@ -0,0 +1,25 @@ +version: 2 +models: + - name: outer + description: The outer table + columns: + - name: id + description: The id value + data_tests: + - unique + - not_null + - name: metricflow_time_spine + description: Day time spine + columns: + - name: date_day + granularity: day + - name: metricflow_time_spine_second + description: Second time spine + columns: + - name: ts_second + granularity: second + +sources: + - name: my_source + tables: + - name: my_table diff --git a/tests/functional/fixtures/happy_path_project/models/sm.yml b/tests/functional/fixtures/happy_path_project/models/sm.yml new file mode 100644 index 00000000000..6db38e3ae4c --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/sm.yml @@ -0,0 +1,18 @@ +semantic_models: + - name: my_sm + model: ref('outer') + defaults: + agg_time_dimension: created_at + entities: + - name: my_entity + type: primary + expr: id + dimensions: + - name: created_at + type: time + type_params: + time_granularity: day + measures: + - name: total_outer_count + agg: count + expr: 1 diff --git a/tests/functional/fixtures/happy_path_project/models/sq.yml b/tests/functional/fixtures/happy_path_project/models/sq.yml new file mode 100644 index 00000000000..e28e3805a3f --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/sq.yml @@ -0,0 +1,14 @@ +saved_queries: + - name: my_saved_query + label: My Saved Query + query_params: + metrics: + - total_outer + group_by: + - "Dimension('my_entity__created_at')" + exports: + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name diff --git a/tests/functional/fixtures/happy_path_project/models/sub/inner.sql b/tests/functional/fixtures/happy_path_project/models/sub/inner.sql new file mode 100644 index 00000000000..a90004d480d --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/models/sub/inner.sql @@ -0,0 +1 @@ +select * from {{ ref('outer') }} diff --git a/tests/functional/fixtures/happy_path_project/seeds/seed.csv b/tests/functional/fixtures/happy_path_project/seeds/seed.csv new file mode 100644 index 00000000000..cfa20f81071 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/seeds/seed.csv @@ -0,0 +1,2 @@ +a,b +1,2 diff --git a/tests/functional/fixtures/happy_path_project/snapshots/snapshot.sql b/tests/functional/fixtures/happy_path_project/snapshots/snapshot.sql new file mode 100644 index 00000000000..60d803dfbb4 --- /dev/null +++ 
b/tests/functional/fixtures/happy_path_project/snapshots/snapshot.sql @@ -0,0 +1,12 @@ +{% snapshot my_snapshot %} + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id', + strategy='timestamp', + updated_at='updated_at', + ) + }} + select * from {{database}}.{{schema}}.seed +{% endsnapshot %} diff --git a/tests/functional/fixtures/happy_path_project/tests/t.sql b/tests/functional/fixtures/happy_path_project/tests/t.sql new file mode 100644 index 00000000000..9757b1b7ae9 --- /dev/null +++ b/tests/functional/fixtures/happy_path_project/tests/t.sql @@ -0,0 +1 @@ +select 1 as id limit 0 diff --git a/tests/functional/graph_selection/fixtures.py b/tests/functional/graph_selection/fixtures.py index 2295200901a..b4d4a677d25 100644 --- a/tests/functional/graph_selection/fixtures.py +++ b/tests/functional/graph_selection/fixtures.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import read_file +from dbt.tests.util import read_file schema_yml = """ version: 2 @@ -33,20 +33,20 @@ group: emails_group columns: - name: email - tests: + data_tests: - not_null: severity: warn - name: users group: users_group columns: - name: id - tests: + data_tests: - unique - name: users_rollup group: users_rollup_group columns: - name: gender - tests: + data_tests: - unique - name: versioned latest_version: 2 diff --git a/tests/functional/graph_selection/test_graph_selection.py b/tests/functional/graph_selection/test_graph_selection.py index 88b45c8bcf5..36a8203ef4f 100644 --- a/tests/functional/graph_selection/test_graph_selection.py +++ b/tests/functional/graph_selection/test_graph_selection.py @@ -1,11 +1,11 @@ -import os import json +import os + import pytest -from dbt.tests.util import run_dbt, check_result_nodes_by_name +from dbt.tests.util import check_result_nodes_by_name, run_dbt from tests.functional.graph_selection.fixtures import SelectionFixtures - selectors_yml = """ selectors: - name: bi_selector @@ -142,6 +142,23 @@ def test_locally_qualified_name(self, project): check_result_nodes_by_name(results, ["subdir"]) assert_correct_schemas(project) + # Check that list command works + os.chdir( + project.profiles_dir + ) # Change to random directory to test that Path selector works with project-dir + results = run_dbt( + [ + "-q", + "ls", + "-s", + "path:models/test/subdir.sql", + "--project-dir", + str(project.project_root), + ] + # ["list", "--project-dir", str(project.project_root), "--select", "models/test/subdir*"] + ) + assert len(results) == 1 + def test_locally_qualified_name_model_with_dots(self, project): results = run_dbt(["run", "--select", "alternative.users"], expect_pass=False) check_result_nodes_by_name(results, ["alternative.users"]) @@ -268,3 +285,22 @@ def test_exposure_parents(self, project): "users", ], ) + + +class TestListPathGraphSelection(SelectionFixtures): + def test_list_select_with_project_dir(self, project): + # Check that list command works + os.chdir( + project.profiles_dir + ) # Change to random directory to test that Path selector works with project-dir + results = run_dbt( + [ + "-q", + "ls", + "-s", + "path:models/test/subdir.sql", + "--project-dir", + str(project.project_root), + ] + ) + assert results == ["test.test.subdir"] diff --git a/tests/functional/graph_selection/test_group_selection.py b/tests/functional/graph_selection/test_group_selection.py index c14f008b7d9..fe028666956 100644 --- a/tests/functional/graph_selection/test_group_selection.py +++ b/tests/functional/graph_selection/test_group_selection.py @@ 
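The list checks above exercise the `path:` selection method together with `--project-dir` after changing to an unrelated working directory. For reference, here is a hedged sketch of the same invocation through the programmatic `dbtRunner` API; the flags mirror those used in the test, while the helper itself is illustrative.

```python
import os

from dbt.cli.main import dbtRunner


def list_subdir_model(project_root: str, other_dir: str):
    # Run from an unrelated directory so path: selection must be resolved
    # against --project-dir rather than the current working directory.
    os.chdir(other_dir)
    return dbtRunner().invoke(
        ["-q", "ls", "-s", "path:models/test/subdir.sql", "--project-dir", project_root]
    )
```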
-1,22 +1,21 @@ import pytest -from dbt.tests.util import run_dbt, read_file +from dbt.tests.util import read_file, run_dbt from tests.functional.graph_selection.fixtures import ( - schema_yml, + alternative_users_sql, base_users_sql, - users_sql, - users_rollup_sql, - users_rollup_dependency_sql, - emails_sql, emails_alt_sql, - alternative_users_sql, - never_selected_sql, - subdir_sql, + emails_sql, nested_users_sql, + never_selected_sql, properties_yml, + schema_yml, + subdir_sql, + users_rollup_dependency_sql, + users_rollup_sql, + users_sql, ) - selectors_yml = """ selectors: - name: group_specified_as_string_str diff --git a/tests/functional/graph_selection/test_inline.py b/tests/functional/graph_selection/test_inline.py new file mode 100644 index 00000000000..bf01ec8ae6a --- /dev/null +++ b/tests/functional/graph_selection/test_inline.py @@ -0,0 +1,64 @@ +import pytest + +from dbt.cli.exceptions import DbtUsageException +from dbt.tests.util import run_dbt, run_dbt_and_capture, write_file + +selectors_yml = """ + selectors: + - name: test_selector + description: Exclude everything + default: true + definition: + method: package + value: "foo" + """ + +dbt_project_yml = """ +name: test +profile: test +flags: + send_anonymous_usage_stats: false +""" + +dbt_project_yml_disabled_models = """ +name: test +profile: test +flags: + send_anonymous_usage_stats: false +models: + +enabled: false +""" + + +class TestCompileInlineWithSelector: + @pytest.fixture(scope="class") + def models(self): + return { + "first_model.sql": "select 1 as id", + } + + @pytest.fixture(scope="class") + def selectors(self): + return selectors_yml + + def test_inline_selectors(self, project): + (results, log_output) = run_dbt_and_capture( + ["compile", "--inline", "select * from {{ ref('first_model') }}"] + ) + assert len(results) == 1 + assert "Compiled inline node is:" in log_output + + # Set all models to disabled, check that we still get inline result + write_file(dbt_project_yml_disabled_models, project.project_root, "dbt_project.yml") + (results, log_output) = run_dbt_and_capture(["compile", "--inline", "select 1 as id"]) + assert len(results) == 1 + + # put back non-disabled dbt_project and check for mutually exclusive error message + # for --select and --inline + write_file(dbt_project_yml, project.project_root, "dbt_project.yml") + with pytest.raises(DbtUsageException): + run_dbt(["compile", "--select", "first_model", "--inline", "select 1 as id"]) + + # check for mutually exclusive --selector and --inline + with pytest.raises(DbtUsageException): + run_dbt(["compile", "--selector", "test_selector", "--inline", "select 1 as id"]) diff --git a/tests/functional/graph_selection/test_intersection_syntax.py b/tests/functional/graph_selection/test_intersection_syntax.py index 29c53fd405c..35cf78e6d6d 100644 --- a/tests/functional/graph_selection/test_intersection_syntax.py +++ b/tests/functional/graph_selection/test_intersection_syntax.py @@ -1,9 +1,8 @@ import pytest -from dbt.tests.util import run_dbt, check_result_nodes_by_name +from dbt.tests.util import check_result_nodes_by_name, run_dbt from tests.functional.graph_selection.fixtures import SelectionFixtures - selectors_yml = """ selectors: - name: same_intersection diff --git a/tests/functional/graph_selection/test_schema_test_graph_selection.py b/tests/functional/graph_selection/test_schema_test_graph_selection.py index 105397d4112..48ca572f370 100644 --- a/tests/functional/graph_selection/test_schema_test_graph_selection.py +++ 
b/tests/functional/graph_selection/test_schema_test_graph_selection.py @@ -1,8 +1,7 @@ import pytest -from dbt.tests.util import run_dbt from dbt.tests.fixtures.project import write_project_files - +from dbt.tests.util import run_dbt from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 from tests.functional.graph_selection.fixtures import SelectionFixtures diff --git a/tests/functional/graph_selection/test_tag_selection.py b/tests/functional/graph_selection/test_tag_selection.py index 7e954ad22db..7561937e18d 100644 --- a/tests/functional/graph_selection/test_tag_selection.py +++ b/tests/functional/graph_selection/test_tag_selection.py @@ -1,9 +1,8 @@ import pytest -from dbt.tests.util import run_dbt, check_result_nodes_by_name +from dbt.tests.util import check_result_nodes_by_name, run_dbt from tests.functional.graph_selection.fixtures import SelectionFixtures - selectors_yml = """ selectors: - name: tag_specified_as_string_str diff --git a/tests/functional/graph_selection/test_version_selection.py b/tests/functional/graph_selection/test_version_selection.py index 4f9325a1fb8..335fad25270 100644 --- a/tests/functional/graph_selection/test_version_selection.py +++ b/tests/functional/graph_selection/test_version_selection.py @@ -1,15 +1,14 @@ import pytest -from dbt.tests.util import run_dbt, read_file +from dbt.tests.util import read_file, run_dbt from tests.functional.graph_selection.fixtures import ( - schema_yml, base_users_sql, - users_sql, - users_rollup_sql, properties_yml, + schema_yml, + users_rollup_sql, + users_sql, ) - selectors_yml = """ selectors: - name: version_specified_as_string_str diff --git a/tests/functional/incremental_schema_tests/fixtures.py b/tests/functional/incremental_schema_tests/fixtures.py index b80bea45e80..2391feb72dd 100644 --- a/tests/functional/incremental_schema_tests/fixtures.py +++ b/tests/functional/incremental_schema_tests/fixtures.py @@ -9,49 +9,49 @@ columns: - name: id tags: [column_level_tag] - tests: + data_tests: - unique - name: incremental_ignore columns: - name: id tags: [column_level_tag] - tests: + data_tests: - unique - name: incremental_ignore_target columns: - name: id tags: [column_level_tag] - tests: + data_tests: - unique - name: incremental_append_new_columns columns: - name: id tags: [column_level_tag] - tests: + data_tests: - unique - name: incremental_append_new_columns_target columns: - name: id tags: [column_level_tag] - tests: + data_tests: - unique - name: incremental_sync_all_columns columns: - name: id tags: [column_level_tag] - tests: + data_tests: - unique - name: incremental_sync_all_columns_target columns: - name: id tags: [column_leveL_tag] - tests: + data_tests: - unique """ diff --git a/tests/functional/incremental_schema_tests/test_incremental_schema.py b/tests/functional/incremental_schema_tests/test_incremental_schema.py index 8203f497331..28d4ab546bf 100644 --- a/tests/functional/incremental_schema_tests/test_incremental_schema.py +++ b/tests/functional/incremental_schema_tests/test_incremental_schema.py @@ -1,31 +1,27 @@ import pytest -from dbt.tests.util import ( - check_relations_equal, - run_dbt, -) - +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.incremental_schema_tests.fixtures import ( - _PROPERTIES__SCHEMA, - _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, + _MODELS__A, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, + _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, + 
_MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + _MODELS__INCREMENTAL_FAIL, _MODELS__INCREMENTAL_IGNORE, - _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, _MODELS__INCREMENTAL_IGNORE_TARGET, - _MODELS__INCREMENTAL_FAIL, _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE, - _MODELS__A, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS, _MODELS__INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, - _MODELS__INCREMENTAL_APPEND_NEW_COLUMNS_REMOVE_ONE_TARGET, - _TESTS__SELECT_FROM_INCREMENTAL_IGNORE, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY, + _MODELS__INCREMENTAL_SYNC_REMOVE_ONLY_TARGET, + _PROPERTIES__SCHEMA, _TESTS__SELECT_FROM_A, + _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS, _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS_TARGET, + _TESTS__SELECT_FROM_INCREMENTAL_IGNORE, + _TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET, _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS, _TESTS__SELECT_FROM_INCREMENTAL_SYNC_ALL_COLUMNS_TARGET, - _TESTS__SELECT_FROM_INCREMENTAL_IGNORE_TARGET, - _TESTS__SELECT_FROM_INCREMENTAL_APPEND_NEW_COLUMNS, ) diff --git a/tests/functional/init/test_init.py b/tests/functional/init/test_init.py index 8c0444bd0b2..1c477f9bf0c 100644 --- a/tests/functional/init/test_init.py +++ b/tests/functional/init/test_init.py @@ -1,10 +1,13 @@ -import click import os -import pytest from pathlib import Path from unittest import mock from unittest.mock import Mock, call +import click +import pytest +import yaml + +from dbt.exceptions import DbtRuntimeError from dbt.tests.util import run_dbt @@ -67,9 +70,7 @@ def test_init_task_in_project_with_existing_profiles_yml( with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: assert ( f.read() - == """config: - send_anonymous_usage_stats: false -test: + == """test: outputs: dev: dbname: test_db @@ -84,6 +85,11 @@ def test_init_task_in_project_with_existing_profiles_yml( """ ) + def test_init_task_in_project_specifying_profile_errors(self): + with pytest.raises(DbtRuntimeError) as error: + run_dbt(["init", "--profile", "test"], expect_pass=False) + assert "Can not init existing project with specified profile" in str(error) + class TestInitProjectWithoutExistingProfilesYml: @mock.patch("dbt.task.init._get_adapter_plugin_names") @@ -159,6 +165,20 @@ def exists_side_effect(path): """ ) + @mock.patch.object(Path, "exists", autospec=True) + def test_init_task_in_project_without_profile_yml_specifying_profile_errors(self, exists): + def exists_side_effect(path): + # Override responses on specific files, default to 'real world' if not overridden + return {"profiles.yml": False}.get(path.name, os.path.exists(path)) + + exists.side_effect = exists_side_effect + + # Even though no profiles.yml file exists, the init will not modify project.yml, + # so this errors + with pytest.raises(DbtRuntimeError) as error: + run_dbt(["init", "--profile", "test"], expect_pass=False) + assert "Could not find profile named test" in str(error) + class TestInitProjectWithoutExistingProfilesYmlOrTemplate: @mock.patch("dbt.task.init._get_adapter_plugin_names") @@ -369,9 +389,7 @@ def test_init_task_in_project_with_invalid_profile_template( with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: assert ( f.read() - == """config: - send_anonymous_usage_stats: false -test: + == """test: outputs: dev: dbname: test_db @@ -408,7 +426,6 @@ class TestInitOutsideOfProject(TestInitOutsideOfProjectBase): @pytest.fixture(scope="class") def dbt_profile_data(self, unique_schema):
return { - "config": {"send_anonymous_usage_stats": False}, "test": { "outputs": { "default2": { @@ -491,9 +508,7 @@ def test_init_task_outside_of_project( with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: assert ( f.read() - == f"""config: - send_anonymous_usage_stats: false -{project_name}: + == f"""{project_name}: outputs: dev: dbname: test_db @@ -538,7 +553,6 @@ def test_init_task_outside_of_project( # name or the intended use of these models name: '{project_name}' version: '1.0.0' -config-version: 2 # This setting configures which "profile" dbt uses for this project. profile: '{project_name}' @@ -657,7 +671,6 @@ def test_init_provided_project_name_and_skip_profile_setup( # name or the intended use of these models name: '{project_name}' version: '1.0.0' -config-version: 2 # This setting configures which "profile" dbt uses for this project. profile: '{project_name}' @@ -708,3 +721,127 @@ def test_init_inside_project_and_skip_profile_setup( # skip interactive profile setup run_dbt(["init", "--skip-profile-setup"]) assert len(manager.mock_calls) == 0 + + +class TestInitOutsideOfProjectWithSpecifiedProfile(TestInitOutsideOfProjectBase): + @mock.patch("dbt.task.init._get_adapter_plugin_names") + @mock.patch("click.prompt") + def test_init_task_outside_of_project_with_specified_profile( + self, mock_prompt, mock_get_adapter, project, project_name, unique_schema, dbt_profile_data + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.prompt.side_effect = [ + project_name, + ] + mock_get_adapter.return_value = [project.adapter.type()] + run_dbt(["init", "--profile", "test"]) + + manager.assert_has_calls( + [ + call.prompt("Enter a name for your project (letters, digits, underscore)"), + ] + ) + + # profiles.yml is NOT overwritten, so assert that the text matches that of the + # original fixture + with open(os.path.join(project.profiles_dir, "profiles.yml"), "r") as f: + assert f.read() == yaml.safe_dump(dbt_profile_data) + + with open(os.path.join(project.project_root, project_name, "dbt_project.yml"), "r") as f: + assert ( + f.read() + == f""" +# Name your project! Project names should contain only lowercase characters +# and underscores. A good package name should reflect your organization's +# name or the intended use of these models +name: '{project_name}' +version: '1.0.0' + +# This setting configures which "profile" dbt uses for this project. +profile: 'test' + +# These configurations specify where dbt should look for different types of files. +# The `model-paths` config, for example, states that models in this project can be +# found in the "models/" directory. You probably won't need to change these! +model-paths: ["models"] +analysis-paths: ["analyses"] +test-paths: ["tests"] +seed-paths: ["seeds"] +macro-paths: ["macros"] +snapshot-paths: ["snapshots"] + +clean-targets: # directories to be removed by `dbt clean` + - "target" + - "dbt_packages" + + +# Configuring models +# Full documentation: https://docs.getdbt.com/docs/configuring-models + +# In this example config, we tell dbt to build all models in the example/ +# directory as views. These settings can be overridden in the individual model +# files using the `{{{{ config(...) }}}}` macro. 
+models: + {project_name}: + # Config indicated by + and applies to all files under models/example/ + example: + +materialized: view +""" + ) + + +class TestInitOutsideOfProjectSpecifyingInvalidProfile(TestInitOutsideOfProjectBase): + @mock.patch("dbt.task.init._get_adapter_plugin_names") + @mock.patch("click.prompt") + def test_init_task_outside_project_specifying_invalid_profile_errors( + self, mock_prompt, mock_get_adapter, project, project_name + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.prompt.side_effect = [ + project_name, + ] + mock_get_adapter.return_value = [project.adapter.type()] + + with pytest.raises(DbtRuntimeError) as error: + run_dbt(["init", "--profile", "invalid"], expect_pass=False) + assert "Could not find profile named invalid" in str(error) + + manager.assert_has_calls( + [ + call.prompt("Enter a name for your project (letters, digits, underscore)"), + ] + ) + + +class TestInitOutsideOfProjectSpecifyingProfileNoProfilesYml(TestInitOutsideOfProjectBase): + @mock.patch("dbt.task.init._get_adapter_plugin_names") + @mock.patch("click.prompt") + def test_init_task_outside_project_specifying_profile_no_profiles_yml_errors( + self, mock_prompt, mock_get_adapter, project, project_name + ): + manager = Mock() + manager.attach_mock(mock_prompt, "prompt") + manager.prompt.side_effect = [ + project_name, + ] + mock_get_adapter.return_value = [project.adapter.type()] + + # Override responses on specific files, default to 'real world' if not overriden + original_isfile = os.path.isfile + with mock.patch( + "os.path.isfile", + new=lambda path: {"profiles.yml": False}.get( + os.path.basename(path), original_isfile(path) + ), + ): + with pytest.raises(DbtRuntimeError) as error: + run_dbt(["init", "--profile", "test"], expect_pass=False) + assert "Could not find profile named invalid" in str(error) + + manager.assert_has_calls( + [ + call.prompt("Enter a name for your project (letters, digits, underscore)"), + ] + ) diff --git a/tests/functional/invalid_model_tests/test_invalid_models.py b/tests/functional/invalid_model_tests/test_invalid_models.py index 09db17bc325..d931b81331a 100644 --- a/tests/functional/invalid_model_tests/test_invalid_models.py +++ b/tests/functional/invalid_model_tests/test_invalid_models.py @@ -1,10 +1,7 @@ import pytest from dbt.exceptions import CompilationError, ParsingError - -from dbt.tests.util import ( - run_dbt, -) +from dbt.tests.util import run_dbt # from `test/integration/011_invalid_model_tests`, invalid_model_tests diff --git a/tests/functional/list/fixtures.py b/tests/functional/list/fixtures.py index c35988e2999..48cd5710347 100644 --- a/tests/functional/list/fixtures.py +++ b/tests/functional/list/fixtures.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.fixtures.project import write_project_files +from dbt.tests.fixtures.project import write_project_files snapshots__snapshot_sql = """ {% snapshot my_snapshot %} @@ -31,7 +31,7 @@ columns: - name: id description: The id value - tests: + data_tests: - unique - not_null @@ -46,7 +46,16 @@ {{ config(materialized='ephemeral') }} -select 1 as id +select + 1 as id, + {{ dbt.date_trunc('day', dbt.current_timestamp()) }} as created_at + +""" + +models__metric_flow = """ + +select + {{ dbt.date_trunc('day', dbt.current_timestamp()) }} as date_day """ @@ -103,6 +112,56 @@ """ +semantic_models__sm_yml = """ +semantic_models: + - name: my_sm + model: ref('outer') + defaults: + agg_time_dimension: created_at + entities: + - name: my_entity + type: primary + expr: id + 
dimensions: + - name: created_at + type: time + type_params: + time_granularity: day + measures: + - name: total_outer_count + agg: count + expr: 1 + +""" + +metrics__m_yml = """ +metrics: + - name: total_outer + type: simple + description: The total count of outer + label: Total Outer + type_params: + measure: total_outer_count +""" + + +saved_queries__sq_yml = """ +saved_queries: + - name: my_saved_query + label: My Saved Query + query_params: + metrics: + - total_outer + group_by: + - "Dimension('my_entity__created_at')" + exports: + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name +""" + @pytest.fixture(scope="class") def snapshots(): @@ -122,6 +181,10 @@ def models(): "incremental.sql": models__incremental_sql, "docs.md": models__docs_md, "outer.sql": models__outer_sql, + "metricflow_time_spine.sql": models__metric_flow, + "sq.yml": saved_queries__sq_yml, + "sm.yml": semantic_models__sm_yml, + "m.yml": metrics__m_yml, "sub": {"inner.sql": models__sub__inner_sql}, } @@ -141,6 +204,21 @@ def analyses(): return {"a.sql": analyses__a_sql} +@pytest.fixture(scope="class") +def semantic_models(): + return {"sm.yml": semantic_models__sm_yml} + + +@pytest.fixture(scope="class") +def metrics(): + return {"m.yml": metrics__m_yml} + + +@pytest.fixture(scope="class") +def saved_queries(): + return {"sq.yml": saved_queries__sq_yml} + + @pytest.fixture(scope="class") def project_files( project_root, diff --git a/tests/functional/list/test_commands.py b/tests/functional/list/test_commands.py new file mode 100644 index 00000000000..70739e1c4f7 --- /dev/null +++ b/tests/functional/list/test_commands.py @@ -0,0 +1,105 @@ +import shutil + +import pytest + +from dbt.artifacts.resources.types import NodeType +from dbt.cli.main import dbtRunner +from dbt.cli.types import Command +from dbt.events.types import NoNodesSelected +from dbt.tests.util import run_dbt +from tests.utils import EventCatcher + +""" +Testing different commands against the happy path fixture + +The general flow +1. Declare the commands to be tested +2. Write a parameterized test to ensure a given command runs and produces the desired state. +""" + +# These are commands we're skipping as they don't make sense or don't work with the +# happy path fixture currently +commands_to_skip = { + "clone", + "generate", + "server", + "init", + "list", + "run-operation", + "show", + "snapshot", + "freshness", +} + +# Commands to happy path test +commands = [command.value for command in Command if command.value not in commands_to_skip] + + +class TestRunCommands: + @pytest.fixture(scope="class", autouse=True) + def drop_snapshots(self, happy_path_project, project_root: str) -> None: + """The snapshots are erroring out, so let's drop them. + + Seems to be database related. Ideally snapshots should work in these tests. It's a bad sign that they don't. That + may have more to do with our fixture setup than the source code though. + + Note that `happy_path_fixture_files` is a _class_ based fixture. Thus although this fixture _modifies_ the + files available to the happy path project, it doesn't affect that fixture for tests in other test classes. + """ + + shutil.rmtree(f"{project_root}/snapshots") + + @pytest.mark.parametrize("dbt_command", [(command,) for command in commands]) + def test_run_commmand( + self, + happy_path_project, + dbt_command, + ): + run_dbt([dbt_command]) + + +""" +Testing command interactions with specific node types + +The general flow +1.
Declare resource (node) types to be tested +2. Write a parameterized test that ensures commands interact successfully with each resource type +""" + +# TODO: Figure out which of these are just missing from the happy path fixture vs which ones aren't selectable +skipped_resource_types = { + "analysis", + "operation", + "rpc", + "sql_operation", + "doc", + "macro", + "exposure", + "group", + "unit_test", + "fixture", +} +resource_types = [ + node_type.value for node_type in NodeType if node_type.value not in skipped_resource_types +] + + +class TestSelectResourceType: + @pytest.fixture(scope="function") + def catcher(self) -> EventCatcher: + return EventCatcher(event_to_catch=NoNodesSelected) + + @pytest.fixture(scope="function") + def runner(self, catcher: EventCatcher) -> dbtRunner: + return dbtRunner(callbacks=[catcher.catch]) + + @pytest.mark.parametrize("resource_type", resource_types) + def test_select_by_resource_type( + self, + resource_type: str, + happy_path_project, + runner: dbtRunner, + catcher: EventCatcher, + ) -> None: + runner.invoke(["list", "--select", f"resource_type:{resource_type}"]) + assert len(catcher.caught_events) == 0 diff --git a/tests/functional/list/test_list.py b/tests/functional/list/test_list.py index 84dc313b38c..84b1e382e89 100644 --- a/tests/functional/list/test_list.py +++ b/tests/functional/list/test_list.py @@ -1,18 +1,10 @@ -import pytest -import os import json +import os from dbt.tests.util import run_dbt -from dbt.logger import log_manager - -from tests.functional.list.fixtures import ( # noqa: F401 - snapshots, - tests, - models, - macros, - seeds, - analyses, - project_files, +from tests.functional.fixtures.happy_path_fixture import ( # noqa: F401 + happy_path_project, + happy_path_project_files, ) @@ -20,29 +12,19 @@ class TestList: def dir(self, value): return os.path.normpath(value) - @pytest.fixture(scope="class") - def project_config_update(self): - return { - "config-version": 2, - "analysis-paths": [self.dir("analyses")], - "snapshot-paths": [self.dir("snapshots")], - "macro-paths": [self.dir("macros")], - "seed-paths": [self.dir("seeds")], - "test-paths": [self.dir("tests")], - "seeds": { - "quote_columns": False, - }, - } + def test_packages_install_path_does_not_exist(self, happy_path_project): # noqa: F811 + run_dbt(["list"]) + packages_install_path = "dbt_packages" + + # the packages-install-path should not be created by `dbt list` + assert not os.path.exists(packages_install_path) def run_dbt_ls(self, args=None, expect_pass=True): - log_manager.stdout_console() full_args = ["ls"] if args is not None: full_args += args - result = run_dbt(args=full_args, expect_pass=expect_pass) - log_manager.stdout_console() return result def assert_json_equal(self, json_str, expected): @@ -60,7 +42,7 @@ def expect_given_output(self, args, expectations): else: assert got == expected - def expect_snapshot_output(self, project): + def expect_snapshot_output(self, happy_path_project): # noqa: F811 expectations = { "name": "my_snapshot", "selector": "test.snapshot.my_snapshot", @@ -79,8 +61,14 @@ def expect_snapshot_output(self, project): "quoting": {}, "column_types": {}, "persist_docs": {}, - "target_database": project.database, - "target_schema": project.test_schema, + "target_database": happy_path_project.database, + "target_schema": happy_path_project.test_schema, + "snapshot_meta_column_names": { + "dbt_scd_id": None, + "dbt_updated_at": None, + "dbt_valid_from": None, + "dbt_valid_to": None, + }, "unique_key": "id", "strategy": "timestamp", 
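`TestSelectResourceType` above relies on `EventCatcher` from `tests.utils`, which is not part of this diff. A plausible minimal implementation is sketched here purely for illustration (the real helper may differ); it matches events by name in the same way the events test earlier in this change does.

```python
from dataclasses import dataclass, field
from typing import Any, List


@dataclass
class EventCatcher:
    """Collects every structured dbt event whose name matches the given event class."""

    event_to_catch: type
    caught_events: List[Any] = field(default_factory=list)

    def catch(self, event: Any) -> None:
        # Registered as a dbtRunner callback; each event carries an ``info`` block
        # with the event's name, so we compare against the class name we want.
        if event.info.name == self.event_to_catch.__name__:
            self.caught_events.append(event)
```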
"updated_at": "updated_at", @@ -96,7 +84,10 @@ def expect_snapshot_output(self, project): "packages": [], "incremental_strategy": None, "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "event_time": None, + "lookback": 0, + "batch_size": None, }, "unique_id": "snapshot.test.my_snapshot", "original_file_path": normalize("snapshots/snapshot.sql"), @@ -138,7 +129,10 @@ def expect_analyses_output(self): "packages": [], "incremental_strategy": None, "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "event_time": None, + "lookback": 0, + "batch_size": None, }, "unique_id": "analysis.test.a", "original_file_path": normalize("analyses/a.sql"), @@ -151,13 +145,30 @@ def expect_analyses_output(self): def expect_model_output(self): expectations = { - "name": ("ephemeral", "incremental", "inner", "outer"), - "selector": ("test.ephemeral", "test.incremental", "test.sub.inner", "test.outer"), + "name": ( + "ephemeral", + "incremental", + "inner", + "metricflow_time_spine", + "metricflow_time_spine_second", + "outer", + ), + "selector": ( + "test.ephemeral", + "test.incremental", + "test.sub.inner", + "test.metricflow_time_spine", + "test.metricflow_time_spine_second", + "test.outer", + ), "json": ( { "name": "ephemeral", "package_name": "test", - "depends_on": {"nodes": [], "macros": []}, + "depends_on": { + "nodes": [], + "macros": ["macro.dbt.current_timestamp", "macro.dbt.date_trunc"], + }, "tags": [], "config": { "enabled": True, @@ -181,7 +192,11 @@ def expect_model_output(self): "packages": [], "incremental_strategy": None, "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + "event_time": None, + "lookback": 0, + "batch_size": None, }, "original_file_path": normalize("models/ephemeral.sql"), "unique_id": "model.test.ephemeral", @@ -218,7 +233,11 @@ def expect_model_output(self): "packages": [], "incremental_strategy": "delete+insert", "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + "event_time": None, + "lookback": 0, + "batch_size": None, }, "original_file_path": normalize("models/incremental.sql"), "unique_id": "model.test.incremental", @@ -255,13 +274,99 @@ def expect_model_output(self): "packages": [], "incremental_strategy": None, "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + "event_time": None, + "lookback": 0, + "batch_size": None, }, "original_file_path": normalize("models/sub/inner.sql"), "unique_id": "model.test.inner", "alias": "inner", "resource_type": "model", }, + { + "name": "metricflow_time_spine", + "package_name": "test", + "depends_on": { + "nodes": [], + "macros": ["macro.dbt.current_timestamp", "macro.dbt.date_trunc"], + }, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "view", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "full_refresh": None, + "unique_key": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + 
"docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + "event_time": None, + "lookback": 0, + "batch_size": None, + }, + "original_file_path": normalize("models/metricflow_time_spine.sql"), + "unique_id": "model.test.metricflow_time_spine", + "alias": "metricflow_time_spine", + "resource_type": "model", + }, + { + "name": "metricflow_time_spine_second", + "package_name": "test", + "depends_on": { + "nodes": [], + "macros": ["macro.dbt.current_timestamp", "macro.dbt.date_trunc"], + }, + "tags": [], + "config": { + "enabled": True, + "group": None, + "materialized": "view", + "post-hook": [], + "tags": [], + "pre-hook": [], + "quoting": {}, + "column_types": {}, + "persist_docs": {}, + "full_refresh": None, + "unique_key": None, + "on_schema_change": "ignore", + "on_configuration_change": "apply", + "database": None, + "schema": None, + "alias": None, + "meta": {}, + "grants": {}, + "packages": [], + "incremental_strategy": None, + "docs": {"node_color": None, "show": True}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + "event_time": None, + "lookback": 0, + "batch_size": None, + }, + "original_file_path": normalize("models/metricflow_time_spine_second.sql"), + "unique_id": "model.test.metricflow_time_spine_second", + "alias": "metricflow_time_spine_second", + "resource_type": "model", + }, { "name": "outer", "package_name": "test", @@ -292,7 +397,11 @@ def expect_model_output(self): "packages": [], "incremental_strategy": None, "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + "event_time": None, + "lookback": 0, + "batch_size": None, }, "original_file_path": normalize("models/outer.sql"), "unique_id": "model.test.outer", @@ -304,6 +413,8 @@ def expect_model_output(self): self.dir("models/ephemeral.sql"), self.dir("models/incremental.sql"), self.dir("models/sub/inner.sql"), + self.dir("models/metricflow_time_spine.sql"), + self.dir("models/metricflow_time_spine_second.sql"), self.dir("models/outer.sql"), ), } @@ -340,6 +451,7 @@ def expect_model_ephemeral_output(self): "packages": [], "incremental_strategy": None, "docs": {"node_color": None, "show": True}, + "access": "protected", }, "unique_id": "model.test.ephemeral", "original_file_path": normalize("models/ephemeral.sql"), @@ -358,6 +470,7 @@ def expect_source_output(self): "json": { "config": { "enabled": True, + "event_time": None, }, "unique_id": "source.test.my_source.my_table", "original_file_path": normalize("models/schema.yml"), @@ -392,6 +505,7 @@ def expect_seed_output(self): "pre-hook": [], "quoting": {}, "column_types": {}, + "delimiter": ",", "persist_docs": {}, "quote_columns": False, "full_refresh": None, @@ -406,7 +520,10 @@ def expect_seed_output(self): "packages": [], "incremental_strategy": None, "docs": {"node_color": None, "show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "event_time": None, + "lookback": 0, + "batch_size": None, }, "depends_on": {"macros": []}, "unique_id": "seed.test.seed", @@ -437,6 +554,7 @@ def expect_test_output(self): "materialized": "test", "severity": "ERROR", "store_failures": None, + "store_failures_as": None, "warn_if": "!= 0", "error_if": "!= 0", "fail_calc": "count(*)", @@ -464,6 +582,7 @@ def expect_test_output(self): "materialized": "test", "severity": "ERROR", "store_failures": None, + "store_failures_as": 
None, "warn_if": "!= 0", "error_if": "!= 0", "fail_calc": "count(*)", @@ -494,6 +613,7 @@ def expect_test_output(self): "materialized": "test", "severity": "ERROR", "store_failures": None, + "store_failures_as": None, "warn_if": "!= 0", "error_if": "!= 0", "fail_calc": "count(*)", @@ -533,7 +653,12 @@ def expect_all_output(self): "source:test.my_source.my_table", "test.not_null_outer_id", "test.unique_outer_id", + "test.metricflow_time_spine", + "test.metricflow_time_spine_second", "test.t", + "semantic_model:test.my_sm", + "metric:test.total_outer", + "saved_query:test.my_saved_query", } # analyses have their type inserted into their fqn like tests expected_all = expected_default | {"test.analysis.a"} @@ -558,11 +683,26 @@ def expect_select(self): results = self.run_dbt_ls(["--resource-type", "test", "--select", "+inner"]) assert set(results) == {"test.not_null_outer_id", "test.unique_outer_id"} + results = self.run_dbt_ls(["--resource-type", "semantic_model"]) + assert set(results) == {"semantic_model:test.my_sm"} + + results = self.run_dbt_ls(["--resource-type", "metric"]) + assert set(results) == {"metric:test.total_outer"} + + results = self.run_dbt_ls(["--resource-type", "saved_query"]) + assert set(results) == {"saved_query:test.my_saved_query"} + results = self.run_dbt_ls(["--resource-type", "model", "--select", "outer+"]) assert set(results) == {"test.outer", "test.sub.inner"} results = self.run_dbt_ls(["--resource-type", "model", "--exclude", "inner"]) - assert set(results) == {"test.ephemeral", "test.outer", "test.incremental"} + assert set(results) == { + "test.ephemeral", + "test.outer", + "test.metricflow_time_spine", + "test.metricflow_time_spine_second", + "test.incremental", + } results = self.run_dbt_ls(["--select", "config.incremental_strategy:delete+insert"]) assert set(results) == {"test.incremental"} @@ -580,18 +720,29 @@ def expect_resource_type_multiple(self): "test.not_null_outer_id", "test.outer", "test.sub.inner", + "test.metricflow_time_spine", + "test.metricflow_time_spine_second", "test.t", "test.unique_outer_id", } results = self.run_dbt_ls( - ["--resource-type", "test", "--resource-type", "model", "--exclude", "unique_outer_id"] + [ + "--resource-type", + "test", + "--resource-type", + "model", + "--exclude", + "unique_outer_id", + ] ) assert set(results) == { "test.ephemeral", "test.incremental", "test.not_null_outer_id", "test.outer", + "test.metricflow_time_spine", + "test.metricflow_time_spine_second", "test.sub.inner", "test.t", } @@ -616,10 +767,44 @@ def expect_resource_type_multiple(self): "test.outer", } - def expect_selected_keys(self, project): + def expect_resource_type_env_var(self): + """Expect selected resources when --resource-type given multiple times""" + os.environ["DBT_RESOURCE_TYPES"] = "test model" + results = self.run_dbt_ls() + assert set(results) == { + "test.ephemeral", + "test.incremental", + "test.not_null_outer_id", + "test.outer", + "test.sub.inner", + "test.metricflow_time_spine", + "test.metricflow_time_spine_second", + "test.t", + "test.unique_outer_id", + } + del os.environ["DBT_RESOURCE_TYPES"] + os.environ["DBT_EXCLUDE_RESOURCE_TYPES"] = ( + "test saved_query metric source semantic_model snapshot seed" + ) + results = self.run_dbt_ls() + assert set(results) == { + "test.ephemeral", + "test.incremental", + "test.outer", + "test.sub.inner", + "test.metricflow_time_spine", + "test.metricflow_time_spine_second", + } + del os.environ["DBT_EXCLUDE_RESOURCE_TYPES"] + + def expect_selected_keys(self, happy_path_project): # noqa: 
F811 """Expect selected fields of the the selected model""" expectations = [ - {"database": project.database, "schema": project.test_schema, "alias": "inner"} + { + "database": happy_path_project.database, + "schema": happy_path_project.test_schema, + "alias": "inner", + } ] results = self.run_dbt_ls( [ @@ -640,7 +825,9 @@ def expect_selected_keys(self, project): """Expect selected fields when --output-keys given multiple times """ - expectations = [{"database": project.database, "schema": project.test_schema}] + expectations = [ + {"database": happy_path_project.database, "schema": happy_path_project.test_schema} + ] results = self.run_dbt_ls( [ "--model", @@ -702,8 +889,8 @@ def expect_selected_keys(self, project): for got, expected in zip(results, expectations): self.assert_json_equal(got, expected) - def test_ls(self, project): - self.expect_snapshot_output(project) + def test_ls(self, happy_path_project): # noqa: F811 + self.expect_snapshot_output(happy_path_project) self.expect_analyses_output() self.expect_model_output() self.expect_source_output() @@ -711,8 +898,9 @@ def test_ls(self, project): self.expect_test_output() self.expect_select() self.expect_resource_type_multiple() + self.expect_resource_type_env_var() self.expect_all_output() - self.expect_selected_keys(project) + self.expect_selected_keys(happy_path_project) def normalize(path): diff --git a/tests/functional/logging/test_logging.py b/tests/functional/logging/test_logging.py index d2e04642f9f..e29bd9c7cac 100644 --- a/tests/functional/logging/test_logging.py +++ b/tests/functional/logging/test_logging.py @@ -1,10 +1,12 @@ -import pytest -from dbt.tests.util import run_dbt, get_manifest, read_file import json import os -from dbt.events.functions import fire_event -from dbt.events.types import InvalidOptionYAML +import pytest + +from dbt.events.types import InvalidOptionYAML +from dbt.tests.util import get_manifest, read_file, run_dbt +from dbt_common.events import EventLevel +from dbt_common.events.functions import fire_event my_model_sql = """ select 1 as fun @@ -68,6 +70,26 @@ def test_basic(project, logs_dir): assert "orig_conn_name" in data and data["orig_conn_name"] +def test_formatted_logs(project, logs_dir): + # a basic run of dbt with a single model should have 5 `Formatting` events in the json logs + results = run_dbt(["--log-format=json", "run"]) + assert len(results) == 1 + + # get log file + json_log_file = read_file(logs_dir, "dbt.log") + formatted_json_lines = 0 + for log_line in json_log_file.split("\n"): + # skip the empty line at the end + if len(log_line) == 0: + continue + log_dct = json.loads(log_line) + log_event = log_dct["info"]["name"] + if log_event == "Formatting": + formatted_json_lines += 1 + + assert formatted_json_lines == 5 + + def test_invalid_event_value(project, logs_dir): results = run_dbt(["--log-format=json", "run"]) assert len(results) == 1 @@ -80,3 +102,178 @@ def test_invalid_event_value(project, logs_dir): fire_event(InvalidOptionYAML(option_name=1)) assert str(excinfo.value) == "[InvalidOptionYAML]: Unable to parse dict {'option_name': 1}" + + +groups_yml = """ +groups: + - name: my_group + owner: + name: my_name + email: my.email@gmail.com + slack: my_slack + other_property: something_else + +models: + - name: my_model + group: my_group + access: public +""" + + +class TestRunResultErrorNodeInfo: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": "select not_found as id", + } + + def test_node_info_on_results(self, project, logs_dir): + results = 
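The logging assertions below all repeat the same loop: read `dbt.log`, skip blank lines, parse each line as JSON, and ignore DEBUG-level records. A small helper capturing that pattern might look like the following; this is a refactoring sketch, not part of the diff.

```python
import json

from dbt.tests.util import read_file
from dbt_common.events import EventLevel


def iter_log_events(logs_dir, log_file_name="dbt.log"):
    """Yield parsed, non-debug JSON records from a dbt log file."""
    log_file = read_file(logs_dir, log_file_name)
    for log_line in log_file.split("\n"):
        # Skip the empty line at the end of the file
        if not log_line:
            continue
        log_json = json.loads(log_line)
        # Drop debug-level records, mirroring the checks in the tests below
        if log_json["info"]["level"] == EventLevel.DEBUG:
            continue
        yield log_json
```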
run_dbt(["--log-format=json", "run"], expect_pass=False) + assert len(results) == 1 + + log_file = read_file(logs_dir, "dbt.log") + + for log_line in log_file.split("\n"): + if not log_line: + continue + + log_json = json.loads(log_line) + if log_json["info"]["level"] == EventLevel.DEBUG: + continue + + if log_json["info"]["name"] == "RunResultError": + assert "node_info" in log_json["data"] + assert log_json["data"]["node_info"]["unique_id"] == "model.test.my_model" + assert "Database Error" in log_json["data"]["msg"] + + +def assert_group_data(group_data): + assert group_data["name"] == "my_group" + assert group_data["owner"] == { + "name": "my_name", + "email": "my.email@gmail.com", + "slack": "my_slack", + "other_property": "something_else", + } + + +class TestRunResultErrorGroup: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": "select not_found as id", + "groups.yml": groups_yml, + } + + def test_node_info_on_results(self, project, logs_dir): + results = run_dbt(["--log-format=json", "run"], expect_pass=False) + assert len(results) == 1 + + log_file = read_file(logs_dir, "dbt.log") + run_result_error_count = 0 + + for log_line in log_file.split("\n"): + if not log_line: + continue + + log_json = json.loads(log_line) + if log_json["info"]["level"] == EventLevel.DEBUG: + continue + + if log_json["info"]["name"] == "RunResultError": + assert "group" in log_json["data"] + assert_group_data(log_json["data"]["group"]) + run_result_error_count += 1 + + assert run_result_error_count == 1 + + +class TestRunResultFailureGroup: + @pytest.fixture(scope="class") + def models(self): + schema_yml = ( + groups_yml + + """ + columns: + - name: my_column + tests: + - not_null +""" + ) + print(schema_yml) + return { + "my_model.sql": "select 1 as id, null as my_column", + "groups.yml": schema_yml, + } + + def test_node_info_on_results(self, project, logs_dir): + results = run_dbt(["--log-format=json", "build"], expect_pass=False) + assert len(results) == 2 + + log_file = read_file(logs_dir, "dbt.log") + run_result_error_count = 0 + run_result_failure_count = 0 + + for log_line in log_file.split("\n"): + if not log_line: + continue + + log_json = json.loads(log_line) + if log_json["info"]["level"] == EventLevel.DEBUG: + continue + + if log_json["info"]["name"] == "RunResultError": + assert "group" in log_json["data"] + assert_group_data(log_json["data"]["group"]) + run_result_error_count += 1 + + if log_json["info"]["name"] == "RunResultFailure": + assert "group" in log_json["data"] + assert_group_data(log_json["data"]["group"]) + run_result_failure_count += 1 + + assert run_result_error_count == 1 + assert run_result_failure_count == 1 + + +class TestRunResultWarningGroup: + @pytest.fixture(scope="class") + def models(self): + schema_yml = ( + groups_yml + + """ + columns: + - name: my_column + tests: + - not_null: + config: + severity: warn +""" + ) + print(schema_yml) + return { + "my_model.sql": "select 1 as id, null as my_column", + "groups.yml": schema_yml, + } + + def test_node_info_on_results(self, project, logs_dir): + results = run_dbt(["--log-format=json", "build"]) + assert len(results) == 2 + + log_file = read_file(logs_dir, "dbt.log") + run_result_warning_count = 0 + + for log_line in log_file.split("\n"): + if not log_line: + continue + + log_json = json.loads(log_line) + if log_json["info"]["level"] == EventLevel.DEBUG: + continue + + if log_json["info"]["name"] == "RunResultWarning": + assert "group" in log_json["data"] + 
assert_group_data(log_json["data"]["group"]) + run_result_warning_count += 1 + + assert run_result_warning_count == 1 diff --git a/tests/functional/logging/test_meta_logging.py b/tests/functional/logging/test_meta_logging.py index aa262730077..19b10725273 100644 --- a/tests/functional/logging/test_meta_logging.py +++ b/tests/functional/logging/test_meta_logging.py @@ -1,7 +1,9 @@ -import pytest -from dbt.tests.util import run_dbt, read_file import json +import pytest + +from dbt.tests.util import read_file, run_dbt + model1 = "select 1 as fun" model2 = '{{ config(meta={"owners": ["team1", "team2"]})}} select 1 as fun' model3 = '{{ config(meta={"key": 1})}} select 1 as fun' diff --git a/tests/functional/macros/test_macros.py b/tests/functional/macros/test_macros.py index a93a7d76c85..fb3d808526b 100644 --- a/tests/functional/macros/test_macros.py +++ b/tests/functional/macros/test_macros.py @@ -1,33 +1,28 @@ -import pytest import shutil - -import dbt.exceptions - from pathlib import Path -from dbt.tests.util import ( - run_dbt, - check_relations_equal, -) +import pytest +import dbt_common.exceptions from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.macros.fixtures import ( dbt_project__incorrect_dispatch, - models__dep_macro, - models__with_undefined_macro, - models__local_macro, - models__ref_macro, - models__override_get_columns_macros, - models__deprecated_adapter_macro_model, - models__incorrect_dispatch, - models__materialization_macro, + macros__deprecated_adapter_macro, + macros__incorrect_dispatch, macros__my_macros, + macros__named_materialization, macros__no_default_macros, macros__override_get_columns_macros, macros__package_override_get_columns_macros, - macros__deprecated_adapter_macro, - macros__incorrect_dispatch, - macros__named_materialization, + models__dep_macro, + models__deprecated_adapter_macro_model, + models__incorrect_dispatch, + models__local_macro, + models__materialization_macro, + models__override_get_columns_macros, + models__ref_macro, + models__with_undefined_macro, ) @@ -118,7 +113,7 @@ def macros(self): return {"my_macros.sql": macros__no_default_macros} def test_invalid_macro(self, project): - with pytest.raises(dbt.exceptions.CompilationError) as exc: + with pytest.raises(dbt_common.exceptions.CompilationError) as exc: run_dbt() assert "In dispatch: No macro named 'dispatch_to_nowhere' found" in str(exc.value) @@ -255,7 +250,7 @@ def test_misnamed_macro_namespace( ): run_dbt(["deps"]) - with pytest.raises(dbt.exceptions.CompilationError) as exc: + with pytest.raises(dbt_common.exceptions.CompilationError) as exc: run_dbt() assert "In dispatch: No macro named 'cowsay' found" in str(exc.value) @@ -271,7 +266,7 @@ def macros(self): return {"macro.sql": macros__deprecated_adapter_macro} def test_invalid_macro(self, project): - with pytest.raises(dbt.exceptions.CompilationError) as exc: + with pytest.raises(dbt_common.exceptions.CompilationError) as exc: run_dbt() assert 'The "adapter_macro" macro has been deprecated' in str(exc.value) diff --git a/tests/functional/manifest_validations/test_check_for_spaces_in_model_names.py b/tests/functional/manifest_validations/test_check_for_spaces_in_model_names.py new file mode 100644 index 00000000000..543a4557eea --- /dev/null +++ b/tests/functional/manifest_validations/test_check_for_spaces_in_model_names.py @@ -0,0 +1,101 @@ +from typing import Dict + +import pytest + +from dbt import deprecations +from dbt.cli.main import 
dbtRunner +from dbt.events.types import ( + ResourceNamesWithSpacesDeprecation, + SpacesInResourceNameDeprecation, +) +from dbt.tests.util import update_config_file +from dbt_common.events.base_types import EventLevel +from tests.utils import EventCatcher + + +class TestSpacesInModelNamesHappyPath: + def test_no_warnings_when_no_spaces_in_name(self, project) -> None: + event_catcher = EventCatcher(SpacesInResourceNameDeprecation) + runner = dbtRunner(callbacks=[event_catcher.catch]) + runner.invoke(["parse"]) + assert len(event_catcher.caught_events) == 0 + + +class TestSpacesInModelNamesSadPath: + @pytest.fixture(scope="class") + def models(self) -> Dict[str, str]: + return { + "my model.sql": "select 1 as id", + } + + def tests_warning_when_spaces_in_name(self, project) -> None: + event_catcher = EventCatcher(SpacesInResourceNameDeprecation) + total_catcher = EventCatcher(ResourceNamesWithSpacesDeprecation) + runner = dbtRunner(callbacks=[event_catcher.catch, total_catcher.catch]) + runner.invoke(["parse"]) + + assert len(total_catcher.caught_events) == 1 + assert len(event_catcher.caught_events) == 1 + event = event_catcher.caught_events[0] + assert "Found spaces in the name of `model.test.my model`" in event.info.msg + assert event.info.level == EventLevel.WARN + + +class TestSpaceInModelNamesWithDebug: + @pytest.fixture(scope="class") + def models(self) -> Dict[str, str]: + return { + "my model.sql": "select 1 as id", + "my model2.sql": "select 1 as id", + } + + def tests_debug_when_spaces_in_name(self, project) -> None: + deprecations.reset_deprecations() + spaces_check_catcher = EventCatcher(SpacesInResourceNameDeprecation) + total_catcher = EventCatcher(ResourceNamesWithSpacesDeprecation) + runner = dbtRunner(callbacks=[spaces_check_catcher.catch, total_catcher.catch]) + runner.invoke(["parse"]) + assert len(spaces_check_catcher.caught_events) == 1 + assert len(total_catcher.caught_events) == 1 + assert "Spaces found in 2 resource name(s)" in total_catcher.caught_events[0].info.msg + assert ( + "Run again with `--debug` to see them all." in total_catcher.caught_events[0].info.msg + ) + + deprecations.reset_deprecations() + spaces_check_catcher = EventCatcher(SpacesInResourceNameDeprecation) + total_catcher = EventCatcher(ResourceNamesWithSpacesDeprecation) + runner = dbtRunner(callbacks=[spaces_check_catcher.catch, total_catcher.catch]) + runner.invoke(["parse", "--debug"]) + assert len(spaces_check_catcher.caught_events) == 2 + assert len(total_catcher.caught_events) == 1 + assert ( + "Run again with `--debug` to see them all." 
+ not in total_catcher.caught_events[0].info.msg + ) + + +class TestAllowSpacesInModelNamesFalse: + @pytest.fixture(scope="class") + def models(self) -> Dict[str, str]: + return { + "my model.sql": "select 1 as id", + } + + def test_require_resource_names_without_spaces(self, project): + spaces_check_catcher = EventCatcher(SpacesInResourceNameDeprecation) + runner = dbtRunner(callbacks=[spaces_check_catcher.catch]) + runner.invoke(["parse"]) + assert len(spaces_check_catcher.caught_events) == 1 + assert spaces_check_catcher.caught_events[0].info.level == EventLevel.WARN + + config_patch = {"flags": {"require_resource_names_without_spaces": True}} + update_config_file(config_patch, project.project_root, "dbt_project.yml") + + spaces_check_catcher = EventCatcher(SpacesInResourceNameDeprecation) + runner = dbtRunner(callbacks=[spaces_check_catcher.catch]) + result = runner.invoke(["parse"]) + assert not result.success + assert "Resource names cannot contain spaces" in result.exception.__str__() + assert len(spaces_check_catcher.caught_events) == 1 + assert spaces_check_catcher.caught_events[0].info.level == EventLevel.ERROR diff --git a/tests/functional/materializations/conftest.py b/tests/functional/materializations/conftest.py index b808c1a6a7b..f8b483d46cd 100644 --- a/tests/functional/materializations/conftest.py +++ b/tests/functional/materializations/conftest.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.fixtures.project import write_project_files +from dbt.tests.fixtures.project import write_project_files override_view_adapter_pass_dep__dbt_project_yml = """ name: view_adapter_override @@ -325,6 +325,21 @@ {%- endmaterialization -%} """ +custom_materialization_dep__dbt_project_yml = """ +name: custom_materialization_default +macro-paths: ['macros'] +""" + +custom_materialization_sql = """ +{% materialization custom_materialization, default %} + {%- set target_relation = this.incorporate(type='table') %} + {% call statement('main') -%} + select 1 as column1 + {%- endcall %} + {{ return({'relations': [target_relation]}) }} +{% endmaterialization %} +""" + @pytest.fixture(scope="class") def override_view_adapter_pass_dep(project_root): @@ -368,3 +383,12 @@ def override_view_return_no_relation(project_root): }, } write_project_files(project_root, "override-view-return-no-relation", files) + + +@pytest.fixture(scope="class") +def custom_materialization_dep(project_root): + files = { + "dbt_project.yml": custom_materialization_dep__dbt_project_yml, + "macros": {"custom_materialization.sql": custom_materialization_sql}, + } + write_project_files(project_root, "custom-materialization-dep", files) diff --git a/tests/functional/materializations/fixtures.py b/tests/functional/materializations/fixtures.py new file mode 100644 index 00000000000..8828b22bba9 --- /dev/null +++ b/tests/functional/materializations/fixtures.py @@ -0,0 +1,245 @@ +fct_eph_first_sql = """ +-- fct_eph_first.sql +{{ config(materialized='ephemeral') }} + +with int_eph_first as( + select * from {{ ref('int_eph_first') }} +) + +select * from int_eph_first +""" + +int_eph_first_sql = """ +-- int_eph_first.sql +{{ config(materialized='ephemeral') }} + +select + 1 as first_column, + 2 as second_column +""" + +schema_yml = """ +version: 2 + +models: + - name: int_eph_first + columns: + - name: first_column + data_tests: + - not_null + - name: second_column + data_tests: + - not_null + + - name: fct_eph_first + columns: + - name: first_column + data_tests: + - not_null + - name: second_column + data_tests: + - not_null + +""" + 
+bar_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar1_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar2_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar3_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar4_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +bar5_sql = """ +{{ config(materialized = 'table') }} + +WITH foo AS ( + + SELECT * FROM {{ ref('foo') }} + +), foo_1 AS ( + + SELECT * FROM {{ ref('foo_1') }} + +), foo_2 AS ( + + SELECT * FROM {{ ref('foo_2') }} + +) + +SELECT * FROM foo +UNION ALL +SELECT * FROM foo_1 +UNION ALL +SELECT * FROM foo_2 +""" + +baz_sql = """ +{{ config(materialized = 'table') }} +SELECT * FROM {{ ref('bar') }} +""" + +baz1_sql = """ +{{ config(materialized = 'table') }} +SELECT * FROM {{ ref('bar_1') }} +""" + +foo_sql = """ +{{ config(materialized = 'ephemeral') }} + +with source as ( + + select 1 as id + +), renamed as ( + + select id as uid from source + +) + +select * from renamed +""" + +foo1_sql = """ +{{ config(materialized = 'ephemeral') }} + +WITH source AS ( + + SELECT 1 AS id + +), RENAMED as ( + + SELECT id as UID FROM source + +) + +SELECT * FROM renamed +""" + +foo2_sql = """ +{{ config(materialized = 'ephemeral') }} + +WITH source AS ( + + SELECT 1 AS id + +), RENAMED as ( + + SELECT id as UID FROM source + +) + +SELECT * FROM renamed +""" diff --git a/tests/functional/materializations/materialized_view_tests/test_materialized_view.py b/tests/functional/materializations/materialized_view_tests/test_materialized_view.py deleted file mode 100644 index 5f50afc994c..00000000000 --- a/tests/functional/materializations/materialized_view_tests/test_materialized_view.py +++ /dev/null @@ -1,145 +0,0 @@ -from typing import Optional, Tuple - -import pytest - -from dbt.adapters.base.relation import BaseRelation - -from dbt.tests.util import get_model_file, set_model_file -from tests.adapter.dbt.tests.adapter.materialized_view.basic import MaterializedViewBasic -from tests.adapter.dbt.tests.adapter.materialized_view.changes import ( - MaterializedViewChanges, - MaterializedViewChangesApplyMixin, - MaterializedViewChangesContinueMixin, - MaterializedViewChangesFailMixin, -) -from tests.adapter.dbt.tests.adapter.materialized_view.files import MY_TABLE, MY_VIEW -from 
tests.functional.materializations.materialized_view_tests.utils import ( - query_indexes, - query_relation_type, -) - - -MY_MATERIALIZED_VIEW = """ -{{ config( - materialized='materialized_view', - indexes=[{'columns': ['id']}], -) }} -select * from {{ ref('my_seed') }} -""" - - -class TestPostgresMaterializedViewsBasic(MaterializedViewBasic): - @pytest.fixture(scope="class", autouse=True) - def models(self): - yield { - "my_table.sql": MY_TABLE, - "my_view.sql": MY_VIEW, - "my_materialized_view.sql": MY_MATERIALIZED_VIEW, - } - - @staticmethod - def insert_record(project, table: BaseRelation, record: Tuple[int, int]): - my_id, value = record - project.run_sql(f"insert into {table} (id, value) values ({my_id}, {value})") - - @staticmethod - def refresh_materialized_view(project, materialized_view: BaseRelation): - sql = f"refresh materialized view {materialized_view}" - project.run_sql(sql) - - @staticmethod - def query_row_count(project, relation: BaseRelation) -> int: - sql = f"select count(*) from {relation}" - return project.run_sql(sql, fetch="one")[0] - - @staticmethod - def query_relation_type(project, relation: BaseRelation) -> Optional[str]: - return query_relation_type(project, relation) - - @pytest.mark.skip( - "The current implementation does not support overwriting materialized views with tables." - ) - def test_table_replaces_materialized_view(self, project, my_materialized_view): - super().test_table_replaces_materialized_view(project, my_materialized_view) - - @pytest.mark.skip( - "The current implementation does not support overwriting materialized views with views." - ) - def test_view_replaces_materialized_view(self, project, my_materialized_view): - super().test_view_replaces_materialized_view(project, my_materialized_view) - - @pytest.mark.skip( - "The current implementation does not support overwriting tables with materialized views." - ) - def test_materialized_view_replaces_table(self, project, my_table): - super().test_materialized_view_replaces_table(project, my_table) - - @pytest.mark.skip( - "The current implementation does not support overwriting views with materialized views." 
- ) - def test_materialized_view_replaces_view(self, project, my_view): - super().test_materialized_view_replaces_view(project, my_view) - - -class PostgresMaterializedViewChanges(MaterializedViewChanges): - @pytest.fixture(scope="class", autouse=True) - def models(self): - yield { - "my_table.sql": MY_TABLE, - "my_view.sql": MY_VIEW, - "my_materialized_view.sql": MY_MATERIALIZED_VIEW, - } - - @staticmethod - def query_relation_type(project, relation: BaseRelation) -> Optional[str]: - return query_relation_type(project, relation) - - @staticmethod - def check_start_state(project, materialized_view): - indexes = query_indexes(project, materialized_view) - assert len(indexes) == 1 - assert indexes[0]["column_names"] == "id" - - @staticmethod - def change_config_via_alter(project, materialized_view): - initial_model = get_model_file(project, materialized_view) - new_model = initial_model.replace( - "indexes=[{'columns': ['id']}]", - "indexes=[{'columns': ['value']}]", - ) - set_model_file(project, materialized_view, new_model) - - @staticmethod - def check_state_alter_change_is_applied(project, materialized_view): - indexes = query_indexes(project, materialized_view) - assert len(indexes) == 1 - assert indexes[0]["column_names"] == "value" - - @staticmethod - def change_config_via_replace(project, materialized_view): - # dbt-postgres does not currently monitor changes of this type - pass - - -class TestPostgresMaterializedViewChangesApply( - PostgresMaterializedViewChanges, MaterializedViewChangesApplyMixin -): - @pytest.mark.skip("dbt-postgres does not currently monitor replace changes.") - def test_change_is_applied_via_replace(self, project, my_materialized_view): - super().test_change_is_applied_via_replace(project, my_materialized_view) - - -class TestPostgresMaterializedViewChangesContinue( - PostgresMaterializedViewChanges, MaterializedViewChangesContinueMixin -): - @pytest.mark.skip("dbt-postgres does not currently monitor replace changes.") - def test_change_is_not_applied_via_replace(self, project, my_materialized_view): - super().test_change_is_not_applied_via_alter(project, my_materialized_view) - - -class TestPostgresMaterializedViewChangesFail( - PostgresMaterializedViewChanges, MaterializedViewChangesFailMixin -): - @pytest.mark.skip("dbt-postgres does not currently monitor replace changes.") - def test_change_is_not_applied_via_replace(self, project, my_materialized_view): - super().test_change_is_not_applied_via_replace(project, my_materialized_view) diff --git a/tests/functional/materializations/materialized_view_tests/utils.py b/tests/functional/materializations/materialized_view_tests/utils.py deleted file mode 100644 index 572f21aeb95..00000000000 --- a/tests/functional/materializations/materialized_view_tests/utils.py +++ /dev/null @@ -1,73 +0,0 @@ -from typing import Dict, List, Optional - -from dbt.adapters.base.relation import BaseRelation - -from dbt.adapters.postgres.relation import PostgresRelation - - -def query_relation_type(project, relation: BaseRelation) -> Optional[str]: - assert isinstance(relation, PostgresRelation) - sql = f""" - select - 'table' as relation_type - from pg_tables - where schemaname = '{relation.schema}' - and tablename = '{relation.identifier}' - union all - select - 'view' as relation_type - from pg_views - where schemaname = '{relation.schema}' - and viewname = '{relation.identifier}' - union all - select - 'materialized_view' as relation_type - from pg_matviews - where schemaname = '{relation.schema}' - and matviewname = 
'{relation.identifier}' - """ - results = project.run_sql(sql, fetch="all") - if len(results) == 0: - return None - elif len(results) > 1: - raise ValueError(f"More than one instance of {relation.name} found!") - else: - return results[0][0] - - -def query_indexes(project, relation: BaseRelation) -> List[Dict[str, str]]: - assert isinstance(relation, PostgresRelation) - # pulled directly from `postgres__describe_indexes_template` and manually verified - sql = f""" - select - i.relname as name, - m.amname as method, - ix.indisunique as "unique", - array_to_string(array_agg(a.attname), ',') as column_names - from pg_index ix - join pg_class i - on i.oid = ix.indexrelid - join pg_am m - on m.oid=i.relam - join pg_class t - on t.oid = ix.indrelid - join pg_namespace n - on n.oid = t.relnamespace - join pg_attribute a - on a.attrelid = t.oid - and a.attnum = ANY(ix.indkey) - where t.relname ilike '{ relation.identifier }' - and n.nspname ilike '{ relation.schema }' - and t.relkind in ('r', 'm') - group by 1, 2, 3 - order by 1, 2, 3 - """ - raw_indexes = project.run_sql(sql, fetch="all") - indexes = [ - { - header: value - for header, value in zip(["name", "method", "unique", "column_names"], index) - } - for index in raw_indexes - ] - return indexes diff --git a/tests/functional/materializations/test_custom_materialization.py b/tests/functional/materializations/test_custom_materialization.py index 838eb68bb01..ae377cdb428 100644 --- a/tests/functional/materializations/test_custom_materialization.py +++ b/tests/functional/materializations/test_custom_materialization.py @@ -1,8 +1,8 @@ import pytest +from dbt import deprecations from dbt.tests.util import run_dbt - models__model_sql = """ {{ config(materialized='view') }} select 1 as id @@ -10,11 +10,24 @@ """ +models_custom_materialization__model_sql = """ +{{ config(materialized='custom_materialization') }} +select 1 as id + +""" + + @pytest.fixture(scope="class") def models(): return {"model.sql": models__model_sql} +@pytest.fixture(scope="class") +def set_up_deprecations(): + deprecations.reset_deprecations() + assert deprecations.active_deprecations == set() + + class TestOverrideAdapterDependency: # make sure that if there's a dependency with an adapter-specific # materialization, we honor that materialization @@ -22,22 +35,165 @@ class TestOverrideAdapterDependency: def packages(self): return {"packages": [{"local": "override-view-adapter-dep"}]} - def test_adapter_dependency(self, project, override_view_adapter_dep): + def test_adapter_dependency(self, project, override_view_adapter_dep, set_up_deprecations): + run_dbt(["deps"]) + # this should pass because implicit overrides are now deprecated (= disabled by default) + run_dbt(["run"]) + + +class TestOverrideAdapterDependencyDeprecated: + # make sure that if there's a dependency with an adapter-specific + # materialization, we honor that materialization + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-adapter-dep"}]} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": { + "require_explicit_package_overrides_for_builtin_materializations": True, + }, + } + + def test_adapter_dependency_deprecate_overrides( + self, project, override_view_adapter_dep, set_up_deprecations + ): + run_dbt(["deps"]) + # this should pass because the override is buggy and unused + run_dbt(["run"]) + + # no deprecation warning -- flag used correctly + assert deprecations.active_deprecations == set() + + +class 
TestOverrideAdapterDependencyLegacy: + # make sure that if there's a dependency with an adapter-specific + # materialization, we honor that materialization + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-adapter-dep"}]} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": { + "require_explicit_package_overrides_for_builtin_materializations": False, + }, + } + + def test_adapter_dependency(self, project, override_view_adapter_dep, set_up_deprecations): run_dbt(["deps"]) # this should error because the override is buggy run_dbt(["run"], expect_pass=False) + # overriding a built-in materialization scoped to adapter from package is deprecated + assert deprecations.active_deprecations == {"package-materialization-override"} + class TestOverrideDefaultDependency: @pytest.fixture(scope="class") def packages(self): return {"packages": [{"local": "override-view-default-dep"}]} - def test_default_dependency(self, project, override_view_default_dep): + def test_default_dependency(self, project, override_view_default_dep, set_up_deprecations): + run_dbt(["deps"]) + # this should pass because implicit overrides are now deprecated (= disabled by default) + run_dbt(["run"]) + + +class TestOverrideDefaultDependencyDeprecated: + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-default-dep"}]} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": { + "require_explicit_package_overrides_for_builtin_materializations": True, + }, + } + + def test_default_dependency_deprecated( + self, project, override_view_default_dep, set_up_deprecations + ): + run_dbt(["deps"]) + # this should pass because the override is buggy and unused + run_dbt(["run"]) + + # no deprecation warning -- flag used correctly + assert deprecations.active_deprecations == set() + + +class TestOverrideDefaultDependencyLegacy: + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-default-dep"}]} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "flags": { + "require_explicit_package_overrides_for_builtin_materializations": False, + }, + } + + def test_default_dependency(self, project, override_view_default_dep, set_up_deprecations): + run_dbt(["deps"]) + # this should error because the override is buggy + run_dbt(["run"], expect_pass=False) + + # overriding a built-in materialization from package is deprecated + assert deprecations.active_deprecations == {"package-materialization-override"} + + +root_view_override_macro = """ +{% materialization view, default %} + {{ return(view_default_override.materialization_view_default()) }} +{% endmaterialization %} +""" + + +class TestOverrideDefaultDependencyRootOverride: + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "override-view-default-dep"}]} + + @pytest.fixture(scope="class") + def macros(self): + return {"my_view.sql": root_view_override_macro} + + def test_default_dependency_with_root_override( + self, project, override_view_default_dep, set_up_deprecations + ): run_dbt(["deps"]) # this should error because the override is buggy run_dbt(["run"], expect_pass=False) + # using a package-overridden built-in materialization in a root materialization is _not_ deprecated + assert deprecations.active_deprecations == set() + + +class TestCustomMaterializationDependency: + 
@pytest.fixture(scope="class") + def models(self): + return {"model.sql": models_custom_materialization__model_sql} + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "custom-materialization-dep"}]} + + def test_custom_materialization_dependency( + self, project, custom_materialization_dep, set_up_deprecations + ): + run_dbt(["deps"]) + # custom materialization is valid + run_dbt(["run"]) + + # using a custom materialization from an installed package is _not_ deprecated + assert deprecations.active_deprecations == set() + class TestOverrideAdapterDependencyPassing: @pytest.fixture(scope="class") diff --git a/tests/functional/materializations/test_ephemeral_compilation.py b/tests/functional/materializations/test_ephemeral_compilation.py index 56f49928756..3b8ea9d4a86 100644 --- a/tests/functional/materializations/test_ephemeral_compilation.py +++ b/tests/functional/materializations/test_ephemeral_compilation.py @@ -1,5 +1,24 @@ import pytest + +from dbt.contracts.graph.nodes import ModelNode +from dbt.contracts.results import RunExecutionResult, RunResult from dbt.tests.util import run_dbt +from tests.functional.materializations.fixtures import ( + bar1_sql, + bar2_sql, + bar3_sql, + bar4_sql, + bar5_sql, + bar_sql, + baz1_sql, + baz_sql, + fct_eph_first_sql, + foo1_sql, + foo2_sql, + foo_sql, + int_eph_first_sql, + schema_yml, +) # Note: This tests compilation only, so is a dbt Core test and not an adapter test. # There is some complicated logic in core/dbt/compilation.py having to do with @@ -8,49 +27,14 @@ # on the order in which things are compiled). It requires multi-threading to fail. -fct_eph_first_sql = """ --- fct_eph_first.sql -{{ config(materialized='ephemeral') }} +SUPPRESSED_CTE_EXPECTED_OUTPUT = """-- fct_eph_first.sql + with int_eph_first as( - select * from {{ ref('int_eph_first') }} + select * from __dbt__cte__int_eph_first ) -select * from int_eph_first -""" - -int_eph_first_sql = """ --- int_eph_first.sql -{{ config(materialized='ephemeral') }} - -select - 1 as first_column, - 2 as second_column -""" - -schema_yml = """ -version: 2 - -models: - - name: int_eph_first - columns: - - name: first_column - tests: - - not_null - - name: second_column - tests: - - not_null - - - name: fct_eph_first - columns: - - name: first_column - tests: - - not_null - - name: second_column - tests: - - not_null - -""" +select * from int_eph_first""" class TestEphemeralCompilation: @@ -67,5 +51,38 @@ def test_ephemeral_compilation(self, project): results = run_dbt(["run"]) assert len(results) == 0 - results = run_dbt(["test"]) - len(results) == 4 + def test__suppress_injected_ctes(self, project): + compile_output = run_dbt( + ["compile", "--no-inject-ephemeral-ctes", "--select", "fct_eph_first"] + ) + assert isinstance(compile_output, RunExecutionResult) + node_result = compile_output.results[0] + assert isinstance(node_result, RunResult) + node = node_result.node + assert isinstance(node, ModelNode) + assert node.compiled_code == SUPPRESSED_CTE_EXPECTED_OUTPUT + + +# From: https://github.com/jeremyyeo/ephemeral-invalid-sql-repro/tree/main/models +class TestLargeEphemeralCompilation: + @pytest.fixture(scope="class") + def models(self): + + return { + "bar.sql": bar_sql, + "bar_1.sql": bar1_sql, + "bar_2.sql": bar2_sql, + "bar_3.sql": bar3_sql, + "bar_4.sql": bar4_sql, + "bar_5.sql": bar5_sql, + "baz.sql": baz_sql, + "baz_1.sql": baz1_sql, + "foo.sql": foo_sql, + "foo_1.sql": foo1_sql, + "foo_2.sql": foo2_sql, + } + + def 
test_ephemeral_compilation(self, project): + # 8/11 table models are built as expected. no compilation errors + results = run_dbt(["build"]) + assert len(results) == 8 diff --git a/tests/functional/materializations/test_incremental.py b/tests/functional/materializations/test_incremental.py index b331c9d14b5..48745d0f987 100644 --- a/tests/functional/materializations/test_incremental.py +++ b/tests/functional/materializations/test_incremental.py @@ -1,8 +1,8 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest -from dbt.exceptions import DbtRuntimeError -from dbt.context.providers import generate_runtime_model_context +from dbt.context.providers import generate_runtime_model_context +from dbt.exceptions import DbtRuntimeError +from dbt.tests.util import get_manifest, run_dbt my_model_sql = """ select 1 as fun diff --git a/tests/functional/materializations/test_incremental_with_contract.py b/tests/functional/materializations/test_incremental_with_contract.py new file mode 100644 index 00000000000..a9321f10118 --- /dev/null +++ b/tests/functional/materializations/test_incremental_with_contract.py @@ -0,0 +1,131 @@ +import pytest + +from dbt.tests.util import ( + check_relations_equal, + get_relation_columns, + relation_from_name, + run_dbt, +) + +seeds_base_csv = """ +id,name_xxx,some_date +1,Easton,1981-05-20T06:46:51 +2,Lillian,1978-09-03T18:10:33 +3,Jeremiah,1982-03-11T03:59:51 +4,Nolan,1976-05-06T20:21:35 +5,Hannah,1982-06-23T05:41:26 +6,Eleanor,1991-08-10T23:12:21 +7,Lily,1971-03-29T14:58:02 +8,Jonathan,1988-02-26T02:55:24 +9,Adrian,1994-02-09T13:14:23 +10,Nora,1976-03-01T16:51:39 +""".lstrip() + + +seeds_added_csv = ( + seeds_base_csv + + """ +11,Mateo,2014-09-07T17:04:27 +12,Julian,2000-02-04T11:48:30 +13,Gabriel,2001-07-10T07:32:52 +14,Isaac,2002-11-24T03:22:28 +15,Levi,2009-11-15T11:57:15 +16,Elizabeth,2005-04-09T03:50:11 +17,Grayson,2019-08-06T19:28:17 +18,Dylan,2014-03-01T11:50:41 +19,Jayden,2009-06-06T07:12:49 +20,Luke,2003-12-05T21:42:18 +""".lstrip() +) + +incremental_not_schema_change_sql = """ +{{ config(materialized="incremental", unique_key="user_id_current_time",on_schema_change="sync_all_columns") }} +select + 1 || '-' || current_timestamp as user_id_current_time, + {% if is_incremental() %} + 'thisis18characters' as platform + {% else %} + 'okthisis20characters' as platform + {% endif %} +""" + +incremental_sql = """ + {{ config(materialized="incremental") }} + select * from {{ source('raw', 'seed') }} + {% if is_incremental() %} + where id > (select max(id) from {{ this }}) + {% endif %} +""" + +schema_base_yml = """ +sources: + - name: raw + schema: "{{ target.schema }}" + tables: + - name: seed + identifier: "{{ var('seed_name', 'base') }}" + +models: + - name: incremental + config: + contract: + enforced: true + on_schema_change: append_new_columns + columns: + - name: id + data_type: int + - name: name_xxx + data_type: character varying(10) + - name: some_date + data_type: timestamp +""" + + +class TestIncremental: + @pytest.fixture(scope="class") + def project_config_update(self): + return {"name": "incremental"} + + @pytest.fixture(scope="class") + def models(self): + return {"incremental.sql": incremental_sql, "schema.yml": schema_base_yml} + + @pytest.fixture(scope="class") + def seeds(self): + return {"base.csv": seeds_base_csv, "added.csv": seeds_added_csv} + + def test_incremental(self, project): + # seed command + results = run_dbt(["seed"]) + assert len(results) == 2 + + # base table rowcount + relation = relation_from_name(project.adapter, 
"base") + result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one") + assert result[0] == 10 + + # added table rowcount + relation = relation_from_name(project.adapter, "added") + result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one") + assert result[0] == 20 + + # run command + # the "seed_name" var changes the seed identifier in the schema file + results = run_dbt(["run", "--vars", "seed_name: base"]) + assert len(results) == 1 + + # check relations equal + check_relations_equal(project.adapter, ["base", "incremental"]) + + # change seed_name var + # the "seed_name" var changes the seed identifier in the schema file + results = run_dbt(["run", "--debug", "--vars", "seed_name: added"]) + assert len(results) == 1 + + # Error before fix: Changing col type from character varying(10) to character varying(256) in table: + # "dbt"."test<...>_test_incremental_with_contract"."incremental" + columns = get_relation_columns(project.adapter, "incremental") + # [('id', 'integer', None), ('name_xxx', 'character varying', 10), ('some_date', 'timestamp without time zone', None)] + for column in columns: + if column[0] == "name_xxx": + assert column[2] == 10 diff --git a/tests/functional/materializations/test_runtime_materialization.py b/tests/functional/materializations/test_runtime_materialization.py index 336aac561f2..6f84b478e2b 100644 --- a/tests/functional/materializations/test_runtime_materialization.py +++ b/tests/functional/materializations/test_runtime_materialization.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal, check_table_does_not_exist +from dbt.tests.util import check_relations_equal, check_table_does_not_exist, run_dbt models__view_sql = """ {{ diff --git a/tests/functional/materializations/test_supported_languages.py b/tests/functional/materializations/test_supported_languages.py index a2ef8077de5..5e7b8b332e4 100644 --- a/tests/functional/materializations/test_supported_languages.py +++ b/tests/functional/materializations/test_supported_languages.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt custom_mat_tmpl = """ {% materialization custom_mat{} %} diff --git a/tests/functional/metrics/fixtures.py b/tests/functional/metrics/fixtures.py index 65d61ad74ad..5614fab8c63 100644 --- a/tests/functional/metrics/fixtures.py +++ b/tests/functional/metrics/fixtures.py @@ -88,6 +88,14 @@ metrics: - average_tenure expr: "average_tenure + 1" + + - name: tenured_people + label: Tenured People + description: People who have been here more than 1 year + type: simple + type_params: + measure: people + filter: "{{ Metric('collective_tenure', ['id']) }} > 2" """ metricflow_time_spine_sql = """ @@ -97,6 +105,63 @@ models_people_metrics_yml = """ version: 2 +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + time_granularity: month + config: + meta: + my_meta_config: 'config' + + - name: collective_tenure + label: "Collective tenure" + description: Total number of years of team experience + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + join_to_timespine: true + fill_nulls_with: 0 + + - name: collective_window + label: "Collective window" + description: Testing window + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" 
+ window: 14 days + + - name: average_tenure + label: Average Tenure + description: The average tenure of our people + type: ratio + type_params: + numerator: collective_tenure + denominator: number_of_people + + - name: average_tenure_minus_people + label: Average Tenure minus People + description: Well this isn't really useful is it? + type: derived + type_params: + expr: average_tenure - number_of_people + metrics: + - average_tenure + - number_of_people + +""" + +models_people_metrics_meta_top_yml = """ +version: 2 + metrics: - name: number_of_people @@ -106,7 +171,7 @@ type_params: measure: people meta: - my_meta: 'testing' + my_meta_top: 'top' - name: collective_tenure label: "Collective tenure" @@ -116,6 +181,8 @@ measure: name: years_tenure filter: "{{ Dimension('id__loves_dbt') }} is true" + join_to_timespine: true + fill_nulls_with: 0 - name: collective_window label: "Collective window" @@ -456,6 +523,34 @@ """ +meta_metric_level_schema_yml = """ +version: 2 + +metrics: + + - name: number_of_people + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + config: + meta: + my_meta_config: 'config + meta: + my_meta_direct: 'direct' + + - name: collective_tenure + label: "Collective tenure" + description: Total number of years of team experience + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + +""" + enabled_metric_level_schema_yml = """ version: 2 @@ -624,3 +719,177 @@ meta: my_meta: 'testing' """ + +conversion_semantic_model_purchasing_yml = """ +version: 2 + +semantic_models: + - name: semantic_purchasing + model: ref('purchasing') + measures: + - name: num_orders + agg: COUNT + expr: purchased_at + - name: num_visits + agg: SUM + expr: 1 + dimensions: + - name: purchased_at + type: TIME + entities: + - name: purchase + type: primary + expr: '1' + defaults: + agg_time_dimension: purchased_at + +""" + +cumulative_metric_yml = """ +version: 2 +metrics: + - name: weekly_visits + label: Rolling sum of visits over the last 7 days + type: cumulative + type_params: + measure: num_visits + cumulative_type_params: + window: 7 days + period_agg: average + - name: cumulative_orders + label: Rolling total of orders (all time) + type: cumulative + type_params: + measure: num_orders + cumulative_type_params: + period_agg: last + - name: orders_ytd + label: Total orders since the start of the year + type: cumulative + type_params: + measure: num_orders + cumulative_type_params: + grain_to_date: year + period_agg: first + - name: monthly_orders + label: Orders in the past month + type: cumulative + type_params: + measure: num_orders + window: 1 month + cumulative_type_params: + period_agg: average + - name: yearly_orders + label: Orders in the past year + type: cumulative + type_params: + measure: num_orders + window: 1 year + - name: visits_mtd + label: Visits since start of month + type: cumulative + type_params: + measure: num_visits + grain_to_date: month + - name: cumulative_visits + label: Rolling total of visits (all time) + type: cumulative + type_params: + measure: num_visits +""" + +conversion_metric_yml = """ +version: 2 +metrics: + - name: converted_orders_over_visits + label: Number of orders converted from visits + type: conversion + type_params: + conversion_type_params: + base_measure: num_visits + conversion_measure: num_orders + entity: purchase +""" + +filtered_metrics_yml = """ +version: 2 + +metrics: + + - name: collective_tenure_measure_filter_str + 
label: "Collective tenure1" + description: Total number of years of team experience + type: simple + type_params: + measure: + name: "years_tenure" + filter: "{{ Dimension('id__loves_dbt') }} is true" + + - name: collective_tenure_measure_filter_list + label: "Collective tenure2" + description: Total number of years of team experience + type: simple + type_params: + measure: + name: "years_tenure" + filter: + - "{{ Dimension('id__loves_dbt') }} is true" + + - name: collective_tenure_metric_filter_str + label: Collective tenure3 + description: Total number of years of team experience + type: simple + type_params: + measure: + name: "years_tenure" + filter: "{{ Dimension('id__loves_dbt') }} is true" + + - name: collective_tenure_metric_filter_list + label: Collective tenure4 + description: Total number of years of team experience + type: simple + type_params: + measure: + name: "years_tenure" + filter: + - "{{ Dimension('id__loves_dbt') }} is true" + + - name: average_tenure_filter_str + label: Average tenure of people who love dbt1 + description: Average tenure of people who love dbt + type: derived + type_params: + expr: "average_tenure" + metrics: + - name: average_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + + - name: average_tenure_filter_list + label: Average tenure of people who love dbt2 + description: Average tenure of people who love dbt + type: derived + type_params: + expr: "average_tenure" + metrics: + - name: average_tenure + filter: + - "{{ Dimension('id__loves_dbt') }} is true" +""" + +duplicate_measure_metric_yml = """ +metrics: + # Simple metrics + - name: people_with_tenure + description: "Count of people with tenure" + type: simple + label: People with tenure + type_params: + measure: people + - name: ratio_tenure_to_people + description: People to years of tenure + label: New customers to all customers + type: ratio + type_params: + numerator: people_with_tenure + denominator: number_of_people +""" diff --git a/tests/functional/metrics/test_metric_configs.py b/tests/functional/metrics/test_metric_configs.py index 03b8fe2275c..2be68d9e17f 100644 --- a/tests/functional/metrics/test_metric_configs.py +++ b/tests/functional/metrics/test_metric_configs.py @@ -1,18 +1,18 @@ import pytest -from hologram import ValidationError -from dbt.contracts.graph.model_config import MetricConfig -from dbt.exceptions import CompilationError, ParsingError -from dbt.tests.util import run_dbt, update_config_file, get_manifest - +from dbt.artifacts.resources import MetricConfig +from dbt.exceptions import CompilationError, ParsingError +from dbt.tests.util import get_manifest, run_dbt, update_config_file +from dbt_common.dataclass_schema import ValidationError from tests.functional.metrics.fixtures import ( - models_people_sql, - models_people_metrics_yml, - metricflow_time_spine_sql, disabled_metric_level_schema_yml, enabled_metric_level_schema_yml, - models_people_metrics_sql, invalid_config_metric_yml, + metricflow_time_spine_sql, + models_people_metrics_meta_top_yml, + models_people_metrics_sql, + models_people_metrics_yml, + models_people_sql, semantic_model_people_yml, ) @@ -40,22 +40,24 @@ def models(self): def project_config_update(self): return { "metrics": { - "average_tenure_minus_people": { - "enabled": True, - }, + "test": { + "average_tenure_minus_people": { + "enabled": False, + }, + } } } def test_enabled_metric_config_dbt_project(self, project): run_dbt(["parse"]) manifest = get_manifest(project.project_root) - assert "metric.test.average_tenure_minus_people" 
in manifest.metrics + assert "metric.test.average_tenure_minus_people" not in manifest.metrics new_enabled_config = { "metrics": { "test": { "average_tenure_minus_people": { - "enabled": False, + "enabled": True, }, } } @@ -63,7 +65,7 @@ def test_enabled_metric_config_dbt_project(self, project): update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") run_dbt(["parse"]) manifest = get_manifest(project.project_root) - assert "metric.test.average_tenure_minus_people" not in manifest.metrics + assert "metric.test.average_tenure_minus_people" in manifest.metrics assert "metric.test.collective_tenure" in manifest.metrics @@ -204,3 +206,84 @@ def test_disabling_upstream_metric_errors(self, project): "The metric `number_of_people` is disabled and thus cannot be referenced." ) assert expected_msg in str(excinfo.value) + + +# Test meta config in dbt_project.yml +class TestMetricMetaConfigProjectLevel(MetricConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "schema.yml": models_people_metrics_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "metrics": { + "test": { + "average_tenure_minus_people": { + "+meta": {"project_field": "project_value"}, + }, + } + } + } + + def test_meta_metric_config_dbt_project(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "metric.test.average_tenure_minus_people" in manifest.metrics + # for backwards compatibility the config level meta gets copied to the top level meta + assert manifest.metrics.get("metric.test.average_tenure_minus_people").config.meta == { + "project_field": "project_value" + } + assert manifest.metrics.get("metric.test.average_tenure_minus_people").meta == { + "project_field": "project_value" + } + + +# Test setting config at config level +class TestMetricMetaConfigLevel(MetricConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "schema.yml": models_people_metrics_yml, + } + + def test_meta_metric_config_yaml(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "metric.test.number_of_people" in manifest.metrics + assert manifest.metrics.get("metric.test.number_of_people").config.meta == { + "my_meta_config": "config" + } + assert manifest.metrics.get("metric.test.number_of_people").meta == { + "my_meta_config": "config" + } + + +# Test setting config at metric level- expect to exist in config after parsing +class TestMetricMetaTopLevel(MetricConfigTests): + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "schema.yml": models_people_metrics_meta_top_yml, + } + + def test_meta_metric_config_yaml(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "metric.test.number_of_people" in manifest.metrics + # for backwards compatibility the config level meta gets copied to the top level meta + assert manifest.metrics.get("metric.test.number_of_people").config.meta != { + "my_meta_top": "top" + } + assert 
manifest.metrics.get("metric.test.number_of_people").meta == {"my_meta_top": "top"} diff --git a/tests/functional/metrics/test_metric_deferral.py b/tests/functional/metrics/test_metric_deferral.py index 620c8dba25f..603016acb0c 100644 --- a/tests/functional/metrics/test_metric_deferral.py +++ b/tests/functional/metrics/test_metric_deferral.py @@ -1,7 +1,9 @@ import os -import pytest -from dbt.tests.util import run_dbt, copy_file, write_file from pathlib import Path + +import pytest + +from dbt.tests.util import copy_file, run_dbt, write_file from tests.functional.metrics.fixtures import ( metrics_1_yml, metrics_2_yml, @@ -23,7 +25,6 @@ def setup(self, project): @pytest.fixture(scope="class") def dbt_profile_data(self, unique_schema): return { - "config": {"send_anonymous_usage_stats": False}, "test": { "outputs": { "default": { diff --git a/tests/functional/metrics/test_metric_helper_functions.py b/tests/functional/metrics/test_metric_helper_functions.py index ec1015aa637..8abdce4af29 100644 --- a/tests/functional/metrics/test_metric_helper_functions.py +++ b/tests/functional/metrics/test_metric_helper_functions.py @@ -1,9 +1,14 @@ import pytest -from dbt.tests.util import run_dbt, get_manifest +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.metrics import ResolvedMetricReference - -from tests.functional.metrics.fixtures import models_people_sql, basic_metrics_yml +from dbt.tests.util import run_dbt +from tests.functional.metrics.fixtures import ( + basic_metrics_yml, + metricflow_time_spine_sql, + models_people_sql, + semantic_model_people_yml, +) class TestMetricHelperFunctions: @@ -11,24 +16,22 @@ class TestMetricHelperFunctions: def models(self): return { "metrics.yml": basic_metrics_yml, + "semantic_people.yml": semantic_model_people_yml, + "metricflow_time_spine.sql": metricflow_time_spine_sql, "people.sql": models_people_sql, } - @pytest.mark.skip( - "TODO reactivate after we begin property hydrating metric `depends_on` and `refs`" - ) - def test_expression_metric( + def test_derived_metric( self, project, ): # initial parse - run_dbt(["compile"]) + manifest = run_dbt(["parse"]) + assert isinstance(manifest, Manifest) - # make sure all the metrics are in the manifest - manifest = get_manifest(project.project_root) parsed_metric = manifest.metrics["metric.test.average_tenure_plus_one"] - testing_metric = ResolvedMetricReference(parsed_metric, manifest, None) + testing_metric = ResolvedMetricReference(parsed_metric, manifest) full_metric_dependency = set(testing_metric.full_metric_dependency()) expected_full_metric_dependency = set( diff --git a/tests/functional/metrics/test_metrics.py b/tests/functional/metrics/test_metrics.py index 3cc0ea412b7..3f8fba2a19c 100644 --- a/tests/functional/metrics/test_metrics.py +++ b/tests/functional/metrics/test_metrics.py @@ -1,31 +1,38 @@ import pytest +from dbt.artifacts.resources.v1.metric import CumulativeTypeParams, MetricTimeWindow from dbt.cli.main import dbtRunner from dbt.contracts.graph.manifest import Manifest from dbt.exceptions import ParsingError -from dbt.tests.util import run_dbt, get_manifest - - +from dbt.tests.util import get_manifest, run_dbt +from dbt_semantic_interfaces.type_enums.period_agg import PeriodAggregation +from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity from tests.functional.metrics.fixtures import ( - mock_purchase_data_csv, - models_people_sql, - models_people_metrics_yml, - invalid_models_people_metrics_yml, - invalid_metrics_missing_model_yml, - 
invalid_metrics_missing_expression_yml, - names_with_spaces_metrics_yml, - names_with_special_chars_metrics_yml, - names_with_leading_numeric_metrics_yml, - long_name_metrics_yml, + basic_metrics_yml, + conversion_metric_yml, + conversion_semantic_model_purchasing_yml, + cumulative_metric_yml, + derived_metric_yml, downstream_model_sql, + duplicate_measure_metric_yml, + filtered_metrics_yml, invalid_derived_metric_contains_model_yml, - derived_metric_yml, invalid_metric_without_timestamp_with_time_grains_yml, invalid_metric_without_timestamp_with_window_yml, + invalid_metrics_missing_expression_yml, + invalid_metrics_missing_model_yml, + invalid_models_people_metrics_yml, + long_name_metrics_yml, metricflow_time_spine_sql, + mock_purchase_data_csv, + models_people_metrics_yml, + models_people_sql, + names_with_leading_numeric_metrics_yml, + names_with_spaces_metrics_yml, + names_with_special_chars_metrics_yml, + purchasing_model_sql, semantic_model_people_yml, semantic_model_purchasing_yml, - purchasing_model_sql, ) @@ -74,8 +81,13 @@ def test_simple_metric( "metric.test.average_tenure_minus_people" ].type_params.input_measures ) - == 3 + == 2 + ) + assert ( + manifest.metrics["metric.test.number_of_people"].time_granularity + == TimeGranularity.MONTH ) + assert manifest.metrics["metric.test.collective_tenure"].time_granularity is None class TestInvalidRefMetrics: @@ -339,3 +351,208 @@ def test_simple_metric( # initial run with pytest.raises(ParsingError): run_dbt(["run"]) + + +class TestConversionMetric: + @pytest.fixture(scope="class") + def models(self): + return { + "purchasing.sql": purchasing_model_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": conversion_semantic_model_purchasing_yml, + "conversion_metric.yml": conversion_metric_yml, + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "mock_purchase_data.csv": mock_purchase_data_csv, + } + + def test_conversion_metric( + self, + project, + ): + # initial parse + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + + # make sure the metric is in the manifest + manifest = get_manifest(project.project_root) + metric_ids = list(manifest.metrics.keys()) + expected_metric_ids = [ + "metric.test.converted_orders_over_visits", + ] + assert metric_ids == expected_metric_ids + assert manifest.metrics[ + "metric.test.converted_orders_over_visits" + ].type_params.conversion_type_params + assert ( + len( + manifest.metrics[ + "metric.test.converted_orders_over_visits" + ].type_params.input_measures + ) + == 2 + ) + assert ( + manifest.metrics[ + "metric.test.converted_orders_over_visits" + ].type_params.conversion_type_params.window + is None + ) + assert ( + manifest.metrics[ + "metric.test.converted_orders_over_visits" + ].type_params.conversion_type_params.entity + == "purchase" + ) + + +class TestCumulativeMetric: + @pytest.fixture(scope="class") + def models(self): + return { + "purchasing.sql": purchasing_model_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": conversion_semantic_model_purchasing_yml, + "conversion_metric.yml": cumulative_metric_yml, + } + + @pytest.fixture(scope="class") + def seeds(self): + return {"mock_purchase_data.csv": mock_purchase_data_csv} + + def test_cumulative_metric(self, project): + # initial parse + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + + manifest = 
get_manifest(project.project_root) + metric_ids = set(manifest.metrics.keys()) + expected_metric_ids_to_cumulative_type_params = { + "metric.test.weekly_visits": CumulativeTypeParams( + window=MetricTimeWindow(count=7, granularity=TimeGranularity.DAY), + period_agg=PeriodAggregation.AVERAGE, + ), + "metric.test.cumulative_orders": CumulativeTypeParams( + period_agg=PeriodAggregation.LAST + ), + "metric.test.orders_ytd": CumulativeTypeParams( + grain_to_date=TimeGranularity.YEAR, period_agg=PeriodAggregation.FIRST + ), + "metric.test.monthly_orders": CumulativeTypeParams( + window=MetricTimeWindow(count=1, granularity=TimeGranularity.MONTH), + period_agg=PeriodAggregation.AVERAGE, + ), + "metric.test.yearly_orders": CumulativeTypeParams( + window=MetricTimeWindow(count=1, granularity=TimeGranularity.YEAR), + period_agg=PeriodAggregation.FIRST, + ), + "metric.test.visits_mtd": CumulativeTypeParams( + grain_to_date=TimeGranularity.MONTH, period_agg=PeriodAggregation.FIRST + ), + "metric.test.cumulative_visits": CumulativeTypeParams( + period_agg=PeriodAggregation.FIRST + ), + } + assert metric_ids == set(expected_metric_ids_to_cumulative_type_params.keys()) + for ( + metric_id, + expected_cumulative_type_params, + ) in expected_metric_ids_to_cumulative_type_params.items(): + assert ( + manifest.metrics[metric_id].type_params.cumulative_type_params + == expected_cumulative_type_params + ), f"Found unexpected cumulative type params for {metric_id}" + + +class TestFilterParsing: + @pytest.fixture(scope="class") + def models(self): + return { + "basic_metrics.yml": basic_metrics_yml, + "filtered_metrics.yml": filtered_metrics_yml, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "people.sql": models_people_sql, + } + + # Tests that filters are parsed to their appropriate types + def test_filter_parsing( + self, + project, + ): + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + + manifest = get_manifest(project.project_root) + assert manifest + + # Test metrics with input measure filters. + filters1 = ( + manifest.metrics["metric.test.collective_tenure_measure_filter_str"] + .input_measures[0] + .filter.where_filters + ) + assert len(filters1) == 1 + assert filters1[0].where_sql_template == "{{ Dimension('id__loves_dbt') }} is true" + filters2 = ( + manifest.metrics["metric.test.collective_tenure_measure_filter_list"] + .input_measures[0] + .filter.where_filters + ) + assert len(filters2) == 1 + assert filters2[0].where_sql_template == "{{ Dimension('id__loves_dbt') }} is true" + + # Test metrics with metric-level filters. + filters3 = manifest.metrics[ + "metric.test.collective_tenure_metric_filter_str" + ].filter.where_filters + assert len(filters3) == 1 + assert filters3[0].where_sql_template == "{{ Dimension('id__loves_dbt') }} is true" + filters4 = manifest.metrics[ + "metric.test.collective_tenure_metric_filter_list" + ].filter.where_filters + assert len(filters4) == 1 + assert filters4[0].where_sql_template == "{{ Dimension('id__loves_dbt') }} is true" + + # Test derived metrics with input metric filters. 
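+        # As with the measure-level and metric-level filters above, both the string
+        # and list forms should parse to a single WhereFilter with the same
+        # where_sql_template.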
+ filters5 = ( + manifest.metrics["metric.test.average_tenure_filter_str"] + .input_metrics[0] + .filter.where_filters + ) + assert len(filters5) == 1 + assert filters5[0].where_sql_template == "{{ Dimension('id__loves_dbt') }} is true" + filters6 = ( + manifest.metrics["metric.test.average_tenure_filter_list"] + .input_metrics[0] + .filter.where_filters + ) + assert len(filters6) == 1 + assert filters6[0].where_sql_template == "{{ Dimension('id__loves_dbt') }} is true" + + +class TestDuplicateInputMeasures: + @pytest.fixture(scope="class") + def models(self): + return { + "basic_metrics.yml": basic_metrics_yml, + "filtered_metrics.yml": duplicate_measure_metric_yml, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "people.sql": models_people_sql, + } + + def test_duplicate_input_measures(self, project): + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) diff --git a/tests/functional/microbatch/test_microbatch.py b/tests/functional/microbatch/test_microbatch.py new file mode 100644 index 00000000000..cf8e018727f --- /dev/null +++ b/tests/functional/microbatch/test_microbatch.py @@ -0,0 +1,444 @@ +import os +from unittest import mock + +import pytest + +from dbt.tests.util import ( + patch_microbatch_end_time, + relation_from_name, + run_dbt, + run_dbt_and_capture, + write_file, +) + +input_model_sql = """ +{{ config(materialized='table', event_time='event_time') }} + +select 1 as id, TIMESTAMP '2020-01-01 00:00:00-0' as event_time +union all +select 2 as id, TIMESTAMP '2020-01-02 00:00:00-0' as event_time +union all +select 3 as id, TIMESTAMP '2020-01-03 00:00:00-0' as event_time +""" + +input_model_without_event_time_sql = """ +{{ config(materialized='table') }} + +select 1 as id, TIMESTAMP '2020-01-01 00:00:00-0' as event_time +union all +select 2 as id, TIMESTAMP '2020-01-02 00:00:00-0' as event_time +union all +select 3 as id, TIMESTAMP '2020-01-03 00:00:00-0' as event_time +""" + +microbatch_model_sql = """ +{{ config(materialized='incremental', incremental_strategy='microbatch', unique_key='id', event_time='event_time', batch_size='day') }} +select * from {{ ref('input_model') }} +""" + + +microbatch_model_ref_render_sql = """ +{{ config(materialized='incremental', incremental_strategy='microbatch', unique_key='id', event_time='event_time', batch_size='day') }} +select * from {{ ref('input_model').render() }} +""" + +seed_csv = """id,event_time +1,'2020-01-01 00:00:00-0' +2,'2020-01-02 00:00:00-0' +3,'2020-01-03 00:00:00-0' +""" + +seeds_yaml = """ +seeds: + - name: raw_source + config: + column_types: + event_time: TIMESTAMP +""" + +sources_yaml = """ +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_source + config: + event_time: event_time +""" + +microbatch_model_calling_source_sql = """ +{{ config(materialized='incremental', incremental_strategy='microbatch', unique_key='id', event_time='event_time', batch_size='day') }} +select * from {{ source('seed_sources', 'raw_source') }} +""" + +custom_microbatch_strategy = """ +{% macro get_incremental_microbatch_sql(arg_dict) %} + {% do log('custom microbatch strategy', info=True) %} + + {%- set dest_cols_csv = get_quoted_csv(arg_dict["dest_columns"] | map(attribute="name")) -%} + + insert into {{ arg_dict["target_relation"] }} ({{ dest_cols_csv }}) + ( + select {{ dest_cols_csv }} + from {{ arg_dict["temp_relation"] }} + ) + +{% endmacro %} +""" + + 
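+# Note: the user-defined strategy above logs a marker string (asserted on in the
+# custom-strategy tests below) and simply inserts every row from the temp relation
+# into the target relation; it does no batch filtering of its own.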
+class BaseMicrobatchCustomUserStrategy:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "input_model.sql": input_model_sql,
+            "microbatch_model.sql": microbatch_model_sql,
+        }
+
+    @pytest.fixture(scope="class")
+    def macros(self):
+        return {"microbatch.sql": custom_microbatch_strategy}
+
+
+class TestMicrobatchCustomUserStrategyDefault(BaseMicrobatchCustomUserStrategy):
+    def test_use_custom_microbatch_strategy_by_default(self, project):
+        with mock.patch.object(
+            type(project.adapter), "valid_incremental_strategies", lambda _: []
+        ):
+            # Initial run
+            run_dbt(["run"])
+
+            # Incremental run uses custom strategy
+            _, logs = run_dbt_and_capture(["run"])
+            assert "custom microbatch strategy" in logs
+
+
+class TestMicrobatchCustomUserStrategyEnvVarTrueValid(BaseMicrobatchCustomUserStrategy):
+    @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"})
+    def test_use_custom_microbatch_strategy_env_var_true_valid_incremental_strategy(
+        self, project
+    ):
+        with mock.patch.object(
+            type(project.adapter), "valid_incremental_strategies", lambda _: ["microbatch"]
+        ):
+            # Initial run
+            run_dbt(["run"])
+
+            # Incremental run uses custom strategy
+            _, logs = run_dbt_and_capture(["run"])
+            assert "custom microbatch strategy" in logs
+
+
+# TODO: Consider a behaviour flag here if DBT_EXPERIMENTAL_MICROBATCH is removed
+# Since this causes an exception prior to using an override
+class TestMicrobatchCustomUserStrategyEnvVarTrueInvalid(BaseMicrobatchCustomUserStrategy):
+    @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"})
+    def test_use_custom_microbatch_strategy_env_var_true_invalid_incremental_strategy(
+        self, project
+    ):
+        with mock.patch.object(
+            type(project.adapter), "valid_incremental_strategies", lambda _: []
+        ):
+            # Initial run
+            run_dbt(["run"])
+
+            # Incremental run fails
+            _, logs = run_dbt_and_capture(["run"], expect_pass=False)
+            assert "'microbatch' is not valid" in logs
+
+
+class BaseMicrobatchTest:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "input_model.sql": input_model_sql,
+            "microbatch_model.sql": microbatch_model_sql,
+        }
+
+    def assert_row_count(self, project, relation_name: str, expected_row_count: int):
+        relation = relation_from_name(project.adapter, relation_name)
+        result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one")
+
+        if result[0] != expected_row_count:
+            # running show for debugging
+            run_dbt(["show", "--inline", f"select * from {relation}"])
+
+        assert result[0] == expected_row_count
+
+
+class TestMicrobatchCLI(BaseMicrobatchTest):
+    @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"})
+    def test_run_with_event_time(self, project):
+        # run without --event-time-start or --event-time-end - 3 expected rows in output
+        with patch_microbatch_end_time("2020-01-03 13:57:00"):
+            run_dbt(["run"])
+        self.assert_row_count(project, "microbatch_model", 3)
+
+        # build model >= 2020-01-02
+        with patch_microbatch_end_time("2020-01-03 13:57:00"):
+            run_dbt(["run", "--event-time-start", "2020-01-02", "--full-refresh"])
+        self.assert_row_count(project, "microbatch_model", 2)
+
+        # build model < 2020-01-03
+        run_dbt(["run", "--event-time-end", "2020-01-03", "--full-refresh"])
+        self.assert_row_count(project, "microbatch_model", 2)
+
+        # build model where 2020-01-02 <= event_time < 2020-01-03
+        run_dbt(
+            [
+                "run",
+                "--event-time-start",
+                "2020-01-02",
+                "--event-time-end",
+                "2020-01-03",
+                "--full-refresh",
+            ]
+        )
+        self.assert_row_count(project, 
"microbatch_model", 1) + + +class TestMicroBatchBoundsDefault(BaseMicrobatchTest): + @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"}) + def test_run_with_event_time(self, project): + # initial run -- backfills all data + with patch_microbatch_end_time("2020-01-03 13:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # our partition grain is "day" so running the same day without new data should produce the same results + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # add next two days of data + test_schema_relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.run_sql( + f"insert into {test_schema_relation}.input_model(id, event_time) values (4, TIMESTAMP '2020-01-04 00:00:00-0'), (5, TIMESTAMP '2020-01-05 00:00:00-0')" + ) + self.assert_row_count(project, "input_model", 5) + + # re-run without changing current time => no insert + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 3) + + # re-run by advancing time by one day changing current time => insert 1 row + with patch_microbatch_end_time("2020-01-04 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 4) + + # re-run by advancing time by one more day changing current time => insert 1 more row + with patch_microbatch_end_time("2020-01-05 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 5) + + +class TestMicrobatchWithSource(BaseMicrobatchTest): + @pytest.fixture(scope="class") + def seeds(self): + return { + "raw_source.csv": seed_csv, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "microbatch_model.sql": microbatch_model_calling_source_sql, + "sources.yml": sources_yaml, + "seeds.yml": seeds_yaml, + } + + @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"}) + def test_run_with_event_time(self, project): + # ensure seed is created for source + run_dbt(["seed"]) + + # initial run -- backfills all data + with patch_microbatch_end_time("2020-01-03 13:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # our partition grain is "day" so running the same day without new data should produce the same results + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # add next two days of data + test_schema_relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.run_sql( + f"insert into {test_schema_relation}.raw_source(id, event_time) values (4, TIMESTAMP '2020-01-04 00:00:00-0'), (5, TIMESTAMP '2020-01-05 00:00:00-0')" + ) + self.assert_row_count(project, "raw_source", 5) + + # re-run without changing current time => no insert + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 3) + + # re-run by advancing time by one day changing current time => insert 1 row + with patch_microbatch_end_time("2020-01-04 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 4) + + # re-run by advancing time by one more day changing current time 
=> insert 1 more row + with patch_microbatch_end_time("2020-01-05 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 5) + + +class TestMicrobatchWithInputWithoutEventTime(BaseMicrobatchTest): + @pytest.fixture(scope="class") + def models(self): + return { + "input_model.sql": input_model_without_event_time_sql, + "microbatch_model.sql": microbatch_model_sql, + } + + @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"}) + def test_run_with_event_time(self, project): + # initial run -- backfills all data + with patch_microbatch_end_time("2020-01-03 13:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # our partition grain is "day" so running the same day without new data should produce the same results + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # add next two days of data + test_schema_relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.run_sql( + f"insert into {test_schema_relation}.input_model(id, event_time) values (4, TIMESTAMP '2020-01-04 00:00:00-0'), (5, TIMESTAMP '2020-01-05 00:00:00-0')" + ) + self.assert_row_count(project, "input_model", 5) + + # re-run without changing current time => INSERT BECAUSE INPUT MODEL ISN'T BEING FILTERED + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 5) + + +class TestMicrobatchUsingRefRenderSkipsFilter(BaseMicrobatchTest): + @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"}) + def test_run_with_event_time(self, project): + # initial run -- backfills all data + with patch_microbatch_end_time("2020-01-03 13:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # our partition grain is "day" so running the same day without new data should produce the same results + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run"]) + self.assert_row_count(project, "microbatch_model", 3) + + # add next two days of data + test_schema_relation = project.adapter.Relation.create( + database=project.database, schema=project.test_schema + ) + project.run_sql( + f"insert into {test_schema_relation}.input_model(id, event_time) values (4, TIMESTAMP '2020-01-04 00:00:00-0'), (5, TIMESTAMP '2020-01-05 00:00:00-0')" + ) + self.assert_row_count(project, "input_model", 5) + + # re-run without changing current time => no insert + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 3) + + # Update microbatch model to call .render() on ref('input_model') + write_file( + microbatch_model_ref_render_sql, project.project_root, "models", "microbatch_model.sql" + ) + + # re-run without changing current time => INSERT because .render() skips filtering + with patch_microbatch_end_time("2020-01-03 14:57:00"): + run_dbt(["run", "--select", "microbatch_model"]) + self.assert_row_count(project, "microbatch_model", 5) + + +microbatch_model_context_vars = """ +{{ config(materialized='incremental', incremental_strategy='microbatch', unique_key='id', event_time='event_time', batch_size='day') }} +{{ log("start: "~ model.config.__dbt_internal_microbatch_event_time_start, info=True)}} +{{ log("end: "~ 
model.config.__dbt_internal_microbatch_event_time_end, info=True)}} +select * from {{ ref('input_model') }} +""" + + +class TestMicrobatchJinjaContextVarsAvailable(BaseMicrobatchTest): + @pytest.fixture(scope="class") + def models(self): + return { + "input_model.sql": input_model_sql, + "microbatch_model.sql": microbatch_model_context_vars, + } + + @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"}) + def test_run_with_event_time_logs(self, project): + with patch_microbatch_end_time("2020-01-03 13:57:00"): + _, logs = run_dbt_and_capture(["run", "--event-time-start", "2020-01-01"]) + + assert "start: 2020-01-01 00:00:00+00:00" in logs + assert "end: 2020-01-02 00:00:00+00:00" in logs + + assert "start: 2020-01-02 00:00:00+00:00" in logs + assert "end: 2020-01-03 00:00:00+00:00" in logs + + assert "start: 2020-01-03 00:00:00+00:00" in logs + assert "end: 2020-01-03 13:57:00+00:00" in logs + + +microbatch_model_failing_incremental_partition_sql = """ +{{ config(materialized='incremental', incremental_strategy='microbatch', unique_key='id', event_time='event_time', batch_size='day') }} +{% if '2020-01-02' in (model.config.__dbt_internal_microbatch_event_time_start | string) %} + invalid_sql +{% endif %} +select * from {{ ref('input_model') }} +""" + + +class TestMicrobatchIncrementalPartitionFailure(BaseMicrobatchTest): + @pytest.fixture(scope="class") + def models(self): + return { + "input_model.sql": input_model_sql, + "microbatch_model.sql": microbatch_model_failing_incremental_partition_sql, + } + + @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"}) + def test_run_with_event_time(self, project): + # run all partitions from start - 2 expected rows in output, one failed + with patch_microbatch_end_time("2020-01-03 13:57:00"): + run_dbt(["run", "--event-time-start", "2020-01-01"]) + self.assert_row_count(project, "microbatch_model", 2) + + +microbatch_model_first_partition_failing_sql = """ +{{ config(materialized='incremental', incremental_strategy='microbatch', unique_key='id', event_time='event_time', batch_size='day') }} +{% if '2020-01-01' in (model.config.__dbt_internal_microbatch_event_time_start | string) %} + invalid_sql +{% endif %} +select * from {{ ref('input_model') }} +""" + + +class TestMicrobatchInitialPartitionFailure(BaseMicrobatchTest): + @pytest.fixture(scope="class") + def models(self): + return { + "input_model.sql": input_model_sql, + "microbatch_model.sql": microbatch_model_first_partition_failing_sql, + } + + @mock.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"}) + def test_run_with_event_time(self, project): + # run all partitions from start - 2 expected rows in output, one failed + with patch_microbatch_end_time("2020-01-03 13:57:00"): + run_dbt(["run", "--event-time-start", "2020-01-01"]) + self.assert_row_count(project, "microbatch_model", 2) diff --git a/tests/functional/minimal_cli/fixtures.py b/tests/functional/minimal_cli/fixtures.py index 5635ff72cda..dadfb130f2d 100644 --- a/tests/functional/minimal_cli/fixtures.py +++ b/tests/functional/minimal_cli/fixtures.py @@ -1,4 +1,5 @@ import pytest +from click.testing import CliRunner models__schema_yml = """ version: 2 @@ -6,12 +7,12 @@ - name: sample_model columns: - name: sample_num - tests: + data_tests: - accepted_values: values: [1, 2] - not_null - name: sample_bool - tests: + data_tests: - not_null - unique """ @@ -51,6 +52,10 @@ class BaseConfigProject: + @pytest.fixture() + def runner(self): + return CliRunner() + @pytest.fixture(scope="class") def 
project_config_update(self): return { diff --git a/tests/functional/minimal_cli/test_minimal_cli.py b/tests/functional/minimal_cli/test_minimal_cli.py index ae8f40dcfcc..c757b43d4b3 100644 --- a/tests/functional/minimal_cli/test_minimal_cli.py +++ b/tests/functional/minimal_cli/test_minimal_cli.py @@ -1,48 +1,91 @@ -import pytest -from click.testing import CliRunner - from dbt.cli.main import cli from tests.functional.minimal_cli.fixtures import BaseConfigProject +from tests.functional.utils import up_one -class TestMinimalCli(BaseConfigProject): +class TestClean(BaseConfigProject): """Test the minimal/happy-path for the CLI using the Click CliRunner""" - @pytest.fixture(scope="class") - def runner(self): - return CliRunner() - def test_clean(self, runner, project): result = runner.invoke(cli, ["clean"]) assert "target" in result.output assert "dbt_packages" in result.output assert "logs" in result.output + +class TestCleanUpLevel(BaseConfigProject): + def test_clean_one_level_up(self, runner, project): + with up_one(): + result = runner.invoke(cli, ["clean"]) + assert result.exit_code == 2 + assert "Runtime Error" in result.output + assert "No dbt_project.yml" in result.output + + +class TestDeps(BaseConfigProject): def test_deps(self, runner, project): result = runner.invoke(cli, ["deps"]) assert "dbt-labs/dbt_utils" in result.output assert "1.0.0" in result.output + +class TestLS(BaseConfigProject): def test_ls(self, runner, project): runner.invoke(cli, ["deps"]) ls_result = runner.invoke(cli, ["ls"]) assert "1 seed" in ls_result.output assert "1 model" in ls_result.output - assert "5 tests" in ls_result.output + assert "5 data tests" in ls_result.output assert "1 snapshot" in ls_result.output + +class TestBuild(BaseConfigProject): def test_build(self, runner, project): runner.invoke(cli, ["deps"]) result = runner.invoke(cli, ["build"]) - # 1 seed, 1 model, 2 tests + # 1 seed, 1 model, 2 data tests + assert "PASS=4" in result.output + # 2 data tests + assert "ERROR=2" in result.output + # Singular test + assert "WARN=1" in result.output + # 1 snapshot + assert "SKIP=1" in result.output + + +class TestBuildFailFast(BaseConfigProject): + def test_build(self, runner, project): + runner.invoke(cli, ["deps"]) + result = runner.invoke(cli, ["build", "--fail-fast"]) + # 1 seed, 1 model, 2 data tests + assert "PASS=4" in result.output + # 2 data tests + assert "ERROR=2" in result.output + # Singular test + assert "WARN=1" in result.output + # 1 snapshot + assert "SKIP=1" in result.output + # Skipping due to fail_fast is not shown when --debug is not specified. + assert "Skipping due to fail_fast" not in result.output + + +class TestBuildFailFastDebug(BaseConfigProject): + def test_build(self, runner, project): + runner.invoke(cli, ["deps"]) + result = runner.invoke(cli, ["build", "--fail-fast", "--debug"]) + # 1 seed, 1 model, 2 data tests assert "PASS=4" in result.output - # 2 tests + # 2 data tests assert "ERROR=2" in result.output # Singular test assert "WARN=1" in result.output # 1 snapshot assert "SKIP=1" in result.output + # Skipping due to fail_fast is shown when --debug is specified. 
+ assert "Skipping due to fail_fast" in result.output + +class TestDocsGenerate(BaseConfigProject): def test_docs_generate(self, runner, project): runner.invoke(cli, ["deps"]) result = runner.invoke(cli, ["docs", "generate"]) diff --git a/tests/functional/partial_parsing/fixtures.py b/tests/functional/partial_parsing/fixtures.py index 52578d90308..e419a3dfdbd 100644 --- a/tests/functional/partial_parsing/fixtures.py +++ b/tests/functional/partial_parsing/fixtures.py @@ -33,7 +33,7 @@ - name: "seed" columns: - name: id - tests: + data_tests: - unique """ @@ -67,7 +67,7 @@ - name: raw_customers columns: - name: id - tests: + data_tests: - not_null: severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - unique @@ -80,7 +80,7 @@ description: "Raw customer data" columns: - name: id - tests: + data_tests: - unique - not_null - name: first_name @@ -140,7 +140,7 @@ - name: raw_customers columns: - name: id - tests: + data_tests: - not_null: severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - unique @@ -154,7 +154,7 @@ description: "Raw customer data" columns: - name: id - tests: + data_tests: - unique - not_null - name: first_name @@ -194,7 +194,7 @@ - name: "seed" columns: - name: id - tests: + data_tests: - unique - not_null @@ -209,7 +209,7 @@ - name: raw_customers columns: - name: id - tests: + data_tests: - not_null: severity: "{{ env_var('ENV_VAR_SEVERITY') }}" - unique @@ -258,7 +258,7 @@ - name: raw_customers columns: - name: id - tests: + data_tests: - not_null: severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - unique @@ -279,7 +279,7 @@ - name: raw_customers columns: - name: id - tests: + data_tests: - not_null: severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - unique @@ -315,7 +315,7 @@ - name: raw_customers columns: - name: id - tests: + data_tests: - not_null: severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - unique @@ -369,7 +369,7 @@ - name: model_one config: materialized: "{{ env_var('TEST_SCHEMA_VAR') }}" - tests: + data_tests: - check_color: column_name: fun color: "env_var('ENV_VAR_COLOR')" @@ -407,7 +407,7 @@ - name: model_one config: materialized: "{{ env_var('TEST_SCHEMA_VAR') }}" - tests: + data_tests: - check_color: column_name: fun color: "env_var('ENV_VAR_COLOR')" @@ -452,6 +452,60 @@ agg_time_dimension: created_at """ +people_sl_yml = """ +version: 2 + +semantic_models: + - name: semantic_people + model: ref('people') + dimensions: + - name: favorite_color + type: categorical + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + - name: people + agg: count + expr: id + entities: + - name: id + type: primary + defaults: + agg_time_dimension: created_at + +metrics: + + - name: number_of_people + description: Total count of people + label: "Number of people" + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' + + - name: collective_tenure + description: Total number of years of team experience + label: "Collective tenure" + type: simple + type_params: + measure: + name: years_tenure + filter: "{{ Dimension('id__loves_dbt') }} is true" + + - name: average_tenure + label: Average Tenure + type: ratio + type_params: + numerator: collective_tenure + denominator: number_of_people +""" + env_var_metrics_yml = """ metrics: @@ -670,7 +724,7 @@ description: "The third model" columns: - name: id - tests: + data_tests: - not_null """ @@ -696,7 +750,7 @@ enabled: false columns: - name: id - tests: + data_tests: - 
unique """ @@ -706,6 +760,14 @@ """ +model_two_disabled_sql = """ +{{ config( + enabled=false +) }} + +select 1 as notfun +""" + generic_test_schema_yml = """ models: @@ -713,7 +775,7 @@ description: "Some order data" columns: - name: id - tests: + data_tests: - unique - is_odd @@ -773,7 +835,7 @@ description: "The third model" columns: - name: id - tests: + data_tests: - unique """ @@ -795,7 +857,7 @@ description: "The first model" - name: model_three description: "The third model" - tests: + data_tests: - unique macros: - name: do_something @@ -837,7 +899,7 @@ - name: model_color columns: - name: fun - tests: + data_tests: - unique: enabled: "{{ env_var('ENV_VAR_ENABLED', True) }}" @@ -880,7 +942,7 @@ models: - name: model_a - tests: + data_tests: - type_one - type_two @@ -974,7 +1036,7 @@ enabled: true columns: - name: id - tests: + data_tests: - unique """ @@ -1019,7 +1081,7 @@ description: "Some order data" columns: - name: id - tests: + data_tests: - unique """ diff --git a/tests/functional/partial_parsing/test_file_diff.py b/tests/functional/partial_parsing/test_file_diff.py index a9f4c8fdd09..3661a823a65 100644 --- a/tests/functional/partial_parsing/test_file_diff.py +++ b/tests/functional/partial_parsing/test_file_diff.py @@ -1,7 +1,9 @@ import os -from dbt.tests.util import run_dbt, write_artifact +import pytest +from dbt.tests.util import run_dbt, write_artifact, write_file +from tests.functional.partial_parsing.fixtures import model_one_sql, model_two_sql first_file_diff = { "deleted": [], @@ -17,7 +19,7 @@ } -class TestFileDiffs: +class TestFileDiffPaths: def test_file_diffs(self, project): os.environ["DBT_PP_FILE_DIFF_TEST"] = "true" @@ -35,3 +37,27 @@ def test_file_diffs(self, project): write_artifact(second_file_diff, "file_diff.json") results = run_dbt() assert len(results) == 2 + + +class TestFileDiffs: + @pytest.fixture(scope="class") + def models(self): + return { + "model_one.sql": model_one_sql, + } + + def test_no_file_diffs(self, project): + # We start with a project with one model + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 1 + + # add a model file + write_file(model_two_sql, project.project_root, "models", "model_two.sql") + + # parse without computing a file diff + manifest = run_dbt(["--partial-parse", "--no-partial-parse-file-diff", "parse"]) + assert len(manifest.nodes) == 1 + + # default behaviour - parse with computing a file diff + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 2 diff --git a/tests/functional/partial_parsing/test_partial_parsing.py b/tests/functional/partial_parsing/test_partial_parsing.py index 8d6014eb8ac..13bb3d5daae 100644 --- a/tests/functional/partial_parsing/test_partial_parsing.py +++ b/tests/functional/partial_parsing/test_partial_parsing.py @@ -1,79 +1,85 @@ +import os +import re +from argparse import Namespace +from unittest import mock + import pytest +import yaml -from dbt.tests.util import run_dbt, get_manifest, write_file, rm_file, run_dbt_and_capture +import dbt.flags as flags +from dbt.contracts.files import ParseFileType +from dbt.contracts.results import TestStatus +from dbt.exceptions import CompilationError +from dbt.plugins.manifest import ModelNodeArgs, PluginNodes from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import ( + get_manifest, + rename_dir, + rm_file, + run_dbt, + run_dbt_and_capture, + write_file, +) from tests.functional.partial_parsing.fixtures import ( + custom_schema_tests1_sql, + custom_schema_tests2_sql, + 
customers1_md, + customers2_md, + customers_sql, + empty_schema_with_version_yml, + empty_schema_yml, + generic_schema_yml, + generic_test_edited_sql, + generic_test_schema_yml, + generic_test_sql, + gsm_override2_sql, + gsm_override_sql, + local_dependency__dbt_project_yml, + local_dependency__macros__dep_macro_sql, + local_dependency__models__model_to_import_sql, + local_dependency__models__schema_yml, + local_dependency__seeds__seed_csv, + macros_schema_yml, + macros_yml, + model_a_sql, + model_b_sql, + model_four1_sql, + model_four2_sql, model_one_sql, + model_three_disabled2_sql, + model_three_disabled_sql, + model_three_modified_sql, + model_three_sql, + model_two_disabled_sql, model_two_sql, models_schema1_yml, models_schema2_yml, models_schema2b_yml, - model_three_sql, - model_three_modified_sql, - model_four1_sql, - model_four2_sql, + models_schema3_yml, models_schema4_yml, models_schema4b_yml, - models_schema3_yml, - my_macro_sql, + my_analysis_sql, my_macro2_sql, - macros_yml, - empty_schema_yml, - empty_schema_with_version_yml, - model_three_disabled_sql, - model_three_disabled2_sql, + my_macro_sql, + my_test_sql, + orders_sql, raw_customers_csv, - customers_sql, - sources_tests1_sql, + ref_override2_sql, + ref_override_sql, + schema_models_c_yml, schema_sources1_yml, schema_sources2_yml, schema_sources3_yml, schema_sources4_yml, schema_sources5_yml, - customers1_md, - customers2_md, - test_macro_sql, - my_test_sql, - test_macro2_sql, - my_analysis_sql, - sources_tests2_sql, - local_dependency__dbt_project_yml, - local_dependency__models__schema_yml, - local_dependency__models__model_to_import_sql, - local_dependency__macros__dep_macro_sql, - local_dependency__seeds__seed_csv, - schema_models_c_yml, - model_a_sql, - model_b_sql, - macros_schema_yml, - custom_schema_tests1_sql, - custom_schema_tests2_sql, - ref_override_sql, - ref_override2_sql, - gsm_override_sql, - gsm_override2_sql, - orders_sql, - orders_downstream_sql, - snapshot_sql, snapshot2_sql, - generic_schema_yml, - generic_test_sql, - generic_test_schema_yml, - generic_test_edited_sql, - groups_schema_yml_one_group, - groups_schema_yml_two_groups, - groups_schema_yml_two_groups_edited, - groups_schema_yml_one_group_model_in_group2, - groups_schema_yml_two_groups_private_orders_valid_access, - groups_schema_yml_two_groups_private_orders_invalid_access, + snapshot_sql, + sources_tests1_sql, + sources_tests2_sql, + test_macro2_sql, + test_macro_sql, ) - -from dbt.exceptions import CompilationError, ParsingError -from dbt.contracts.files import ParseFileType -from dbt.contracts.results import TestStatus - -import re -import os +from tests.functional.utils import up_one os.environ["DBT_PP_TEST"] = "true" @@ -129,7 +135,7 @@ def test_pp_models(self, project): assert model_three_node.description == "The third model" schema_file = manifest.files[schema_file_id] assert type(schema_file).__name__ == "SchemaSourceFile" - assert len(schema_file.tests) == 1 + assert len(schema_file.data_tests) == 1 tests = schema_file.get_all_test_ids() assert tests == ["test.test.unique_model_three_id.6776ac8160"] unique_test_id = tests[0] @@ -651,88 +657,227 @@ def test_pp_generic_tests(self, project): assert expected_nodes == list(manifest.nodes.keys()) -class TestGroups: +class TestExternalModels: @pytest.fixture(scope="class") - def models(self): - return { - "orders.sql": orders_sql, - "orders_downstream.sql": orders_downstream_sql, - "schema.yml": groups_schema_yml_one_group, - } + def external_model_node(self): + return ModelNodeArgs( + 
name="external_model", + package_name="external", + identifier="test_identifier", + schema="test_schema", + ) - def test_pp_groups(self, project): + @pytest.fixture(scope="class") + def external_model_node_versioned(self): + return ModelNodeArgs( + name="external_model_versioned", + package_name="external", + identifier="test_identifier_v1", + schema="test_schema", + version=1, + ) - # initial run - results = run_dbt() - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_nodes = ["model.test.orders", "model.test.orders_downstream"] - expected_groups = ["group.test.test_group"] - assert expected_nodes == sorted(list(manifest.nodes.keys())) - assert expected_groups == sorted(list(manifest.groups.keys())) + @pytest.fixture(scope="class") + def external_model_node_depends_on(self): + return ModelNodeArgs( + name="external_model_depends_on", + package_name="external", + identifier="test_identifier_depends_on", + schema="test_schema", + depends_on_nodes=["model.external.external_model_depends_on_parent"], + ) - # add group to schema - write_file(groups_schema_yml_two_groups, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_nodes = ["model.test.orders", "model.test.orders_downstream"] - expected_groups = ["group.test.test_group", "group.test.test_group2"] - assert expected_nodes == sorted(list(manifest.nodes.keys())) - assert expected_groups == sorted(list(manifest.groups.keys())) + @pytest.fixture(scope="class") + def external_model_node_depends_on_parent(self): + return ModelNodeArgs( + name="external_model_depends_on_parent", + package_name="external", + identifier="test_identifier_depends_on_parent", + schema="test_schema", + ) - # edit group in schema - write_file( - groups_schema_yml_two_groups_edited, project.project_root, "models", "schema.yml" + @pytest.fixture(scope="class") + def external_model_node_merge(self): + return ModelNodeArgs( + name="model_two", + package_name="test", + identifier="test_identifier", + schema="test_schema", ) - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_nodes = ["model.test.orders", "model.test.orders_downstream"] - expected_groups = ["group.test.test_group", "group.test.test_group2_edited"] - assert expected_nodes == sorted(list(manifest.nodes.keys())) - assert expected_groups == sorted(list(manifest.groups.keys())) - # delete group in schema - write_file(groups_schema_yml_one_group, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 - manifest = get_manifest(project.project_root) - expected_nodes = ["model.test.orders", "model.test.orders_downstream"] - expected_groups = ["group.test.test_group"] - assert expected_nodes == sorted(list(manifest.nodes.keys())) - assert expected_groups == sorted(list(manifest.groups.keys())) + @pytest.fixture(scope="class") + def models(self): + return {"model_one.sql": model_one_sql} + + @mock.patch("dbt.plugins.get_plugin_manager") + def test_pp_external_models( + self, + get_plugin_manager, + project, + external_model_node, + external_model_node_versioned, + external_model_node_depends_on, + external_model_node_depends_on_parent, + external_model_node_merge, + ): + # initial plugin - one external model + external_nodes = PluginNodes() + external_nodes.add_model(external_model_node) + 
get_plugin_manager.return_value.get_nodes.return_value = external_nodes + + # initial parse + manifest = run_dbt(["parse"]) + assert len(manifest.nodes) == 2 + assert set(manifest.nodes.keys()) == { + "model.external.external_model", + "model.test.model_one", + } + assert len(manifest.external_node_unique_ids) == 1 + assert manifest.external_node_unique_ids == ["model.external.external_model"] - # add back second group - write_file(groups_schema_yml_two_groups, project.project_root, "models", "schema.yml") - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 + # add a model file - test.model_two + write_file(model_two_sql, project.project_root, "models", "model_two.sql") + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 3 + + # add an external model that is already in project - test.model_two + # project model should be preferred to external model + external_nodes.add_model(external_model_node_merge) + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 3 + assert len(manifest.external_node_unique_ids) == 1 + + # disable test.model_two in project + # project models should still be preferred to external model + write_file(model_two_disabled_sql, project.project_root, "models", "model_two.sql") + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 2 + assert len(manifest.disabled) == 1 + assert len(manifest.external_node_unique_ids) == 1 + + # re-enable model_2.sql + write_file(model_two_sql, project.project_root, "models", "model_two.sql") - # remove second group with model still configured to second group - write_file( - groups_schema_yml_one_group_model_in_group2, - project.project_root, - "models", - "schema.yml", - ) - with pytest.raises(ParsingError): - results = run_dbt(["--partial-parse", "run"]) + # add a new external model + external_nodes.add_model(external_model_node_versioned) + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 4 + assert len(manifest.external_node_unique_ids) == 2 - # add back second group, make orders private with valid ref + # add a model file that depends on external model write_file( - groups_schema_yml_two_groups_private_orders_valid_access, + "SELECT * FROM {{ref('external', 'external_model')}}", project.project_root, "models", - "schema.yml", + "model_depends_on_external.sql", ) - results = run_dbt(["--partial-parse", "run"]) - assert len(results) == 2 + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 5 + assert len(manifest.external_node_unique_ids) == 2 - write_file( - groups_schema_yml_two_groups_private_orders_invalid_access, - project.project_root, - "models", - "schema.yml", - ) - with pytest.raises(ParsingError): - results = run_dbt(["--partial-parse", "run"]) + # remove a model file that depends on external model + rm_file(project.project_root, "models", "model_depends_on_external.sql") + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 4 + + # add an external node with depends on + external_nodes.add_model(external_model_node_depends_on) + external_nodes.add_model(external_model_node_depends_on_parent) + manifest = run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 6 + assert len(manifest.external_node_unique_ids) == 4 + + # skip files parsing - ensure no issues + run_dbt(["--partial-parse", "parse"]) + assert len(manifest.nodes) == 6 + assert len(manifest.external_node_unique_ids) == 4 + + +class 
TestPortablePartialParsing:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "model_one.sql": model_one_sql,
+        }
+
+    @pytest.fixture(scope="class")
+    def packages(self):
+        return {"packages": [{"local": "local_dependency"}]}
+
+    @pytest.fixture(scope="class")
+    def local_dependency_files(self):
+        return {
+            "dbt_project.yml": local_dependency__dbt_project_yml,
+            "models": {
+                "schema.yml": local_dependency__models__schema_yml,
+                "model_to_import.sql": local_dependency__models__model_to_import_sql,
+            },
+            "macros": {"dep_macro.sql": local_dependency__macros__dep_macro_sql},
+            "seeds": {"seed.csv": local_dependency__seeds__seed_csv},
+        }
+
+    def rename_project_root(self, project, new_project_root):
+        with up_one(new_project_root):
+            rename_dir(project.project_root, new_project_root)
+            project.project_root = new_project_root
+            # flags.project_dir is set during the project test fixture, and is persisted across run_dbt calls,
+            # so it needs to be reset between invocations
+            flags.set_from_args(Namespace(PROJECT_DIR=new_project_root), None)
+
+    @pytest.fixture(scope="class", autouse=True)
+    def initial_run_and_rename_project_dir(self, project, local_dependency_files):
+        initial_project_root = project.project_root
+        renamed_project_root = os.path.join(project.project_root.dirname, "renamed_project_dir")
+
+        write_project_files(project.project_root, "local_dependency", local_dependency_files)
+
+        # initial run
+        run_dbt(["deps"])
+        assert len(run_dbt(["seed"])) == 1
+        assert len(run_dbt(["run"])) == 2
+
+        self.rename_project_root(project, renamed_project_root)
+        yield
+        self.rename_project_root(project, initial_project_root)
+
+    def test_pp_renamed_project_dir_unchanged_project_contents(self, project):
+        # partial parse same project in new absolute dir location, using partial_parse.msgpack created in previous dir
+        run_dbt(["deps"])
+        assert len(run_dbt(["--partial-parse", "seed"])) == 1
+        assert len(run_dbt(["--partial-parse", "run"])) == 2
+
+    def test_pp_renamed_project_dir_changed_project_contents(self, project):
+        write_file(model_two_sql, project.project_root, "models", "model_two.sql")
+
+        # partial parse changed project in new absolute dir location, using partial_parse.msgpack created in previous dir
+        run_dbt(["deps"])
+        assert len(run_dbt(["--partial-parse", "seed"])) == 1
+        assert len(run_dbt(["--partial-parse", "run"])) == 3
+
+
+class TestProfileChanges:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "model.sql": "select 1 as id",
+        }
+
+    def test_profile_change(self, project, dbt_profile_data):
+        # First run: no saved manifest, so not partial parsing
+        _, stdout = run_dbt_and_capture(["parse"])
+        assert "Unable to do partial parsing because saved manifest not found" in stdout
+
+        _, stdout = run_dbt_and_capture(["parse"])
+        assert "Unable to do partial parsing" not in stdout
+
+        # Change dbname, which is included in the connection_info
+        dbt_profile_data["test"]["outputs"]["default"]["dbname"] = "dbt2"
+        write_file(yaml.safe_dump(dbt_profile_data), project.profiles_dir, "profiles.yml")
+        _, stdout = run_dbt_and_capture(["parse"])
+        assert "Unable to do partial parsing because profile has changed" in stdout
+
+        # Change the password, which is not included in the connection_info
+        dbt_profile_data["test"]["outputs"]["default"]["pass"] = "another_password"
+        write_file(yaml.safe_dump(dbt_profile_data), project.profiles_dir, "profiles.yml")
+        _, stdout = run_dbt_and_capture(["parse"])
+        assert "Unable to do partial parsing" not in stdout
diff --git 
a/tests/functional/partial_parsing/test_pp_disabled_config.py b/tests/functional/partial_parsing/test_pp_disabled_config.py index 03d2e8a728b..472d84ecff6 100644 --- a/tests/functional/partial_parsing/test_pp_disabled_config.py +++ b/tests/functional/partial_parsing/test_pp_disabled_config.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, write_file, get_manifest + +from dbt.tests.util import get_manifest, run_dbt, write_file model_one_sql = """ select 1 as fun diff --git a/tests/functional/partial_parsing/test_pp_docs.py b/tests/functional/partial_parsing/test_pp_docs.py index 3961bb9c0fc..c818cca364b 100644 --- a/tests/functional/partial_parsing/test_pp_docs.py +++ b/tests/functional/partial_parsing/test_pp_docs.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, write_file, get_manifest, rm_file + +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file model_one_sql = """ select 1 as fun @@ -72,7 +73,7 @@ - name: raw_customers columns: - name: id - tests: + data_tests: - not_null: severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" - unique diff --git a/tests/functional/partial_parsing/test_pp_groups.py b/tests/functional/partial_parsing/test_pp_groups.py new file mode 100644 index 00000000000..57a0917105e --- /dev/null +++ b/tests/functional/partial_parsing/test_pp_groups.py @@ -0,0 +1,156 @@ +import pytest + +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, run_dbt, write_file +from tests.functional.partial_parsing.fixtures import ( + groups_schema_yml_one_group, + groups_schema_yml_one_group_model_in_group2, + groups_schema_yml_two_groups, + groups_schema_yml_two_groups_edited, + groups_schema_yml_two_groups_private_orders_invalid_access, + groups_schema_yml_two_groups_private_orders_valid_access, + orders_downstream_sql, + orders_sql, +) + + +class TestGroups: + @pytest.fixture(scope="class") + def models(self): + return { + "orders.sql": orders_sql, + "orders_downstream.sql": orders_downstream_sql, + "schema.yml": groups_schema_yml_one_group, + } + + def test_pp_groups(self, project): + + # initial run + results = run_dbt() + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_nodes = ["model.test.orders", "model.test.orders_downstream"] + expected_groups = ["group.test.test_group"] + assert expected_nodes == sorted(list(manifest.nodes.keys())) + assert expected_groups == sorted(list(manifest.groups.keys())) + + # add group to schema + write_file(groups_schema_yml_two_groups, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_nodes = ["model.test.orders", "model.test.orders_downstream"] + expected_groups = ["group.test.test_group", "group.test.test_group2"] + assert expected_nodes == sorted(list(manifest.nodes.keys())) + assert expected_groups == sorted(list(manifest.groups.keys())) + + # edit group in schema + write_file( + groups_schema_yml_two_groups_edited, project.project_root, "models", "schema.yml" + ) + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_nodes = ["model.test.orders", "model.test.orders_downstream"] + expected_groups = ["group.test.test_group", "group.test.test_group2_edited"] + assert expected_nodes == sorted(list(manifest.nodes.keys())) + assert expected_groups == sorted(list(manifest.groups.keys())) + + # delete group in 
schema + write_file(groups_schema_yml_one_group, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + manifest = get_manifest(project.project_root) + expected_nodes = ["model.test.orders", "model.test.orders_downstream"] + expected_groups = ["group.test.test_group"] + assert expected_nodes == sorted(list(manifest.nodes.keys())) + assert expected_groups == sorted(list(manifest.groups.keys())) + + # add back second group + write_file(groups_schema_yml_two_groups, project.project_root, "models", "schema.yml") + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + + # remove second group with model still configured to second group + write_file( + groups_schema_yml_one_group_model_in_group2, + project.project_root, + "models", + "schema.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["--partial-parse", "run"]) + + # add back second group, make orders private with valid ref + write_file( + groups_schema_yml_two_groups_private_orders_valid_access, + project.project_root, + "models", + "schema.yml", + ) + results = run_dbt(["--partial-parse", "run"]) + assert len(results) == 2 + + write_file( + groups_schema_yml_two_groups_private_orders_invalid_access, + project.project_root, + "models", + "schema.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["--partial-parse", "run"]) + + +my_model_c = """ +select * from {{ ref("my_model_a") }} union all +select * from {{ ref("my_model_b") }} +""" + +models_yml = """ +models: + - name: my_model_a + - name: my_model_b + - name: my_model_c +""" + +models_and_groups_yml = """ +groups: + - name: sales_analytics + owner: + name: Sales Analytics + email: sales@jaffleshop.com + +models: + - name: my_model_a + access: private + group: sales_analytics + - name: my_model_b + access: private + group: sales_analytics + - name: my_model_c + access: private + group: sales_analytics +""" + + +class TestAddingModelsToNewGroups: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": "select 1 as id", + "my_model_b.sql": "select 2 as id", + "my_model_c.sql": my_model_c, + "models.yml": models_yml, + } + + def test_adding_models_to_new_groups(self, project): + run_dbt(["compile"]) + # This tests that the correct patch is added to my_model_c. The bug + # was that it was using the old patch, so model_c didn't have the + # correct group and access. 
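+        # Rewriting models.yml below reassigns all three models to the new group;
+        # the assertions that follow check that my_model_c picks up the new group
+        # and access under partial parsing.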
+ write_file(models_and_groups_yml, project.project_root, "models", "models.yml") + run_dbt(["compile"]) + manifest = get_manifest(project.project_root) + model_c_node = manifest.nodes["model.test.my_model_c"] + assert model_c_node.group == "sales_analytics" + assert model_c_node.access == "private" diff --git a/tests/functional/partial_parsing/test_pp_metrics.py b/tests/functional/partial_parsing/test_pp_metrics.py index da994e09808..9aa5530c419 100644 --- a/tests/functional/partial_parsing/test_pp_metrics.py +++ b/tests/functional/partial_parsing/test_pp_metrics.py @@ -1,18 +1,20 @@ import pytest -from dbt.tests.util import run_dbt, write_file, get_manifest +from dbt.cli.main import dbtRunner +from dbt.contracts.graph.manifest import Manifest +from dbt.exceptions import CompilationError +from dbt.tests.util import get_manifest, rm_file, run_dbt, write_file from tests.functional.partial_parsing.fixtures import ( - people_sql, + metric_model_a_sql, metricflow_time_spine_sql, - people_semantic_models_yml, - people_metrics_yml, people_metrics2_yml, - metric_model_a_sql, people_metrics3_yml, + people_metrics_yml, + people_semantic_models_yml, + people_sl_yml, + people_sql, ) -from dbt.exceptions import CompilationError - class TestMetrics: @pytest.fixture(scope="class") @@ -84,3 +86,29 @@ def test_metrics(self, project): # We use "parse" here and not "run" because we're checking that the CompilationError # occurs at parse time, not compilation results = run_dbt(["parse"]) + + +class TestDeleteFileWithMetricsAndSemanticModels: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "people_sl.yml": people_sl_yml, + } + + def test_metrics(self, project): + # Initial parsing + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert result.success + manifest = result.result + assert isinstance(manifest, Manifest) + assert len(manifest.metrics) == 3 + + # Remove metric file + rm_file(project.project_root, "models", "people_sl.yml") + + # Rerun parse, shouldn't fail + result = runner.invoke(["parse"]) + assert result.exception is None, result.exception diff --git a/tests/functional/partial_parsing/test_pp_vars.py b/tests/functional/partial_parsing/test_pp_vars.py index f57fca06b1e..a2e915dcb34 100644 --- a/tests/functional/partial_parsing/test_pp_vars.py +++ b/tests/functional/partial_parsing/test_pp_vars.py @@ -2,10 +2,11 @@ from pathlib import Path import pytest -from dbt.constants import SECRET_ENV_PREFIX -from dbt.exceptions import FailedToConnectError, ParsingError -from dbt.tests.util import get_manifest, run_dbt, run_dbt_and_capture, write_file +from dbt.adapters.exceptions import FailedToConnectError +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, run_dbt, run_dbt_and_capture, write_file +from dbt_common.constants import SECRET_ENV_PREFIX from tests.functional.partial_parsing.fixtures import ( env_var_macro_sql, env_var_macros_yml, @@ -263,11 +264,16 @@ def test_env_vars_models(self, project): class TestProjectEnvVars: + @pytest.fixture(scope="class") + def environment(self): + custom_env = os.environ.copy() + custom_env["ENV_VAR_NAME"] = "Jane Smith" + return custom_env + @pytest.fixture(scope="class") def project_config_update(self): # Need to set the environment variable here initially because # the project fixture loads the config. 
- os.environ["ENV_VAR_NAME"] = "Jane Smith" return {"models": {"+meta": {"meta_name": "{{ env_var('ENV_VAR_NAME') }}"}}} @pytest.fixture(scope="class") @@ -278,6 +284,7 @@ def models(self): def test_project_env_vars(self, project): # Initial run + os.environ["ENV_VAR_NAME"] = "Jane Smith" results = run_dbt(["run"]) assert len(results) == 1 manifest = get_manifest(project.project_root) @@ -307,46 +314,40 @@ def models(self): "model_one.sql": model_one_sql, } + @pytest.fixture(scope="class") + def environment(self): + custom_env = os.environ.copy() + custom_env["ENV_VAR_HOST"] = "localhost" + return custom_env + @pytest.fixture(scope="class") def dbt_profile_target(self): - # Need to set these here because the base integration test class - # calls 'load_config' before the tests are run. - # Note: only the specified profile is rendered, so there's no - # point it setting env_vars in non-used profiles. - os.environ["ENV_VAR_USER"] = "root" - os.environ["ENV_VAR_PASS"] = "password" return { "type": "postgres", "threads": 4, - "host": "localhost", + "host": "{{ env_var('ENV_VAR_HOST') }}", "port": 5432, - "user": "{{ env_var('ENV_VAR_USER') }}", - "pass": "{{ env_var('ENV_VAR_PASS') }}", + "user": "root", + "pass": "password", "dbname": "dbt", } def test_profile_env_vars(self, project, logs_dir): # Initial run - os.environ["ENV_VAR_USER"] = "root" - os.environ["ENV_VAR_PASS"] = "password" + os.environ["ENV_VAR_HOST"] = "localhost" run_dbt(["run"]) - manifest = get_manifest(project.project_root) - env_vars_checksum = manifest.state_check.profile_env_vars_hash.checksum # Change env_vars, the user doesn't exist, this should fail - os.environ["ENV_VAR_USER"] = "fake_user" + os.environ["ENV_VAR_HOST"] = "wrong_host" # N.B. run_dbt_and_capture won't work here because FailedToConnectError ends the test entirely with pytest.raises(FailedToConnectError): run_dbt(["run"], expect_pass=False) log_output = Path(logs_dir, "dbt.log").read_text() - assert "env vars used in profiles.yml have changed" in log_output - - manifest = get_manifest(project.project_root) - assert env_vars_checksum != manifest.state_check.profile_env_vars_hash.checksum + assert "Unable to do partial parsing because profile has changed" in log_output class TestProfileSecretEnvVars: @@ -365,7 +366,7 @@ def dbt_profile_target(self): # user is secret and password is not. postgres on macos doesn't care if the password # changes so we have to change the user. related: https://github.com/dbt-labs/dbt-core/pull/4250 - os.environ[SECRET_ENV_PREFIX + "USER"] = "root" + os.environ[SECRET_ENV_PREFIX + "_USER"] = "root" os.environ["ENV_VAR_PASS"] = "password" return { "type": "postgres", @@ -380,7 +381,7 @@ def dbt_profile_target(self): def test_profile_secret_env_vars(self, project): # Initial run - os.environ[SECRET_ENV_PREFIX + "USER"] = "root" + os.environ[SECRET_ENV_PREFIX + "_USER"] = "root" os.environ["ENV_VAR_PASS"] = "password" results = run_dbt(["run"]) @@ -388,7 +389,7 @@ def test_profile_secret_env_vars(self, project): env_vars_checksum = manifest.state_check.profile_env_vars_hash.checksum # Change a secret var, it shouldn't register because we shouldn't save secrets. - os.environ[SECRET_ENV_PREFIX + "USER"] = "fake_user" + os.environ[SECRET_ENV_PREFIX + "_USER"] = "fake_user" # we just want to see if the manifest has included # the secret in the hash of environment variables. 
(results, log_output) = run_dbt_and_capture(["run"], expect_pass=True) diff --git a/tests/functional/partial_parsing/test_versioned_models.py b/tests/functional/partial_parsing/test_versioned_models.py index 06e56d9c0cd..0e6ef22cd2f 100644 --- a/tests/functional/partial_parsing/test_versioned_models.py +++ b/tests/functional/partial_parsing/test_versioned_models.py @@ -1,13 +1,9 @@ -import pytest import pathlib -from dbt.tests.util import ( - run_dbt, - get_manifest, - write_file, - rm_file, - read_file, -) + +import pytest + from dbt.exceptions import DuplicateVersionedUnversionedError +from dbt.tests.util import get_manifest, read_file, rm_file, run_dbt, write_file model_one_sql = """ select 1 as fun diff --git a/tests/functional/permission/fixtures.py b/tests/functional/permission/fixtures.py index e6014b09754..7ac2f6f3665 100644 --- a/tests/functional/permission/fixtures.py +++ b/tests/functional/permission/fixtures.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.fixtures.project import write_project_files +from dbt.tests.fixtures.project import write_project_files models__view_model_sql = """ diff --git a/tests/functional/postgres/test_postgres_indexes.py b/tests/functional/postgres/test_postgres_indexes.py index 143a0888755..ceb9d0514bd 100644 --- a/tests/functional/postgres/test_postgres_indexes.py +++ b/tests/functional/postgres/test_postgres_indexes.py @@ -1,21 +1,19 @@ -import pytest import re -from dbt.tests.util import ( - run_dbt, - run_dbt_and_capture, -) + +import pytest + +from dbt.tests.util import run_dbt, run_dbt_and_capture from tests.functional.postgres.fixtures import ( models__incremental_sql, models__table_sql, - models_invalid__missing_columns_sql, models_invalid__invalid_columns_type_sql, models_invalid__invalid_type_sql, models_invalid__invalid_unique_config_sql, + models_invalid__missing_columns_sql, seeds__seed_csv, snapshots__colors_sql, ) - INDEX_DEFINITION_PATTERN = re.compile(r"using\s+(\w+)\s+\((.+)\)\Z") diff --git a/tests/functional/postgres/test_postgres_unlogged_table.py b/tests/functional/postgres/test_postgres_unlogged_table.py index bfb739ef41e..50c23635b64 100644 --- a/tests/functional/postgres/test_postgres_unlogged_table.py +++ b/tests/functional/postgres/test_postgres_unlogged_table.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt schema_yml = """ version: 2 diff --git a/tests/functional/primary_keys/fixtures.py b/tests/functional/primary_keys/fixtures.py new file mode 100644 index 00000000000..888e9d65190 --- /dev/null +++ b/tests/functional/primary_keys/fixtures.py @@ -0,0 +1,88 @@ +simple_model_sql = """ +select 1 as id, 'blue' as color +""" + +simple_model_unique_test = """ +models: + - name: simple_model + columns: + - name: id + tests: + - unique +""" + +simple_model_disabled_unique_test = """ +models: + - name: simple_model + columns: + - name: id + tests: + - unique: + enabled: false + +""" + +simple_model_unique_not_null_tests = """ +models: + - name: simple_model + columns: + - name: id + tests: + - unique + - not_null +""" + +simple_model_unique_combo_of_columns = """ +models: + - name: simple_model + tests: + - dbt_utils.unique_combination_of_columns: + combination_of_columns: [id, color] +""" + +simple_model_constraints = """ +models: + - name: simple_model + config: + contract: + enforced: true + columns: + - name: id + data_type: int + constraints: + - type: not_null + - type: primary_key + - name: color + data_type: text +""" + +simple_model_two_versions_both_configured = 
""" +models: + - name: simple_model + latest_version: 1 + columns: + - name: id + tests: + - unique + - not_null + versions: + - v: 1 + - v: 2 +""" + +simple_model_two_versions_exclude_col = """ +models: + - name: simple_model + latest_version: 1 + columns: + - name: id + tests: + - unique + - not_null + versions: + - v: 1 + - v: 2 + columns: + - include: all + exclude: [id] +""" diff --git a/tests/functional/primary_keys/test_primary_keys.py b/tests/functional/primary_keys/test_primary_keys.py new file mode 100644 index 00000000000..866ff2967ce --- /dev/null +++ b/tests/functional/primary_keys/test_primary_keys.py @@ -0,0 +1,157 @@ +import pytest + +from dbt.tests.util import get_manifest, run_dbt +from tests.functional.primary_keys.fixtures import ( + simple_model_constraints, + simple_model_disabled_unique_test, + simple_model_sql, + simple_model_two_versions_both_configured, + simple_model_two_versions_exclude_col, + simple_model_unique_combo_of_columns, + simple_model_unique_not_null_tests, + simple_model_unique_test, +) + + +class TestSimpleModelNoYml: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + } + + def test_simple_model_no_yml(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == [] + + +class TestSimpleModelConstraints: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_constraints, + } + + def test_simple_model_constraints(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["id"] + + +class TestSimpleModelUniqueNotNullTests: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_unique_not_null_tests, + } + + def test_simple_model_unique_not_null_tests(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["id"] + + +class TestSimpleModelUniqueTests: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_unique_test, + } + + def test_simple_model_unique_test(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["id"] + + +class TestSimpleModelDisabledUniqueTests: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_disabled_unique_test, + } + + def test_simple_model_disabled_unique_test(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["id"] + + +class TestVersionedSimpleModel: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model_v1.sql": simple_model_sql, + "simple_model_v2.sql": simple_model_sql, + "schema.yml": simple_model_two_versions_both_configured, + } + + def test_versioned_simple_model(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node_v1 = 
manifest.nodes["model.test.simple_model.v1"] + node_v2 = manifest.nodes["model.test.simple_model.v2"] + assert node_v1.primary_key == ["id"] + assert node_v2.primary_key == ["id"] + + +class TestVersionedSimpleModelExcludeTests: + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model_v1.sql": simple_model_sql, + "simple_model_v2.sql": simple_model_sql, + "schema.yml": simple_model_two_versions_exclude_col, + } + + def test_versioned_simple_model_exclude_col(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node_v1 = manifest.nodes["model.test.simple_model.v1"] + node_v2 = manifest.nodes["model.test.simple_model.v2"] + assert node_v1.primary_key == ["id"] + assert node_v2.primary_key == [] + + +class TestSimpleModelCombinationOfColumns: + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "git": "https://github.com/dbt-labs/dbt-utils.git", + "revision": "1.1.0", + }, + ] + } + + @pytest.fixture(scope="class") + def models(self): + return { + "simple_model.sql": simple_model_sql, + "schema.yml": simple_model_unique_combo_of_columns, + } + + def test_versioned_simple_combo_of_columns(self, project): + run_dbt(["deps"]) + run_dbt(["run"]) + manifest = get_manifest(project.project_root) + node = manifest.nodes["model.test.simple_model"] + assert node.primary_key == ["color", "id"] diff --git a/tests/functional/profiles/test_profile_dir.py b/tests/functional/profiles/test_profile_dir.py index b9237af5dce..8545e2c4773 100644 --- a/tests/functional/profiles/test_profile_dir.py +++ b/tests/functional/profiles/test_profile_dir.py @@ -1,19 +1,13 @@ import os -import pytest -import yaml - +from argparse import Namespace from contextlib import contextmanager from pathlib import Path -from argparse import Namespace -import dbt.flags as flags +import pytest +import yaml -from dbt.tests.util import ( - run_dbt, - run_dbt_and_capture, - write_file, - rm_file, -) +import dbt.flags as flags +from dbt.tests.util import rm_file, run_dbt, run_dbt_and_capture, write_file @pytest.fixture(scope="class") diff --git a/tests/functional/profiles/test_profiles_yml.py b/tests/functional/profiles/test_profiles_yml.py index 50771c24132..2af18b5f982 100644 --- a/tests/functional/profiles/test_profiles_yml.py +++ b/tests/functional/profiles/test_profiles_yml.py @@ -1,4 +1,5 @@ import pathlib + from test_profile_dir import environ from dbt.cli.main import dbtRunner diff --git a/tests/functional/record/record.py b/tests/functional/record/record.py new file mode 100644 index 00000000000..6b1779f8fc0 --- /dev/null +++ b/tests/functional/record/record.py @@ -0,0 +1,17 @@ +import os + +from dbt.tests.util import run_dbt + + +class TestRecord: + def test_record_when_env_var_set(self, project): + temp = os.environ.get("DBT_RECORD", None) + try: + os.environ["DBT_RECORD"] = "True" + run_dbt(["run"]) + assert os.path.isfile(os.path.join(os.getcwd(), "recording.json")) + finally: + if temp is None: + del os.environ["DBT_RECORD"] + else: + os.environ["DBT_RECORD"] = temp diff --git a/tests/functional/ref_override/test_ref_override.py b/tests/functional/ref_override/test_ref_override.py index 9a6b1def435..97ffa790173 100644 --- a/tests/functional/ref_override/test_ref_override.py +++ b/tests/functional/ref_override/test_ref_override.py @@ -1,8 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.fixtures.project import write_project_files - +from dbt.tests.util import 
check_relations_equal, run_dbt models__ref_override_sql = """ select @@ -28,52 +26,120 @@ 3,6""" -@pytest.fixture(scope="class") -def models(): - return {"ref_override.sql": models__ref_override_sql} +class TestRefOverride: + @pytest.fixture(scope="class") + def models(self): + return {"ref_override.sql": models__ref_override_sql} + @pytest.fixture(scope="class") + def macros(self): + return {"ref_override_macro.sql": macros__ref_override_macro_sql} -@pytest.fixture(scope="class") -def macros(): - return {"ref_override_macro.sql": macros__ref_override_macro_sql} + @pytest.fixture(scope="class") + def seeds(self): + return {"seed_2.csv": seeds__seed_2_csv, "seed_1.csv": seeds__seed_1_csv} + def test_ref_override( + self, + project, + ): + run_dbt(["seed"]) + run_dbt(["run"]) -@pytest.fixture(scope="class") -def seeds(): - return {"seed_2.csv": seeds__seed_2_csv, "seed_1.csv": seeds__seed_1_csv} + # We want it to equal seed_2 and not seed_1. If it's + # still pointing at seed_1 then the override hasn't worked. + check_relations_equal(project.adapter, ["ref_override", "seed_2"]) -@pytest.fixture(scope="class") -def project_files( - project_root, - models, - macros, - seeds, -): - write_project_files(project_root, "models", models) - write_project_files(project_root, "macros", macros) - write_project_files(project_root, "seeds", seeds) +models__version_ref_override_sql = """ +select + * +from {{ ref('versioned_model', version=1) }} +""" +models__package_ref_override_sql = """ +select + * +from {{ ref('package', 'versioned_model') }} +""" -class TestRefOverride: +models__package_version_ref_override_sql = """ +select + * +from {{ ref('package', 'versioned_model', version=1) }} +""" + +models__v1_sql = """ +select 1 +""" + +models__v2_sql = """ +select 2 +""" + +schema__versions_yml = """ +models: + - name: versioned_model + versions: + - v: 1 + - v: 2 +""" + +macros__package_version_ref_override_macro_sql = """ +-- Macro to override ref and always return the same result +{% macro ref() %} +-- extract user-provided positional and keyword arguments +{% set version = kwargs.get('version') %} +{% set packagename = none %} +{%- if (varargs | length) == 1 -%} + {% set modelname = varargs[0] %} +{%- else -%} + {% set packagename = varargs[0] %} + {% set modelname = varargs[1] %} +{% endif %} + +{%- set version_override = 2 -%} +{%- set packagename_override = 'test' -%} +-- call builtins.ref based on provided positional arguments +{% if packagename is not none %} + {% do return(builtins.ref(packagename_override, modelname, version=version_override)) %} +{% else %} + {% do return(builtins.ref(modelname, version=version_override)) %} +{% endif %} + +{% endmacro %} +""" + + +class TestAdvancedRefOverride: @pytest.fixture(scope="class") - def project_config_update(self): + def models(self): return { - "config-version": 2, - "seed-paths": ["seeds"], - "macro-paths": ["macros"], - "seeds": { - "quote_columns": False, - }, + "version_ref_override.sql": models__version_ref_override_sql, + "package_ref_override.sql": models__package_ref_override_sql, + "package_version_ref_override.sql": models__package_version_ref_override_sql, + "versioned_model_v1.sql": models__v1_sql, + "versioned_model_v2.sql": models__v2_sql, + "model.sql": models__v1_sql, + "schema.yml": schema__versions_yml, } + @pytest.fixture(scope="class") + def macros(self): + return {"ref_override_macro.sql": macros__package_version_ref_override_macro_sql} + def test_ref_override( self, project, ): - run_dbt(["seed"]) run_dbt(["run"]) - # We want 
it to equal seed_2 and not seed_1. If it's - # still pointing at seed_1 then the override hasn't worked. - check_relations_equal(project.adapter, ["ref_override", "seed_2"]) + # We want versioned_ref_override to equal to versioned_model_v2, otherwise the + # ref override macro has not worked + check_relations_equal(project.adapter, ["version_ref_override", "versioned_model_v2"]) + + check_relations_equal(project.adapter, ["package_ref_override", "versioned_model_v2"]) + + check_relations_equal( + project.adapter, ["package_version_ref_override", "versioned_model_v2"] + ) diff --git a/tests/functional/retry/fixtures.py b/tests/functional/retry/fixtures.py index 1c063b4490a..64adf9c684f 100644 --- a/tests/functional/retry/fixtures.py +++ b/tests/functional/retry/fixtures.py @@ -11,7 +11,7 @@ - name: sample_model columns: - name: foo - tests: + data_tests: - accepted_values: values: [3] quote: false @@ -20,7 +20,7 @@ - name: second_model columns: - name: bar - tests: + data_tests: - accepted_values: values: [3] quote: false @@ -29,7 +29,7 @@ - name: union_model columns: - name: sum3 - tests: + data_tests: - accepted_values: values: [3] quote: false @@ -45,3 +45,16 @@ {% do log("Timezone set to: " + timezone, info=True) %} {% endmacro %} """ + +simple_model = """ +select null as id +""" + +simple_schema = """ +models: + - name: some_model + columns: + - name: id + data_tests: + - not_null +""" diff --git a/tests/functional/retry/test_retry.py b/tests/functional/retry/test_retry.py index 8c322a664c7..012db25e42f 100644 --- a/tests/functional/retry/test_retry.py +++ b/tests/functional/retry/test_retry.py @@ -1,14 +1,19 @@ +from pathlib import Path +from shutil import copytree, move + import pytest from dbt.contracts.results import RunStatus, TestStatus from dbt.exceptions import DbtRuntimeError, TargetNotFoundError -from dbt.tests.util import run_dbt, write_file, rm_file +from dbt.tests.util import rm_file, run_dbt, write_file from tests.functional.retry.fixtures import ( + macros__alter_timezone_sql, models__sample_model, + models__second_model, models__union_model, schema_yml, - models__second_model, - macros__alter_timezone_sql, + simple_model, + simple_schema, ) @@ -122,7 +127,10 @@ def test_previous_run(self, project): write_file(models__sample_model, "models", "sample_model.sql") def test_warn_error(self, project): - # Regular build + # Our test command should succeed when run normally... + results = run_dbt(["build", "--select", "second_model"]) + + # ...but it should fail when run with warn-error, due to a warning... 
results = run_dbt(["--warn-error", "build", "--select", "second_model"], expect_pass=False) expected_statuses = { @@ -225,3 +233,135 @@ def test_fail_fast(self, project): results = run_dbt(["retry"]) assert {r.node.unique_id: r.status for r in results.results} == {} + + +class TestRetryResourceType: + @pytest.fixture(scope="class") + def models(self): + return { + "null_model.sql": simple_model, + "schema.yml": simple_schema, + } + + def test_resource_type(self, project): + # test multiple options in single string + results = run_dbt(["build", "--select", "null_model", "--resource-type", "test model"]) + assert len(results) == 1 + + # nothing to do + results = run_dbt(["retry"]) + assert len(results) == 0 + + # test multiple options in multiple args + results = run_dbt( + [ + "build", + "--select", + "null_model", + "--resource-type", + "test", + "--resource-type", + "model", + ] + ) + assert len(results) == 1 + + # nothing to do + results = run_dbt(["retry"]) + assert len(results) == 0 + + # test single all option + results = run_dbt(["build", "--select", "null_model", "--resource-type", "all"]) + assert len(results) == 1 + + # nothing to do + results = run_dbt(["retry"]) + assert len(results) == 0 + + +class TestRetryOverridePath: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": models__sample_model, + } + + def test_retry(self, project): + project_root = project.project_root + proj_location_1 = project_root / "proj_location_1" + proj_location_2 = project_root / "proj_location_2" + + copytree(project_root, proj_location_1) + run_dbt(["run", "--project-dir", "proj_location_1"], expect_pass=False) + move(proj_location_1, proj_location_2) + run_dbt(["retry", "--project-dir", "proj_location_2"], expect_pass=False) + + +class TestRetryVars: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": "select {{ var('myvar_a', '1') + var('myvar_b', '2') }} as mycol", + } + + def test_retry(self, project): + # pass because default vars works + run_dbt(["run"]) + run_dbt(["run", "--vars", '{"myvar_a": "12", "myvar_b": "3 4"}'], expect_pass=False) + # fail because vars are invalid, this shows that the last passed vars are being used + # instead of using the default vars + run_dbt(["retry"], expect_pass=False) + results = run_dbt(["retry", "--vars", '{"myvar_a": "12", "myvar_b": "34"}']) + assert len(results) == 1 + + +class TestRetryFullRefresh: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": "{% if flags.FULL_REFRESH %} this is invalid sql {% else %} select 1 as mycol {% endif %}", + } + + def test_retry(self, project): + # This run should fail with invalid sql... + run_dbt(["run", "--full-refresh"], expect_pass=False) + # ...and so should this one, since the effect of the full-refresh parameter should persist. 
+ results = run_dbt(["retry"], expect_pass=False) + assert len(results) == 1 + + +class TestRetryTargetPathEnvVar: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": models__sample_model, + } + + def test_retry_target_path_env_var(self, project, monkeypatch): + monkeypatch.setenv("DBT_TARGET_PATH", "artifacts") + run_dbt(["run"], expect_pass=False) + + write_file(models__second_model, "models", "sample_model.sql") + + results = run_dbt(["retry"]) + assert len(results) == 1 + + +class TestRetryTargetPathFlag: + @pytest.fixture(scope="class") + def models(self): + return { + "sample_model.sql": models__sample_model, + } + + def test_retry_target_path_flag(self, project): + run_dbt(["run", "--target-path", "target"], expect_pass=False) + + project_root = project.project_root + move(project_root / "target", project_root / "artifacts") + + write_file(models__second_model, "models", "sample_model.sql") + + results = run_dbt(["retry", "--state", "artifacts", "--target-path", "my_target_path"]) + assert len(results) == 1 + assert Path("my_target_path").is_dir() diff --git a/tests/functional/run_operations/test_run_operations.py b/tests/functional/run_operations/test_run_operations.py index aa6d908b8ce..064c98b3a51 100644 --- a/tests/functional/run_operations/test_run_operations.py +++ b/tests/functional/run_operations/test_run_operations.py @@ -3,17 +3,21 @@ import pytest import yaml -from dbt.exceptions import DbtInternalError from dbt.tests.util import ( check_table_does_exist, - run_dbt, - write_file, mkdir, - run_dbt_and_capture, rm_dir, rm_file, + run_dbt, + run_dbt_and_capture, + write_file, +) +from dbt_common.exceptions import DbtInternalError +from tests.functional.run_operations.fixtures import ( + happy_macros_sql, + model_sql, + sad_macros_sql, ) -from tests.functional.run_operations.fixtures import happy_macros_sql, sad_macros_sql, model_sql class TestOperations: @@ -28,7 +32,6 @@ def macros(self): @pytest.fixture(scope="class") def dbt_profile_data(self, unique_schema): return { - "config": {"send_anonymous_usage_stats": False}, "test": { "outputs": { "default": { diff --git a/tests/functional/saved_queries/__init__.py b/tests/functional/saved_queries/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/functional/saved_queries/fixtures.py b/tests/functional/saved_queries/fixtures.py new file mode 100644 index 00000000000..96383ab5472 --- /dev/null +++ b/tests/functional/saved_queries/fixtures.py @@ -0,0 +1,162 @@ +saved_query_description = """ +{% docs saved_query_description %} My SavedQuery Description {% enddocs %} +""" + +saved_queries_yml = """ +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('id__ds')" + where: + - "{{ TimeDimension('id__ds', 'DAY') }} <= now()" + - "{{ TimeDimension('id__ds', 'DAY') }} >= '2023-01-01'" + - "{{ Metric('txn_revenue', ['id']) }} > 1" + exports: + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name +""" + +saved_queries_with_defaults_yml = """ +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('id__ds')" + where: + - "{{ TimeDimension('id__ds', 'DAY') }} <= now()" + - "{{ TimeDimension('id__ds', 'DAY') }} >= '2023-01-01'" + - "{{ 
Metric('txn_revenue', ['id']) }} > 1" + exports: + - name: my_export + config: + alias: my_export_alias + export_as: table +""" + +saved_queries_with_diff_filters_yml = """ +saved_queries: + - name: test_saved_query_where_list + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('id__ds')" + where: + - "{{ TimeDimension('id__ds', 'DAY') }} <= now()" + - "{{ TimeDimension('id__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name + + - name: test_saved_query_where_str + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query2 + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('id__ds')" + where: "{{ TimeDimension('id__ds', 'DAY') }} <= now()" +""" + +saved_query_with_extra_config_attributes_yml = """ +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('id__ds')" + where: + - "{{ TimeDimension('id__ds', 'DAY') }} <= now()" + - "{{ TimeDimension('id__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export + config: + my_random_config: 'I have this for some reason' + export_as: table +""" + +saved_query_with_export_configs_defined_at_saved_query_level_yml = """ +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + config: + export_as: table + schema: my_default_export_schema + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('id__ds')" + where: + - "{{ TimeDimension('id__ds', 'DAY') }} <= now()" + - "{{ TimeDimension('id__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export + config: + export_as: view + schema: my_custom_export_schema + - name: my_export2 +""" + +saved_query_without_export_configs_defined_yml = """ +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('id__ds')" + where: + - "{{ TimeDimension('id__ds', 'DAY') }} <= now()" + - "{{ TimeDimension('id__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export +""" + +saved_query_with_cache_configs_defined_yml = """ +saved_queries: + - name: test_saved_query + description: "{{ doc('saved_query_description') }}" + label: Test Saved Query + config: + cache: + enabled: True + query_params: + metrics: + - simple_metric + group_by: + - "Dimension('id__ds')" + where: + - "{{ TimeDimension('id__ds', 'DAY') }} <= now()" + - "{{ TimeDimension('id__ds', 'DAY') }} >= '2023-01-01'" + exports: + - name: my_export + config: + alias: my_export_alias + export_as: table + schema: my_export_schema_name +""" diff --git a/tests/functional/saved_queries/test_configs.py b/tests/functional/saved_queries/test_configs.py new file mode 100644 index 00000000000..df4be7aa5b6 --- /dev/null +++ b/tests/functional/saved_queries/test_configs.py @@ -0,0 +1,324 @@ +import pytest + +from dbt.contracts.graph.manifest import Manifest +from dbt.tests.util import update_config_file +from dbt_semantic_interfaces.type_enums.export_destination_type import ( + ExportDestinationType, +) +from tests.functional.assertions.test_runner import dbtTestRunner +from tests.functional.configs.fixtures import BaseConfigProject +from 
tests.functional.saved_queries.fixtures import ( + saved_queries_with_defaults_yml, + saved_queries_yml, + saved_query_description, + saved_query_with_cache_configs_defined_yml, + saved_query_with_export_configs_defined_at_saved_query_level_yml, + saved_query_with_extra_config_attributes_yml, + saved_query_without_export_configs_defined_yml, +) +from tests.functional.semantic_models.fixtures import ( + fct_revenue_sql, + metricflow_time_spine_sql, + schema_yml, +) + + +class TestSavedQueryConfigs(BaseConfigProject): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "saved-queries": { + "test": { + "test_saved_query": { + "+enabled": True, + "+export_as": ExportDestinationType.VIEW.value, + "+schema": "my_default_export_schema", + "+cache": {"enabled": True}, + } + }, + }, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_query_with_extra_config_attributes_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_basic_saved_query_config( + self, + project, + ): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.config.export_as == ExportDestinationType.VIEW + assert saved_query.config.schema == "my_default_export_schema" + assert saved_query.config.cache.enabled is True + + # disable the saved_query via project config and rerun + config_patch = {"saved-queries": {"test": {"test_saved_query": {"+enabled": False}}}} + update_config_file(config_patch, project.project_root, "dbt_project.yml") + result = runner.invoke(["parse"]) + assert result.success + assert len(result.result.saved_queries) == 0 + + +# Test that the cache will default to enabled = false if not set in the saved_query config +class TestSavedQueryDefaultCacheConfigs(BaseConfigProject): + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_query_with_extra_config_attributes_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_basic_saved_query_config( + self, + project, + ): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.config.cache.enabled is False + + +class TestExportConfigsWithAdditionalProperties(BaseConfigProject): + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_queries_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_extra_config_properties_dont_break_parsing(self, project): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = 
result.result.saved_queries["saved_query.test.test_saved_query"]
+        assert len(saved_query.exports) == 1
+        assert saved_query.exports[0].config.__dict__.get("my_random_config") is None
+
+
+class TestExportConfigsWithDefaultProperties(BaseConfigProject):
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "saved_queries.yml": saved_queries_with_defaults_yml,
+            "schema.yml": schema_yml,
+            "fct_revenue.sql": fct_revenue_sql,
+            "metricflow_time_spine.sql": metricflow_time_spine_sql,
+            "docs.md": saved_query_description,
+        }
+
+    def test_default_properties(self, project):
+        runner = dbtTestRunner()
+
+        # parse with default fixture project config
+        result = runner.invoke(["parse"])
+        assert result.success
+        assert isinstance(result.result, Manifest)
+        assert len(result.result.saved_queries) == 1
+        saved_query = result.result.saved_queries["saved_query.test.test_saved_query"]
+        assert len(saved_query.exports) == 1
+        export = saved_query.exports[0]
+        assert export.config.alias == "my_export_alias"
+        assert export.config.schema_name == project.test_schema
+        assert export.config.database == project.database
+
+
+class TestInheritingExportConfigFromSavedQueryConfig(BaseConfigProject):
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "saved_queries.yml": saved_query_with_export_configs_defined_at_saved_query_level_yml,
+            "schema.yml": schema_yml,
+            "fct_revenue.sql": fct_revenue_sql,
+            "metricflow_time_spine.sql": metricflow_time_spine_sql,
+            "docs.md": saved_query_description,
+        }
+
+    def test_export_config_inherits_from_saved_query(self, project):
+        runner = dbtTestRunner()
+
+        # parse with default fixture project config
+        result = runner.invoke(["parse"])
+        assert result.success
+        assert isinstance(result.result, Manifest)
+        assert len(result.result.saved_queries) == 1
+        saved_query = result.result.saved_queries["saved_query.test.test_saved_query"]
+        assert len(saved_query.exports) == 2
+
+        # assert Export `my_export` has its configs defined from itself because they should take priority
+        export1 = next(
+            (export for export in saved_query.exports if export.name == "my_export"), None
+        )
+        assert export1 is not None
+        assert export1.config.export_as == ExportDestinationType.VIEW
+        assert export1.config.export_as != saved_query.config.export_as
+        assert export1.config.schema_name == "my_custom_export_schema"
+        assert export1.config.schema_name != saved_query.config.schema
+        assert export1.config.database == project.database
+
+        # assert Export `my_export2` inherits its configs from the saved_query because it doesn't define its own
+        export2 = next(
+            (export for export in saved_query.exports if export.name == "my_export2"), None
+        )
+        assert export2 is not None
+        assert export2.config.export_as == ExportDestinationType.TABLE
+        assert export2.config.export_as == saved_query.config.export_as
+        assert export2.config.schema_name == "my_default_export_schema"
+        assert export2.config.schema_name == saved_query.config.schema
+        assert export2.config.database == project.database
+
+
+class TestInheritingExportConfigsFromProject(BaseConfigProject):
+    @pytest.fixture(scope="class")
+    def project_config_update(self):
+        return {
+            "saved-queries": {
+                "test": {
+                    "test_saved_query": {
+                        "+export_as": ExportDestinationType.VIEW.value,
+                    }
+                },
+            },
+        }
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "saved_queries.yml": saved_query_without_export_configs_defined_yml,
+            "schema.yml": schema_yml,
+            "fct_revenue.sql": fct_revenue_sql,
"metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_export_config_inherits_from_project( + self, + project, + ): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.config.export_as == ExportDestinationType.VIEW + + # change export's `export_as` to `TABLE` via project config + config_patch = { + "saved-queries": { + "test": {"test_saved_query": {"+export_as": ExportDestinationType.TABLE.value}} + } + } + update_config_file(config_patch, project.project_root, "dbt_project.yml") + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.config.export_as == ExportDestinationType.TABLE + + +# cache can be specified just in a SavedQuery config +class TestSavedQueryLevelCacheConfigs(BaseConfigProject): + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_query_with_cache_configs_defined_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_basic_saved_query_config( + self, + project, + ): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.config.cache.enabled is True + + +# the cache defined in yaml for the SavedQuery overrides settings from the dbt_project.toml +class TestSavedQueryCacheConfigsOverride(BaseConfigProject): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "saved-queries": { + "test": { + "test_saved_query": { + "+cache": {"enabled": True}, + } + }, + }, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_query_with_cache_configs_defined_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_override_saved_query_config( + self, + project, + ): + runner = dbtTestRunner() + + # parse with default fixture project config + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + assert len(result.result.saved_queries) == 1 + saved_query = result.result.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.config.cache.enabled is True + + # set cache to enabled=False via project config but since it's set to true at the saved_query + # level, it should stay enabled + config_patch = { + "saved-queries": {"test": {"test_saved_query": {"+cache": {"enabled": False}}}} + } + update_config_file(config_patch, project.project_root, "dbt_project.yml") + result = runner.invoke(["parse"]) + assert result.success + assert saved_query.config.cache.enabled is True diff --git a/tests/functional/saved_queries/test_saved_query_build.py 
b/tests/functional/saved_queries/test_saved_query_build.py new file mode 100644 index 00000000000..e9c2bbda3f8 --- /dev/null +++ b/tests/functional/saved_queries/test_saved_query_build.py @@ -0,0 +1,39 @@ +import pytest + +from dbt.tests.util import run_dbt +from tests.functional.saved_queries.fixtures import ( + saved_queries_yml, + saved_query_description, +) +from tests.functional.semantic_models.fixtures import ( + fct_revenue_sql, + metricflow_time_spine_sql, + schema_yml, +) + + +class TestSavedQueryBuild: + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_queries_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + @pytest.fixture(scope="class") + def packages(self): + return """ +packages: + - package: dbt-labs/dbt_utils + version: 1.1.1 +""" + + def test_build_saved_queries_no_op(self, project) -> None: + """Test building saved query exports with no flag, so should be no-op.""" + run_dbt(["deps"]) + result = run_dbt(["build"]) + assert len(result.results) == 3 + assert "NO-OP" in [r.message for r in result.results] diff --git a/tests/functional/saved_queries/test_saved_query_parsing.py b/tests/functional/saved_queries/test_saved_query_parsing.py new file mode 100644 index 00000000000..40e4cdfa4fb --- /dev/null +++ b/tests/functional/saved_queries/test_saved_query_parsing.py @@ -0,0 +1,273 @@ +import os +import shutil +from copy import deepcopy +from typing import List + +import pytest + +from dbt.contracts.graph.manifest import Manifest +from dbt.tests.util import run_dbt, write_file +from dbt_common.events.base_types import BaseEvent +from dbt_semantic_interfaces.type_enums.export_destination_type import ( + ExportDestinationType, +) +from tests.functional.assertions.test_runner import dbtTestRunner +from tests.functional.saved_queries.fixtures import ( + saved_queries_with_defaults_yml, + saved_queries_with_diff_filters_yml, + saved_queries_yml, + saved_query_description, + saved_query_with_cache_configs_defined_yml, +) +from tests.functional.semantic_models.fixtures import ( + fct_revenue_sql, + metricflow_time_spine_sql, + schema_yml, +) + + +class TestSavedQueryParsing: + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_queries_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + @pytest.fixture(scope="class") + def other_schema(self, unique_schema): + return unique_schema + "_other" + + @pytest.fixture(scope="class") + def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema): + outputs = {"default": dbt_profile_target, "prod": deepcopy(dbt_profile_target)} + outputs["default"]["schema"] = unique_schema + outputs["prod"]["schema"] = other_schema + return {"test": {"outputs": outputs, "target": "default"}} + + def copy_state(self): + if not os.path.exists("state"): + os.makedirs("state") + shutil.copyfile("target/manifest.json", "state/manifest.json") + + def test_semantic_model_parsing(self, project): + runner = dbtTestRunner() + result = runner.invoke(["parse", "--no-partial-parse"]) + assert result.success + assert isinstance(result.result, Manifest) + manifest = result.result + assert len(manifest.saved_queries) == 1 + saved_query = manifest.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.name == 
"test_saved_query" + assert len(saved_query.query_params.metrics) == 1 + assert len(saved_query.query_params.group_by) == 1 + assert len(saved_query.query_params.where.where_filters) == 3 + assert len(saved_query.depends_on.nodes) == 1 + assert saved_query.description == "My SavedQuery Description" + assert len(saved_query.exports) == 1 + assert saved_query.exports[0].name == "my_export" + assert saved_query.exports[0].config.alias == "my_export_alias" + assert saved_query.exports[0].config.export_as == ExportDestinationType.TABLE + assert saved_query.exports[0].config.schema_name == "my_export_schema_name" + assert saved_query.exports[0].unrendered_config == { + "alias": "my_export_alias", + "export_as": "table", + "schema": "my_export_schema_name", + } + + # Save state + self.copy_state() + # Nothing has changed, so no state:modified results + results = run_dbt(["ls", "--select", "state:modified", "--state", "./state"]) + assert len(results) == 0 + + # Change saved_query + write_file( + saved_query_with_cache_configs_defined_yml, + project.project_root, + "models", + "saved_queries.yml", + ) + # State modified finds changed saved_query + results = run_dbt(["ls", "--select", "state:modified", "--state", "./state"]) + assert len(results) == 1 + + # change exports + write_file( + saved_queries_with_defaults_yml, project.project_root, "models", "saved_queries.yml" + ) + # State modified finds changed saved_query + results = run_dbt(["ls", "--select", "state:modified", "--state", "./state"]) + assert len(results) == 1 + + def test_semantic_model_parsing_change_export(self, project, other_schema): + runner = dbtTestRunner() + result = runner.invoke(["parse", "--no-partial-parse"]) + assert result.success + assert isinstance(result.result, Manifest) + manifest = result.result + assert len(manifest.saved_queries) == 1 + saved_query = manifest.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.name == "test_saved_query" + assert saved_query.exports[0].name == "my_export" + + # Save state + self.copy_state() + # Nothing has changed, so no state:modified results + results = run_dbt(["ls", "--select", "state:modified", "--state", "./state"]) + assert len(results) == 0 + + # Change export name + write_file( + saved_queries_yml.replace("name: my_export", "name: my_expor2"), + project.project_root, + "models", + "saved_queries.yml", + ) + # State modified finds changed saved_query + results = run_dbt(["ls", "--select", "state:modified", "--state", "./state"]) + assert len(results) == 1 + + # Change export schema + write_file( + saved_queries_yml.replace( + "schema: my_export_schema_name", "schema: my_export_schema_name2" + ), + project.project_root, + "models", + "saved_queries.yml", + ) + # State modified finds changed saved_query + results = run_dbt(["ls", "--select", "state:modified", "--state", "./state"]) + assert len(results) == 1 + + def test_semantic_model_parsing_with_default_schema(self, project, other_schema): + write_file( + saved_queries_with_defaults_yml, project.project_root, "models", "saved_queries.yml" + ) + runner = dbtTestRunner() + result = runner.invoke(["parse", "--no-partial-parse", "--target", "prod"]) + assert result.success + assert isinstance(result.result, Manifest) + manifest = result.result + assert len(manifest.saved_queries) == 1 + saved_query = manifest.saved_queries["saved_query.test.test_saved_query"] + assert saved_query.name == "test_saved_query" + assert len(saved_query.query_params.metrics) == 1 + assert len(saved_query.query_params.group_by) 
== 1 + assert len(saved_query.query_params.where.where_filters) == 3 + assert len(saved_query.depends_on.nodes) == 1 + assert saved_query.description == "My SavedQuery Description" + assert len(saved_query.exports) == 1 + assert saved_query.exports[0].name == "my_export" + assert saved_query.exports[0].config.alias == "my_export_alias" + assert saved_query.exports[0].config.export_as == ExportDestinationType.TABLE + assert saved_query.exports[0].config.schema_name == other_schema + assert saved_query.exports[0].unrendered_config == { + "alias": "my_export_alias", + "export_as": "table", + } + + # Save state + self.copy_state() + # Nothing has changed, so no state:modified results + results = run_dbt( + ["ls", "--select", "state:modified", "--state", "./state", "--target", "prod"] + ) + assert len(results) == 0 + + # There should also be no state:modified results when using the default schema + results = run_dbt(["ls", "--select", "state:modified", "--state", "./state"]) + assert len(results) == 0 + + def test_saved_query_error(self, project): + error_schema_yml = saved_queries_yml.replace("simple_metric", "metric_not_found") + write_file(error_schema_yml, project.project_root, "models", "saved_queries.yml") + events: List[BaseEvent] = [] + runner = dbtTestRunner(callbacks=[events.append]) + + result = runner.invoke(["parse", "--no-partial-parse"]) + assert not result.success + validation_errors = [e for e in events if e.info.name == "MainEncounteredError"] + assert validation_errors + + +class TestSavedQueryPartialParsing: + @pytest.fixture(scope="class") + def models(self): + return { + "saved_queries.yml": saved_queries_yml, + "saved_queries_with_diff_filters.yml": saved_queries_with_diff_filters_yml, + "schema.yml": schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "docs.md": saved_query_description, + } + + def test_saved_query_filter_types(self, project): + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + + manifest = result.result + saved_query1 = manifest.saved_queries["saved_query.test.test_saved_query_where_list"] + saved_query2 = manifest.saved_queries["saved_query.test.test_saved_query_where_str"] + + # List filter + assert len(saved_query1.query_params.where.where_filters) == 2 + assert { + where_filter.where_sql_template + for where_filter in saved_query1.query_params.where.where_filters + } == { + "{{ TimeDimension('id__ds', 'DAY') }} <= now()", + "{{ TimeDimension('id__ds', 'DAY') }} >= '2023-01-01'", + } + # String filter + assert len(saved_query2.query_params.where.where_filters) == 1 + assert ( + saved_query2.query_params.where.where_filters[0].where_sql_template + == "{{ TimeDimension('id__ds', 'DAY') }} <= now()" + ) + + def test_saved_query_metrics_changed(self, project): + # First, use the default saved_queries.yml to define our saved_queries, and + # run the dbt parse command + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + + # Next, modify the default saved_queries.yml to change a detail of the saved + # query. + modified_saved_queries_yml = saved_queries_yml.replace("simple_metric", "txn_revenue") + write_file(modified_saved_queries_yml, project.project_root, "models", "saved_queries.yml") + + # Now, run the dbt parse command again. 
+ result = runner.invoke(["parse"]) + assert result.success + + # Finally, verify that the manifest reflects the partially parsed change + manifest = result.result + saved_query = manifest.saved_queries["saved_query.test.test_saved_query"] + assert len(saved_query.metrics) == 1 + assert saved_query.metrics[0] == "txn_revenue" + + def test_saved_query_deleted_partial_parsing(self, project): + # First, use the default saved_queries.yml to define our saved_query, and + # run the dbt parse command + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + assert "saved_query.test.test_saved_query" in result.result.saved_queries + + # Next, modify the default saved_queries.yml to remove the saved query. + write_file("", project.project_root, "models", "saved_queries.yml") + + # Now, run the dbt parse command again. + result = runner.invoke(["parse"]) + assert result.success + + # Finally, verify that the manifest reflects the deletion + assert "saved_query.test.test_saved_query" not in result.result.saved_queries diff --git a/tests/functional/schema/test_custom_schema.py b/tests/functional/schema/test_custom_schema.py index 7262a79cce9..5a9969e4284 100644 --- a/tests/functional/schema/test_custom_schema.py +++ b/tests/functional/schema/test_custom_schema.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal + +from dbt.tests.util import check_relations_equal, run_dbt from tests.functional.schema.fixtures.macros import ( _CUSTOM_MACRO, _CUSTOM_MACRO_MULTI_SCHEMA, diff --git a/tests/functional/schema_tests/data_test_config.py b/tests/functional/schema_tests/data_test_config.py new file mode 100644 index 00000000000..377f14aac04 --- /dev/null +++ b/tests/functional/schema_tests/data_test_config.py @@ -0,0 +1,115 @@ +import re + +import pytest + +from dbt.exceptions import CompilationError +from dbt.tests.util import get_manifest, run_dbt +from tests.functional.schema_tests.fixtures import ( + custom_config_yml, + mixed_config_yml, + same_key_error_yml, + seed_csv, + table_sql, +) + + +class BaseDataTestsConfig: + @pytest.fixture(scope="class") + def seeds(self): + return {"seed.csv": seed_csv} + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + } + + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project): + run_dbt(["seed"]) + + +class TestCustomDataTestConfig(BaseDataTestsConfig): + @pytest.fixture(scope="class") + def models(self): + return {"table.sql": table_sql, "custom_config.yml": custom_config_yml} + + def test_custom_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + # Pattern to match the test_id without the specific suffix + pattern = re.compile(r"test\.test\.accepted_values_table_color__blue__red\.\d+") + + # Find the test_id dynamically + test_id = None + for node_id in manifest.nodes: + if pattern.match(node_id): + test_id = node_id + break + + # Ensure the test_id was found + assert ( + test_id is not None + ), "Test ID matching the pattern was not found in the manifest nodes" + + # Proceed with the assertions + test_node = manifest.nodes[test_id] + assert "custom_config_key" in test_node.config + assert test_node.config["custom_config_key"] == "some_value" + + +class TestMixedDataTestConfig(BaseDataTestsConfig): + @pytest.fixture(scope="class") + def models(self): + return {"table.sql": table_sql, "mixed_config.yml": mixed_config_yml} + + def test_mixed_config(self, project): + run_dbt(["parse"]) + 
manifest = get_manifest(project.project_root) + + # Pattern to match the test_id without the specific suffix + pattern = re.compile(r"test\.test\.accepted_values_table_color__blue__red\.\d+") + + # Find the test_id dynamically + test_id = None + for node_id in manifest.nodes: + if pattern.match(node_id): + test_id = node_id + break + + # Ensure the test_id was found + assert ( + test_id is not None + ), "Test ID matching the pattern was not found in the manifest nodes" + + # Proceed with the assertions + test_node = manifest.nodes[test_id] + assert "custom_config_key" in test_node.config + assert test_node.config["custom_config_key"] == "some_value" + assert "severity" in test_node.config + assert test_node.config["severity"] == "warn" + + +class TestSameKeyErrorDataTestConfig: + @pytest.fixture(scope="class") + def models(self): + return {"table.sql": table_sql, "same_key_error.yml": same_key_error_yml} + + def test_same_key_error(self, project): + """ + Test that verifies dbt raises a CompilationError when the test configuration + contains the same key at the top level and inside the config dictionary. + """ + # Run dbt and expect a CompilationError due to the invalid configuration + with pytest.raises(CompilationError) as exc_info: + run_dbt(["parse"]) + + # Extract the exception message + exception_message = str(exc_info.value) + + # Assert that the error message contains the expected text + assert "Test cannot have the same key at the top-level and in config" in exception_message + + # Assert that the error message contains the context of the error + assert "models/same_key_error.yml" in exception_message diff --git a/tests/functional/schema_tests/fixtures.py b/tests/functional/schema_tests/fixtures.py index 7e0dfbaca58..bf16148e0c7 100644 --- a/tests/functional/schema_tests/fixtures.py +++ b/tests/functional/schema_tests/fixtures.py @@ -10,7 +10,7 @@ models: - name: model_a - tests: + data_tests: - self_referential """ @@ -140,13 +140,13 @@ columns: - name: id quote: true - tests: + data_tests: - unique - name: uppercase columns: - name: id quote: true - tests: + data_tests: - unique """ @@ -204,11 +204,11 @@ models: - name: model_a - tests: + data_tests: - type_one - type_two - name: model_c - tests: + data_tests: - call_pkg_macro - test_utils.dispatch @@ -366,11 +366,11 @@ models: - name: model_a - tests: + data_tests: - type_one - type_two - name: model_c - tests: + data_tests: - call_pkg_macro - local_utils.dispatch - my_datediff @@ -398,12 +398,12 @@ - name: base columns: - name: extension_id - tests: + data_tests: - not_null - name: base_extension columns: - name: id - tests: + data_tests: - not_null """ @@ -423,7 +423,7 @@ - name: model_a columns: - name: id - tests: + data_tests: - not_null: config: where: "1=1" @@ -444,7 +444,7 @@ - name: model_a columns: - name: id - tests: + data_tests: - not_null: config: where: "id = (select id from {{ ref('model_a') }} limit 1)" @@ -462,7 +462,7 @@ - name: model_a columns: - name: id - tests: + data_tests: - not_null: name: not_null_where_1_equals_1 config: @@ -484,7 +484,7 @@ - name: model_a columns: - name: id - tests: + data_tests: - name: not_null_where_1_equals_1 test_name: not_null config: @@ -525,7 +525,7 @@ columns: - name: Id quote: true - tests: + data_tests: - unique - not_null @@ -545,7 +545,7 @@ models: - name: model - tests: + data_tests: - equivalent: value: "{{ var('myvar', 'baz') }}-bar" @@ -565,7 +565,7 @@ columns: - name: id description: "The number of responses for this favorite color - purple will be null" - tests: 
+ data_tests: - not_null: error_if: '>1' warn_if: '>1' @@ -575,7 +575,7 @@ columns: - name: id description: "The number of responses for this favorite color - purple will be null" - tests: + data_tests: - not_null: error_if: '>1' @@ -584,7 +584,7 @@ columns: - name: id description: "The number of responses for this favorite color - purple will be null" - tests: + data_tests: - not_null: error_if: '>1' @@ -616,32 +616,32 @@ columns: - name: id description: "The ID" - tests: + data_tests: - not_null - unique tags: - table_id - name: first_name description: "The user's first name" - tests: + data_tests: - not_null tags: - table_first_name - name: ip_address description: "The user's IP address" - tests: + data_tests: - not_null - name: updated_at description: "The update time of the user" - tests: + data_tests: - not_null - name: email description: "The user's email address" - tests: + data_tests: - unique - name: favorite_color description: "The user's favorite color" - tests: + data_tests: - accepted_values: { values: ['blue', 'green'], quote: true, @@ -651,7 +651,7 @@ - table_favorite_color - name: fav_number description: "The user's favorite number" - tests: + data_tests: - accepted_values: values: [3.14159265] quote: false @@ -664,7 +664,7 @@ columns: - name: favorite_color_copy description: "The favorite color" - tests: + data_tests: - not_null - unique - accepted_values: { values: ['blue', 'green'] } @@ -673,7 +673,7 @@ - table_favorite_color - name: count description: "The number of responses for this favorite color" - tests: + data_tests: - not_null # all of these constraints will fail @@ -682,14 +682,14 @@ columns: - name: id description: "The user ID" - tests: + data_tests: - not_null - unique tags: - xfail - name: favorite_color description: "The user's favorite color" - tests: + data_tests: - accepted_values: { values: ['blue', 'green'] } tags: - xfail @@ -700,7 +700,7 @@ columns: - name: favorite_color description: "The favorite color" - tests: + data_tests: - accepted_values: { values: ['red'] } - relationships: { field: favorite_color, to: ref('table_copy') } tags: @@ -712,7 +712,7 @@ columns: - name: favorite_color description: "The favorite color" - tests: + data_tests: - accepted_values: { values: ['red'] } - relationships: { field: favorite_color, to: ref('table_copy') } @@ -722,7 +722,7 @@ columns: - name: id description: "The user ID" - tests: + data_tests: - relationships: { field: id, to: ref('table_failure_copy') } tags: - xfail @@ -818,11 +818,11 @@ columns: - name: id description: "The ID" - tests: + data_tests: - not_null - unique - name: favorite_color - tests: + data_tests: # this is missing a "-" and is malformed accepted_values: { values: ['blue', 'green'] } @@ -832,14 +832,14 @@ columns: - name: favorite_color description: "The favorite color" - tests: + data_tests: - not_null - unique - accepted_values: { values: ['blue', 'green'] } - relationships: { field: favorite_color, to: ref('table_copy') } - name: count description: "The number of responses for this favorite color" - tests: + data_tests: - not_null """ @@ -878,7 +878,7 @@ columns: - name: id description: "The number of responses for this favorite color - purple will be null" - tests: + data_tests: - not_null @@ -898,7 +898,7 @@ - name: table_copy description: "A copy of the table" # passes - tests: + data_tests: - where - error_if - warn_if @@ -906,21 +906,21 @@ - fail_calc columns: - name: id - tests: + data_tests: # relationships with where - relationships: to: ref('table_copy') # itself field: id 
where: 1=1 - name: table_copy_another_one - tests: + data_tests: - where: # test override + weird quoting config: where: "\\"favorite_color\\" = 'red'" - name: "table.copy.with.dots" description: "A copy of the table with a gross name" # passes, see https://github.com/dbt-labs/dbt-core/issues/3857 - tests: + data_tests: - where """ @@ -952,7 +952,7 @@ models: - name: model - tests: + data_tests: - equivalent: value: "{{ var('myvar', 'baz') }}-bar" @@ -971,18 +971,18 @@ description: "A copy of the table" columns: - name: email - tests: + data_tests: - not_null - name: id description: "The ID" - tests: + data_tests: - unique - name: favorite_color - tests: + data_tests: - every_value_is_blue - rejected_values: { values: ['orange', 'purple'] } # passes - tests: + data_tests: - local_dep.equality: { compare_model: ref('table_copy') } """ @@ -1010,7 +1010,7 @@ description: "The favorite color" - name: count description: "The number of responses for this favorite color - purple will be null" - tests: + data_tests: - not_null: error_if: '>1' warn_if: '>1' @@ -1022,7 +1022,7 @@ description: "The favorite color" - name: count description: "The number of responses for this favorite color - purple will be null" - tests: + data_tests: - not_null: error_if: '>1' @@ -1033,7 +1033,7 @@ description: "The favorite color" - name: count description: "The number of responses for this favorite color - purple will be null" - tests: + data_tests: - not_null: error_if: '>1' @@ -1152,7 +1152,7 @@ - name: ephemeral columns: - name: id - tests: + data_tests: - unique """ @@ -1173,14 +1173,14 @@ columns: - name: Id quote: true - tests: + data_tests: - unique - not_null - name: model_again quote_columns: true columns: - name: Id - tests: + data_tests: - unique - not_null - name: model_noquote @@ -1188,7 +1188,7 @@ columns: - name: Id quote: false - tests: + data_tests: - unique - not_null @@ -1205,7 +1205,7 @@ columns: - name: Id quote: true - tests: + data_tests: - unique - name: my_source_2 schema: "{{ target.schema }}" @@ -1218,13 +1218,13 @@ column: true columns: - name: Id - tests: + data_tests: - unique # this should result in column quoting = false - name: model_noquote columns: - name: Id - tests: + data_tests: - unique @@ -1270,6 +1270,66 @@ version: 2 models: - name: my_model - tests: + data_tests: - my_custom_test """ + +custom_config_yml = """ +version: 2 +models: + - name: table + columns: + - name: color + data_tests: + - accepted_values: + values: ['blue', 'red'] + config: + custom_config_key: some_value +""" + +mixed_config_yml = """ +version: 2 +models: + - name: table + columns: + - name: color + data_tests: + - accepted_values: + values: ['blue', 'red'] + severity: warn + config: + custom_config_key: some_value +""" + +same_key_error_yml = """ +version: 2 +models: + - name: table + columns: + - name: color + data_tests: + - accepted_values: + values: ['blue', 'red'] + severity: warn + config: + severity: error +""" + +seed_csv = """ +id,color,value +1,blue,10 +2,red,20 +3,green,30 +4,yellow,40 +5,blue,50 +6,red,60 +7,blue,70 +8,green,80 +9,yellow,90 +10,blue,100 +""" + +table_sql = """ +-- content of the table.sql +select * from {{ ref('seed') }} +""" diff --git a/tests/functional/schema_tests/test_schema_v2_tests.py b/tests/functional/schema_tests/test_schema_v2_tests.py index af85bcc290c..ea33e62bce3 100644 --- a/tests/functional/schema_tests/test_schema_v2_tests.py +++ b/tests/functional/schema_tests/test_schema_v2_tests.py @@ -1,102 +1,103 @@ -import pytest import os import re -from 
dbt.tests.util import run_dbt, write_file +import pytest + +from dbt.contracts.results import TestStatus +from dbt.exceptions import CompilationError, DuplicateResourceNameError, ParsingError from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import run_dbt, write_file from tests.fixtures.dbt_integration_project import dbt_integration_project # noqa: F401 from tests.functional.schema_tests.fixtures import ( - wrong_specification_block__schema_yml, - test_context_where_subq_models__schema_yml, - test_context_where_subq_models__model_a_sql, - test_utils__dbt_project_yml, - test_utils__macros__current_timestamp_sql, - test_utils__macros__custom_test_sql, - local_dependency__dbt_project_yml, - local_dependency__macros__equality_sql, - case_sensitive_models__schema_yml, + all_quotes_schema__schema_yml, + alt_local_utils__macros__type_timestamp_sql, case_sensitive_models__lowercase_sql, - test_context_macros__my_test_sql, - test_context_macros__test_my_datediff_sql, - test_context_macros__custom_schema_tests_sql, - test_context_models_namespaced__schema_yml, - test_context_models_namespaced__model_c_sql, - test_context_models_namespaced__model_b_sql, - test_context_models_namespaced__model_a_sql, - macros_v2__override_get_test_macros_fail__get_test_sql_sql, - macros_v2__macros__tests_sql, - macros_v2__custom_configs__test_sql, - macros_v2__override_get_test_macros__get_test_sql_sql, - test_context_macros_namespaced__my_test_sql, - test_context_macros_namespaced__custom_schema_tests_sql, - seeds__some_seed_csv, - test_context_models__schema_yml, - test_context_models__model_c_sql, - test_context_models__model_b_sql, - test_context_models__model_a_sql, - name_collision__schema_yml, - name_collision__base_sql, - name_collision__base_extension_sql, - dupe_generic_tests_collide__schema_yml, - dupe_generic_tests_collide__model_a, - custom_generic_test_config_custom_macro__schema_yml, + case_sensitive_models__schema_yml, + case_sensitive_models__uppercase_SQL, custom_generic_test_config_custom_macro__model_a, - custom_generic_test_names__schema_yml, + custom_generic_test_config_custom_macro__schema_yml, custom_generic_test_names__model_a, - custom_generic_test_names_alt_format__schema_yml, + custom_generic_test_names__schema_yml, custom_generic_test_names_alt_format__model_a, - test_context_where_subq_macros__custom_generic_test_sql, - invalid_schema_models__schema_yml, + custom_generic_test_names_alt_format__schema_yml, + dupe_generic_tests_collide__model_a, + dupe_generic_tests_collide__schema_yml, + ephemeral__ephemeral_sql, + ephemeral__schema_yml, invalid_schema_models__model_sql, + invalid_schema_models__schema_yml, + local_dependency__dbt_project_yml, + local_dependency__macros__equality_sql, + local_utils__dbt_project_yml, + local_utils__macros__current_timestamp_sql, + local_utils__macros__custom_test_sql, + local_utils__macros__datediff_sql, + macro_resolution_order_macros__my_custom_test_sql, + macro_resolution_order_models__config_yml, + macro_resolution_order_models__my_model_sql, + macros_v2__custom_configs__test_sql, + macros_v2__macros__tests_sql, + macros_v2__override_get_test_macros__get_test_sql_sql, + macros_v2__override_get_test_macros_fail__get_test_sql_sql, + models_v2__custom__schema_yml, + models_v2__custom__table_copy_sql, + models_v2__custom_configs__schema_yml, + models_v2__custom_configs__table_copy_another_one_sql, + models_v2__custom_configs__table_copy_sql, + models_v2__custom_configs__table_copy_with_dots_sql, + 
models_v2__limit_null__schema_yml, + models_v2__limit_null__table_failure_limit_null_sql, + models_v2__limit_null__table_limit_null_sql, + models_v2__limit_null__table_warning_limit_null_sql, + models_v2__malformed__schema_yml, + models_v2__malformed__table_copy_sql, + models_v2__malformed__table_summary_sql, models_v2__models__schema_yml, - models_v2__models__table_summary_sql, - models_v2__models__table_failure_summary_sql, + models_v2__models__table_copy_sql, models_v2__models__table_disabled_sql, - models_v2__models__table_failure_null_relation_sql, models_v2__models__table_failure_copy_sql, - models_v2__models__table_copy_sql, - models_v2__limit_null__schema_yml, - models_v2__limit_null__table_warning_limit_null_sql, - models_v2__limit_null__table_limit_null_sql, - models_v2__limit_null__table_failure_limit_null_sql, - models_v2__override_get_test_models__schema_yml, - models_v2__override_get_test_models__my_model_warning_sql, - models_v2__override_get_test_models__my_model_pass_sql, + models_v2__models__table_failure_null_relation_sql, + models_v2__models__table_failure_summary_sql, + models_v2__models__table_summary_sql, models_v2__override_get_test_models__my_model_failure_sql, - models_v2__override_get_test_models_fail__schema_yml, + models_v2__override_get_test_models__my_model_pass_sql, + models_v2__override_get_test_models__my_model_warning_sql, + models_v2__override_get_test_models__schema_yml, models_v2__override_get_test_models_fail__my_model_sql, - models_v2__malformed__schema_yml, - models_v2__malformed__table_summary_sql, - models_v2__malformed__table_copy_sql, - models_v2__custom_configs__schema_yml, - models_v2__custom_configs__table_copy_another_one_sql, - models_v2__custom_configs__table_copy_sql, - models_v2__custom_configs__table_copy_with_dots_sql, - models_v2__custom__schema_yml, - models_v2__custom__table_copy_sql, - models_v2__render_test_cli_arg_models__schema_yml, + models_v2__override_get_test_models_fail__schema_yml, models_v2__render_test_cli_arg_models__model_sql, - models_v2__render_test_configured_arg_models__schema_yml, + models_v2__render_test_cli_arg_models__schema_yml, models_v2__render_test_configured_arg_models__model_sql, - local_utils__dbt_project_yml, - local_utils__macros__datediff_sql, - local_utils__macros__current_timestamp_sql, - local_utils__macros__custom_test_sql, - ephemeral__schema_yml, - ephemeral__ephemeral_sql, - quote_required_models__schema_yml, + models_v2__render_test_configured_arg_models__schema_yml, + name_collision__base_extension_sql, + name_collision__base_sql, + name_collision__schema_yml, quote_required_models__model_again_sql, quote_required_models__model_noquote_sql, quote_required_models__model_sql, - case_sensitive_models__uppercase_SQL, - macro_resolution_order_macros__my_custom_test_sql, - macro_resolution_order_models__config_yml, - macro_resolution_order_models__my_model_sql, - alt_local_utils__macros__type_timestamp_sql, - all_quotes_schema__schema_yml, + quote_required_models__schema_yml, + seeds__some_seed_csv, + test_context_macros__custom_schema_tests_sql, + test_context_macros__my_test_sql, + test_context_macros__test_my_datediff_sql, + test_context_macros_namespaced__custom_schema_tests_sql, + test_context_macros_namespaced__my_test_sql, + test_context_models__model_a_sql, + test_context_models__model_b_sql, + test_context_models__model_c_sql, + test_context_models__schema_yml, + test_context_models_namespaced__model_a_sql, + test_context_models_namespaced__model_b_sql, + 
test_context_models_namespaced__model_c_sql, + test_context_models_namespaced__schema_yml, + test_context_where_subq_macros__custom_generic_test_sql, + test_context_where_subq_models__model_a_sql, + test_context_where_subq_models__schema_yml, + test_utils__dbt_project_yml, + test_utils__macros__current_timestamp_sql, + test_utils__macros__custom_test_sql, + wrong_specification_block__schema_yml, ) -from dbt.exceptions import ParsingError, CompilationError, DuplicateResourceNameError -from dbt.contracts.results import TestStatus class TestSchemaTests: @@ -906,7 +907,7 @@ def test_generic_test_collision( """These tests collide, since only the configs differ""" with pytest.raises(DuplicateResourceNameError) as exc: run_dbt() - assert "dbt found two tests with the name" in str(exc.value) + assert "dbt found two data_tests with the name" in str(exc.value) class TestGenericTestsConfigCustomMacros: diff --git a/tests/functional/selected_resources/test_selected_resources.py b/tests/functional/selected_resources/test_selected_resources.py index 550db700af7..25025c56286 100644 --- a/tests/functional/selected_resources/test_selected_resources.py +++ b/tests/functional/selected_resources/test_selected_resources.py @@ -1,10 +1,11 @@ import pytest + from dbt.tests.util import run_dbt from tests.functional.selected_resources.fixtures import ( - on_run_start_macro_assert_selected_models_expected_list, my_model1, my_model2, my_snapshot, + on_run_start_macro_assert_selected_models_expected_list, ) diff --git a/tests/functional/selectors/test_default_selectors.py b/tests/functional/selectors/test_default_selectors.py index 3be42bea132..fac60af8545 100644 --- a/tests/functional/selectors/test_default_selectors.py +++ b/tests/functional/selectors/test_default_selectors.py @@ -1,4 +1,5 @@ import pytest + from dbt.tests.util import run_dbt models__schema_yml = """ diff --git a/tests/functional/semantic_models/fixtures.py b/tests/functional/semantic_models/fixtures.py index 163a6851dbf..7788067e91d 100644 --- a/tests/functional/semantic_models/fixtures.py +++ b/tests/functional/semantic_models/fixtures.py @@ -1,5 +1,5 @@ -metricflow_time_spine_sql = """ -SELECT to_date('02/20/2023, 'mm/dd/yyyy') as date_day +simple_metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023', 'mm/dd/yyyy') as date_day """ models_people_sql = """ @@ -10,6 +10,18 @@ select 3 as id, 'Callum' as first_name, 'McCann' as last_name, 'emerald' as favorite_color, true as loves_dbt, 0 as tenure, current_timestamp as created_at """ +groups_yml = """ +version: 2 + +groups: + - name: some_group + owner: + email: me@gmail.com + - name: some_other_group + owner: + email: me@gmail.com +""" + models_people_metrics_yml = """ version: 2 @@ -24,12 +36,140 @@ my_meta: 'testing' """ +disabled_models_people_metrics_yml = """ +version: 2 + +metrics: + - name: number_of_people + config: + enabled: false + group: some_group + label: "Number of people" + description: Total count of people + type: simple + type_params: + measure: people + meta: + my_meta: 'testing' +""" + semantic_model_people_yml = """ version: 2 semantic_models: - name: semantic_people + label: "Semantic People" + model: ref('people') + dimensions: + - name: favorite_color + label: "Favorite Color" + type: categorical + - name: created_at + label: "Created At" + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + label: "Years Tenure" + agg: SUM + expr: tenure + - name: people + label: "People" + agg: count + expr: id + entities: + - name: id + label: 
"Primary ID" + type: primary + defaults: + agg_time_dimension: created_at +""" + +semantic_model_people_diff_name_yml = """ +version: 2 + +semantic_models: + - name: semantic_people_diff_name + label: "Semantic People" + model: ref('people') + dimensions: + - name: favorite_color + label: "Favorite Color" + type: categorical + - name: created_at + label: "Created At" + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + label: "Years Tenure" + agg: SUM + expr: tenure + - name: people + label: "People" + agg: count + expr: id + entities: + - name: id + label: "Primary ID" + type: primary + defaults: + agg_time_dimension: created_at +""" + +semantic_model_descriptions = """ +{% docs semantic_model_description %} foo {% enddocs %} +{% docs dimension_description %} bar {% enddocs %} +{% docs measure_description %} baz {% enddocs %} +{% docs entity_description %} qux {% enddocs %} +""" + +semantic_model_people_yml_with_docs = """ +version: 2 + +semantic_models: + - name: semantic_people + model: ref('people') + description: "{{ doc('semantic_model_description') }}" + dimensions: + - name: favorite_color + type: categorical + description: "{{ doc('dimension_description') }}" + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + description: "{{ doc('measure_description') }}" + - name: people + agg: count + expr: id + entities: + - name: id + description: "{{ doc('entity_description') }}" + type: primary + defaults: + agg_time_dimension: created_at +""" + +enabled_semantic_model_people_yml = """ +version: 2 + +semantic_models: + - name: semantic_people + label: "Semantic People" model: ref('people') + config: + enabled: true + group: some_group + meta: + my_meta: 'testing' + my_other_meta: 'testing more' dimensions: - name: favorite_color type: categorical @@ -50,3 +190,216 @@ defaults: agg_time_dimension: created_at """ + +disabled_semantic_model_people_yml = """ +version: 2 + +semantic_models: + - name: semantic_people + label: "Semantic People" + model: ref('people') + config: + enabled: false + dimensions: + - name: favorite_color + type: categorical + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + - name: people + agg: count + expr: id + entities: + - name: id + type: primary + defaults: + agg_time_dimension: created_at +""" + + +schema_yml = """models: + - name: fct_revenue + description: This is the model fct_revenue. It should be able to use doc blocks + +semantic_models: + - name: revenue + description: This is the revenue semantic model. 
It should be able to use doc blocks + model: ref('fct_revenue') + + defaults: + agg_time_dimension: ds + + measures: + - name: txn_revenue + expr: revenue + agg: sum + agg_time_dimension: ds + create_metric: true + - name: txn_revenue_with_label + label: "Transaction Revenue with label" + expr: revenue + agg: sum + agg_time_dimension: ds + create_metric: true + - name: sum_of_things + expr: 2 + agg: sum + agg_time_dimension: ds + - name: has_revenue + expr: true + agg: sum_boolean + agg_time_dimension: ds + - name: discrete_order_value_p99 + expr: order_total + agg: percentile + agg_time_dimension: ds + agg_params: + percentile: 0.99 + use_discrete_percentile: True + use_approximate_percentile: False + - name: test_agg_params_optional_are_empty + expr: order_total + agg: percentile + agg_time_dimension: ds + agg_params: + percentile: 0.99 + - name: test_non_additive + expr: txn_revenue + agg: sum + non_additive_dimension: + name: ds + window_choice: max + + dimensions: + - name: ds + type: time + expr: created_at + type_params: + time_granularity: day + + entities: + - name: user + type: foreign + expr: user_id + - name: id + type: primary + +metrics: + - name: simple_metric + label: Simple Metric + type: simple + type_params: + measure: sum_of_things +""" + +schema_without_semantic_model_yml = """models: + - name: fct_revenue + description: This is the model fct_revenue. It should be able to use doc blocks +""" + +fct_revenue_sql = """select + 1 as id, + 10 as user_id, + 1000 as revenue, + current_timestamp as created_at""" + +metricflow_time_spine_sql = """ +with days as ( + {{dbt_utils.date_spine('day' + , "to_date('01/01/2000','mm/dd/yyyy')" + , "to_date('01/01/2027','mm/dd/yyyy')" + ) + }} +), + +final as ( + select cast(date_day as date) as date_day + from days +) + +select * +from final +""" + +multi_sm_schema_yml = """ +models: + - name: fct_revenue + description: This is the model fct_revenue. + +semantic_models: + - name: revenue + description: This is the first semantic model. + model: ref('fct_revenue') + + defaults: + agg_time_dimension: ds + + measures: + - name: txn_revenue + expr: revenue + agg: sum + agg_time_dimension: ds + create_metric: true + - name: sum_of_things + expr: 2 + agg: sum + agg_time_dimension: ds + + dimensions: + - name: ds + type: time + expr: created_at + type_params: + time_granularity: day + + entities: + - name: user + type: foreign + expr: user_id + - name: id + type: primary + + - name: alt_revenue + description: This is the second revenue semantic model. 
+ model: ref('fct_revenue') + + defaults: + agg_time_dimension: ads + + measures: + - name: alt_txn_revenue + expr: revenue + agg: sum + agg_time_dimension: ads + create_metric: true + - name: alt_sum_of_things + expr: 2 + agg: sum + agg_time_dimension: ads + + dimensions: + - name: ads + type: time + expr: created_at + type_params: + time_granularity: day + + entities: + - name: user + type: foreign + expr: user_id + - name: id + type: primary + +metrics: + - name: simple_metric + label: Simple Metric + type: simple + type_params: + measure: sum_of_things +""" diff --git a/tests/functional/semantic_models/test_semantic_model_configs.py b/tests/functional/semantic_models/test_semantic_model_configs.py new file mode 100644 index 00000000000..cac38e92bd3 --- /dev/null +++ b/tests/functional/semantic_models/test_semantic_model_configs.py @@ -0,0 +1,227 @@ +import pytest + +from dbt.artifacts.resources import SemanticModelConfig +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest, run_dbt, update_config_file +from tests.functional.semantic_models.fixtures import ( + disabled_models_people_metrics_yml, + disabled_semantic_model_people_yml, + enabled_semantic_model_people_yml, + groups_yml, + metricflow_time_spine_sql, + models_people_metrics_yml, + models_people_sql, + semantic_model_people_yml, +) + + +# Test disabled config at semantic_models level in yaml file +class TestConfigYamlLevel: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": disabled_semantic_model_people_yml, + "people_metrics.yml": disabled_models_people_metrics_yml, + "groups.yml": groups_yml, + } + + def test_yaml_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "semantic_model.test.semantic_people" not in manifest.semantic_models + assert "semantic_model.test.semantic_people" in manifest.disabled + + assert "group.test.some_group" in manifest.groups + assert "semantic_model.test.semantic_people" not in manifest.groups + + +# Test disabled config at semantic_models level with a still enabled metric +class TestDisabledConfigYamlLevelEnabledMetric: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": disabled_semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + def test_yaml_level(self, project): + with pytest.raises( + ParsingError, + match="The measure `people` is referenced on disabled semantic model `semantic_people`.", + ): + run_dbt(["parse"]) + + +# Test disabling semantic model config but not metric config in dbt_project.yml +class TestMismatchesConfigProjectLevel: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "semantic-models": { + "test": { + "enabled": True, + } + } + } + + def test_project_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "semantic_model.test.semantic_people" in manifest.semantic_models + assert "group.test.some_group" in 
manifest.groups + assert manifest.semantic_models["semantic_model.test.semantic_people"].group is None + + new_enabled_config = { + "semantic-models": { + "test": { + "enabled": False, + } + } + } + update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") + with pytest.raises( + ParsingError, + match="The measure `people` is referenced on disabled semantic model `semantic_people`.", + ): + run_dbt(["parse"]) + + +# Test disabling semantic model and metric configs in dbt_project.yml +class TestConfigProjectLevel: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "semantic-models": { + "test": { + "enabled": True, + } + }, + "metrics": { + "test": { + "enabled": True, + } + }, + } + + def test_project_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "semantic_model.test.semantic_people" in manifest.semantic_models + assert "group.test.some_group" in manifest.groups + assert "group.test.some_other_group" in manifest.groups + assert manifest.semantic_models["semantic_model.test.semantic_people"].group is None + + new_group_config = { + "semantic-models": { + "test": { + "group": "some_other_group", + } + }, + } + update_config_file(new_group_config, project.project_root, "dbt_project.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + assert "semantic_model.test.semantic_people" in manifest.semantic_models + assert "group.test.some_other_group" in manifest.groups + assert "group.test.some_group" in manifest.groups + assert ( + manifest.semantic_models["semantic_model.test.semantic_people"].group + == "some_other_group" + ) + + new_enabled_config = { + "semantic-models": { + "test": { + "enabled": False, + } + }, + "metrics": { + "test": { + "enabled": False, + } + }, + } + update_config_file(new_enabled_config, project.project_root, "dbt_project.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + assert "semantic_model.test.semantic_people" not in manifest.semantic_models + assert "semantic_model.test.semantic_people" in manifest.disabled + + assert "group.test.some_group" in manifest.groups + assert "semantic_model.test.semantic_people" not in manifest.groups + + +# Test inheritance - set configs at project and semantic_model level - expect semantic_model level to win +class TestConfigsInheritence: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": enabled_semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"semantic-models": {"enabled": False}} + + def test_project_plus_yaml_level(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "semantic_model.test.semantic_people" in manifest.semantic_models + config_test_table = manifest.semantic_models.get( + "semantic_model.test.semantic_people" + ).config + + assert isinstance(config_test_table, SemanticModelConfig) + + +# test setting meta attributes in semantic model config +class TestMetaConfig:
@pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_models.yml": enabled_semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + "groups.yml": groups_yml, + } + + def test_meta_config(self, project): + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + sm_id = "semantic_model.test.semantic_people" + assert sm_id in manifest.semantic_models + sm_node = manifest.semantic_models[sm_id] + meta_expected = {"my_meta": "testing", "my_other_meta": "testing more"} + assert sm_node.config.meta == meta_expected diff --git a/tests/functional/semantic_models/test_semantic_model_parsing.py b/tests/functional/semantic_models/test_semantic_model_parsing.py index 6b0fe643691..607caafa697 100644 --- a/tests/functional/semantic_models/test_semantic_model_parsing.py +++ b/tests/functional/semantic_models/test_semantic_model_parsing.py @@ -2,106 +2,19 @@ import pytest -from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity - -from dbt.cli.main import dbtRunner from dbt.contracts.graph.manifest import Manifest -from dbt.events.base_types import BaseEvent -from dbt.tests.util import write_file - - -schema_yml = """models: - - name: fct_revenue - description: This is the model fct_revenue. It should be able to use doc blocks - -semantic_models: - - name: revenue - description: This is the revenue semantic model. It should be able to use doc blocks - model: ref('fct_revenue') - - defaults: - agg_time_dimension: ds - - measures: - - name: txn_revenue - expr: revenue - agg: sum - agg_time_dimension: ds - - name: sum_of_things - expr: 2 - agg: sum - agg_time_dimension: ds - - name: has_revenue - expr: true - agg: sum_boolean - agg_time_dimension: ds - - name: discrete_order_value_p99 - expr: order_total - agg: percentile - agg_time_dimension: ds - agg_params: - percentile: 0.99 - use_discrete_percentile: True - use_approximate_percentile: False - - name: test_agg_params_optional_are_empty - expr: order_total - agg: percentile - agg_time_dimension: ds - agg_params: - percentile: 0.99 - - dimensions: - - name: ds - type: time - expr: created_at - type_params: - time_granularity: day - - entities: - - name: user - type: foreign - expr: user_id - - name: id - type: primary - -metrics: - - name: records_with_revenue - label: "Number of records with revenue" - description: Total number of records with revenue - type: simple - type_params: - measure: has_revenue -""" - -schema_without_semantic_model_yml = """models: - - name: fct_revenue - description: This is the model fct_revenue. 
It should be able to use doc blocks -""" - -fct_revenue_sql = """select - 1 as id, - 10 as user_id, - 1000 as revenue, - current_timestamp as created_at""" - -metricflow_time_spine_sql = """ -with days as ( - {{dbt_utils.date_spine('day' - , "to_date('01/01/2000','mm/dd/yyyy')" - , "to_date('01/01/2027','mm/dd/yyyy')" - ) - }} -), - -final as ( - select cast(date_day as date) as date_day - from days +from dbt.tests.util import run_dbt, write_file +from dbt_common.events.base_types import BaseEvent +from dbt_semantic_interfaces.type_enums.time_granularity import TimeGranularity +from tests.functional.assertions.test_runner import dbtTestRunner +from tests.functional.semantic_models.fixtures import ( + fct_revenue_sql, + metricflow_time_spine_sql, + multi_sm_schema_yml, + schema_without_semantic_model_yml, + schema_yml, ) -select * -from final -""" - class TestSemanticModelParsing: @pytest.fixture(scope="class") @@ -113,7 +26,7 @@ def models(self): } def test_semantic_model_parsing(self, project): - runner = dbtRunner() + runner = dbtTestRunner() result = runner.invoke(["parse"]) assert result.success assert isinstance(result.result, Manifest) @@ -125,14 +38,21 @@ def test_semantic_model_parsing(self, project): semantic_model.node_relation.relation_name == f'"dbt"."{project.test_schema}"."fct_revenue"' ) - assert len(semantic_model.measures) == 5 + assert len(semantic_model.measures) == 7 + # manifest should have two metrics created from measures + assert len(manifest.metrics) == 3 + metric = manifest.metrics["metric.test.txn_revenue"] + assert metric.name == "txn_revenue" + metric_with_label = manifest.metrics["metric.test.txn_revenue_with_label"] + assert metric_with_label.name == "txn_revenue_with_label" + assert metric_with_label.label == "Transaction Revenue with label" def test_semantic_model_error(self, project): # Next, modify the default schema.yml to remove the semantic model. 
error_schema_yml = schema_yml.replace("sum_of_things", "has_revenue") write_file(error_schema_yml, project.project_root, "models", "schema.yml") events: List[BaseEvent] = [] - runner = dbtRunner(callbacks=[events.append]) + runner = dbtTestRunner(callbacks=[events.append]) result = runner.invoke(["parse"]) assert not result.success @@ -152,7 +72,7 @@ def models(self): def test_semantic_model_changed_partial_parsing(self, project): # First, use the default schema.yml to define our semantic model, and # run the dbt parse command - runner = dbtRunner() + runner = dbtTestRunner() result = runner.invoke(["parse"]) assert result.success @@ -173,7 +93,7 @@ def test_semantic_model_changed_partial_parsing(self, project): def test_semantic_model_deleted_partial_parsing(self, project): # First, use the default schema.yml to define our semantic model, and # run the dbt parse command - runner = dbtRunner() + runner = dbtTestRunner() result = runner.invoke(["parse"]) assert result.success assert "semantic_model.test.revenue" in result.result.semantic_models @@ -187,3 +107,77 @@ def test_semantic_model_deleted_partial_parsing(self, project): # Finally, verify that the manifest reflects the deletion assert "semantic_model.test.revenue" not in result.result.semantic_models + + def test_semantic_model_flipping_create_metric_partial_parsing(self, project): + generated_metric = "metric.test.txn_revenue" + generated_metric_with_label = "metric.test.txn_revenue_with_label" + # First, use the default schema.yml to define our semantic model, and + # run the dbt parse command + write_file(schema_yml, project.project_root, "models", "schema.yml") + runner = dbtTestRunner() + result = runner.invoke(["parse"]) + assert result.success + + # Verify the metric created by `create_metric: true` exists + metric = result.result.metrics[generated_metric] + assert metric.name == "txn_revenue" + assert metric.label == "txn_revenue" + + metric_with_label = result.result.metrics[generated_metric_with_label] + assert metric_with_label.name == "txn_revenue_with_label" + assert metric_with_label.label == "Transaction Revenue with label" + + # --- Next, modify the default schema.yml to have no `create_metric: true` --- + no_create_metric_schema_yml = schema_yml.replace( + "create_metric: true", "create_metric: false" + ) + write_file(no_create_metric_schema_yml, project.project_root, "models", "schema.yml") + + # Now, run the dbt parse command again. + result = runner.invoke(["parse"]) + assert result.success + + # Verify the metric originally created by `create_metric: true` was removed + assert result.result.metrics.get(generated_metric) is None + + # Verify that partial parsing didn't clobber the normal metric + assert result.result.metrics.get("metric.test.simple_metric") is not None + + # --- Now bring it back --- + create_metric_schema_yml = schema_yml.replace( + "create_metric: false", "create_metric: true" + ) + write_file(create_metric_schema_yml, project.project_root, "models", "schema.yml") + + # Now, run the dbt parse command again. 
+ result = runner.invoke(["parse"]) + assert result.success + + # Verify the metric originally created by `create_metric: true` was re-created + metric = result.result.metrics[generated_metric] + assert metric.name == "txn_revenue" + + +class TestSemanticModelPartialParsingGeneratedMetrics: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": multi_sm_schema_yml, + "fct_revenue.sql": fct_revenue_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + } + + def test_generated_metrics(self, project): + manifest = run_dbt(["parse"]) + expected = { + "metric.test.simple_metric", + "metric.test.txn_revenue", + "metric.test.alt_txn_revenue", + } + assert set(manifest.metrics.keys()) == expected + + # change description of 'revenue' semantic model + modified_schema_yml = multi_sm_schema_yml.replace("first", "FIRST") + write_file(modified_schema_yml, project.project_root, "models", "schema.yml") + manifest = run_dbt(["parse"]) + assert set(manifest.metrics.keys()) == expected diff --git a/tests/functional/semantic_models/test_semantic_models.py b/tests/functional/semantic_models/test_semantic_models.py index 627aae9b7a7..f773a4f8c5e 100644 --- a/tests/functional/semantic_models/test_semantic_models.py +++ b/tests/functional/semantic_models/test_semantic_models.py @@ -2,14 +2,15 @@ from dbt.contracts.graph.manifest import Manifest from dbt.exceptions import CompilationError -from dbt.tests.util import run_dbt - - +from dbt.tests.util import run_dbt, write_file from tests.functional.semantic_models.fixtures import ( + models_people_metrics_yml, models_people_sql, - metricflow_time_spine_sql, + semantic_model_descriptions, + semantic_model_people_diff_name_yml, semantic_model_people_yml, - models_people_metrics_yml, + semantic_model_people_yml_with_docs, + simple_metricflow_time_spine_sql, ) @@ -18,7 +19,7 @@ class TestSemanticModelDependsOn: def models(self): return { "people.sql": models_people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, + "metricflow_time_spine.sql": simple_metricflow_time_spine_sql, "semantic_models.yml": semantic_model_people_yml, "people_metrics.yml": models_people_metrics_yml, } @@ -36,12 +37,33 @@ def test_depends_on(self, project): ) + +class TestSemanticModelNestedDocs: + @pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": simple_metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml_with_docs, + "people_metrics.yml": models_people_metrics_yml, + "docs.md": semantic_model_descriptions, + } + + def test_depends_on(self, project): + manifest = run_dbt(["parse"]) + node = manifest.semantic_models["semantic_model.test.semantic_people"] + + assert node.description == "foo" + assert node.dimensions[0].description == "bar" + assert node.measures[0].description == "baz" + assert node.entities[0].description == "qux" + + class TestSemanticModelUnknownModel: @pytest.fixture(scope="class") def models(self): return { "not_people.sql": models_people_sql, - "metricflow_time_spine.sql": metricflow_time_spine_sql, + "metricflow_time_spine.sql": simple_metricflow_time_spine_sql, "semantic_models.yml": semantic_model_people_yml, "people_metrics.yml": models_people_metrics_yml, } @@ -50,3 +72,27 @@ def test_unknown_model_raises_issue(self, project): with pytest.raises(CompilationError) as excinfo: run_dbt(["parse"]) assert "depends on a node named 'people' which was not found" in str(excinfo.value) + + +class TestSemanticModelPartialParsing: +
@pytest.fixture(scope="class") + def models(self): + return { + "people.sql": models_people_sql, + "metricflow_time_spine.sql": simple_metricflow_time_spine_sql, + "semantic_models.yml": semantic_model_people_yml, + "people_metrics.yml": models_people_metrics_yml, + } + + def test_semantic_model_deleted_partial_parsing(self, project): + # First, use the default semantic_models.yml to define our semantic model, and + # run the dbt parse command + run_dbt(["parse"]) + # Next, modify the default semantic_models.yml to rename the semantic model. + write_file( + semantic_model_people_diff_name_yml, + project.project_root, + "models", + "semantic_models.yml", + ) + run_dbt(["compile"]) diff --git a/tests/functional/severity/test_severity.py b/tests/functional/severity/test_severity.py index 8a76ef6ac24..836d44426f1 100644 --- a/tests/functional/severity/test_severity.py +++ b/tests/functional/severity/test_severity.py @@ -16,14 +16,14 @@ - name: sample_seed columns: - name: email - tests: + data_tests: - not_null: severity: "{{ 'error' if var('strict', false) else 'warn' }}" models: - name: sample_model columns: - name: email - tests: + data_tests: - not_null: severity: "{{ 'error' if var('strict', false) else 'warn' }}" """ diff --git a/tests/functional/show/fixtures.py b/tests/functional/show/fixtures.py index 85bfcd26c29..1fc9b9fd797 100644 --- a/tests/functional/show/fixtures.py +++ b/tests/functional/show/fixtures.py @@ -2,6 +2,31 @@ select * from {{ ref('sample_seed') }} """ +models__sample_number_model = """ +select + cast(1.0 as int) as float_to_int_field, + 3.0 as float_field, + 4.3 as float_with_dec_field, + 5 as int_field +""" + +models__sample_number_model_with_nulls = """ +select + cast(1.0 as int) as float_to_int_field, + 3.0 as float_field, + 4.3 as float_with_dec_field, + 5 as int_field + +union all + +select + cast(null as int) as float_to_int_field, + cast(null as float) as float_field, + cast(null as float) as float_with_dec_field, + cast(null as int) as int_field + +""" + models__second_model = """ select sample_num as col_one, @@ -12,7 +37,7 @@ models__sql_header = """ {% call set_sql_header(config) %} -set session time zone 'Asia/Kolkata'; +set session time zone '{{ var("timezone", "Europe/Paris") }}'; {%- endcall %} select current_setting('timezone') as timezone """ diff --git a/tests/functional/show/test_show.py b/tests/functional/show/test_show.py index 4429050191c..b1aa16210b8 100644 --- a/tests/functional/show/test_show.py +++ b/tests/functional/show/test_show.py @@ -1,119 +1,156 @@ import pytest -from dbt.exceptions import DbtRuntimeError, Exception as DbtException -from dbt.tests.util import run_dbt_and_capture, run_dbt +from dbt.tests.util import run_dbt, run_dbt_and_capture +from dbt_common.exceptions import DbtBaseException as DbtException +from dbt_common.exceptions import DbtRuntimeError from tests.functional.show.fixtures import ( - models__second_ephemeral_model, - seeds__sample_seed, + models__ephemeral_model, models__sample_model, + models__sample_number_model, + models__sample_number_model_with_nulls, + models__second_ephemeral_model, models__second_model, - models__ephemeral_model, - schema_yml, - models__sql_header, private_model_yml, + schema_yml, + seeds__sample_seed, ) -class TestShow: +class ShowBase: @pytest.fixture(scope="class") def models(self): return { "sample_model.sql": models__sample_model, + "sample_number_model.sql": models__sample_number_model, + "sample_number_model_with_nulls.sql": models__sample_number_model_with_nulls, "second_model.sql":
models__second_model, "ephemeral_model.sql": models__ephemeral_model, - "sql_header.sql": models__sql_header, } @pytest.fixture(scope="class") def seeds(self): return {"sample_seed.csv": seeds__sample_seed} + @pytest.fixture(scope="class", autouse=True) + def setup(self, project): + run_dbt(["seed"]) + + +class TestShowNone(ShowBase): def test_none(self, project): with pytest.raises( DbtRuntimeError, match="Either --select or --inline must be passed to show" ): - run_dbt(["seed"]) run_dbt(["show"]) + +class TestShowSelectText(ShowBase): def test_select_model_text(self, project): run_dbt(["build"]) - (results, log_output) = run_dbt_and_capture(["show", "--select", "second_model"]) + (_, log_output) = run_dbt_and_capture(["show", "--select", "second_model"]) assert "Previewing node 'sample_model'" not in log_output assert "Previewing node 'second_model'" in log_output assert "col_one" in log_output assert "col_two" in log_output assert "answer" in log_output + +class TestShowMultiple(ShowBase): def test_select_multiple_model_text(self, project): run_dbt(["build"]) - (results, log_output) = run_dbt_and_capture( - ["show", "--select", "sample_model second_model"] - ) + (_, log_output) = run_dbt_and_capture(["show", "--select", "sample_model second_model"]) assert "Previewing node 'sample_model'" in log_output assert "sample_num" in log_output assert "sample_bool" in log_output + +class TestShowSingle(ShowBase): def test_select_single_model_json(self, project): run_dbt(["build"]) - (results, log_output) = run_dbt_and_capture( + (_, log_output) = run_dbt_and_capture( ["show", "--select", "sample_model", "--output", "json"] ) assert "Previewing node 'sample_model'" not in log_output assert "sample_num" in log_output assert "sample_bool" in log_output + +class TestShowNumeric(ShowBase): + def test_numeric_values(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture( + ["show", "--select", "sample_number_model", "--output", "json"] + ) + # json log output needs the escapes removed for string matching + log_output = log_output.replace("\\", "") + assert "Previewing node 'sample_number_model'" not in log_output + assert '"float_to_int_field": 1.0' not in log_output + assert '"float_to_int_field": 1' in log_output + assert '"float_field": 3.0' in log_output + assert '"float_with_dec_field": 4.3' in log_output + assert '"int_field": 5' in log_output + assert '"int_field": 5.0' not in log_output + + +class TestShowNumericNulls(ShowBase): + def test_numeric_values_with_nulls(self, project): + run_dbt(["build"]) + (_, log_output) = run_dbt_and_capture( + ["show", "--select", "sample_number_model_with_nulls", "--output", "json"] + ) + # json log output needs the escapes removed for string matching + log_output = log_output.replace("\\", "") + assert "Previewing node 'sample_number_model_with_nulls'" not in log_output + assert '"float_to_int_field": 1.0' not in log_output + assert '"float_to_int_field": 1' in log_output + assert '"float_field": 3.0' in log_output + assert '"float_with_dec_field": 4.3' in log_output + assert '"int_field": 5' in log_output + assert '"int_field": 5.0' not in log_output + + +class TestShowInline(ShowBase): def test_inline_pass(self, project): run_dbt(["build"]) - (results, log_output) = run_dbt_and_capture( + (_, log_output) = run_dbt_and_capture( ["show", "--inline", "select * from {{ ref('sample_model') }}"] ) assert "Previewing inline node" in log_output assert "sample_num" in log_output assert "sample_bool" in log_output + +class 
TestShowInlineFail(ShowBase): def test_inline_fail(self, project): with pytest.raises(DbtException, match="Error parsing inline query"): run_dbt(["show", "--inline", "select * from {{ ref('third_model') }}"]) + +class TestShowInlineFailDB(ShowBase): def test_inline_fail_database_error(self, project): with pytest.raises(DbtRuntimeError, match="Database Error"): run_dbt(["show", "--inline", "slect asdlkjfsld;j"]) + +class TestShowEphemeral(ShowBase): def test_ephemeral_model(self, project): run_dbt(["build"]) - (results, log_output) = run_dbt_and_capture(["show", "--select", "ephemeral_model"]) + (_, log_output) = run_dbt_and_capture(["show", "--select", "ephemeral_model"]) assert "col_deci" in log_output + +class TestShowSecondEphemeral(ShowBase): def test_second_ephemeral_model(self, project): run_dbt(["build"]) - (results, log_output) = run_dbt_and_capture( - ["show", "--inline", models__second_ephemeral_model] - ) + (_, log_output) = run_dbt_and_capture(["show", "--inline", models__second_ephemeral_model]) assert "col_hundo" in log_output - @pytest.mark.parametrize( - "args,expected", - [ - ([], 5), # default limit - (["--limit", 3], 3), # fetch 3 rows - (["--limit", -1], 7), # fetch all rows - ], - ) - def test_limit(self, project, args, expected): - run_dbt(["build"]) - dbt_args = ["show", "--inline", models__second_ephemeral_model, *args] - results, log_output = run_dbt_and_capture(dbt_args) - assert len(results.results[0].agate_table) == expected +class TestShowSeed(ShowBase): def test_seed(self, project): - (results, log_output) = run_dbt_and_capture(["show", "--select", "sample_seed"]) + (_, log_output) = run_dbt_and_capture(["show", "--select", "sample_seed"]) assert "Previewing node 'sample_seed'" in log_output - def test_sql_header(self, project): - run_dbt(["build"]) - (results, log_output) = run_dbt_and_capture(["show", "--select", "sql_header"]) - assert "Asia/Kolkata" in log_output - class TestShowModelVersions: @pytest.fixture(scope="class") diff --git a/tests/functional/simple_snapshot/data/invalidate_postgres.sql b/tests/functional/snapshots/data/invalidate_postgres.sql similarity index 100% rename from tests/functional/simple_snapshot/data/invalidate_postgres.sql rename to tests/functional/snapshots/data/invalidate_postgres.sql diff --git a/tests/functional/snapshots/data/seed_cn.sql b/tests/functional/snapshots/data/seed_cn.sql new file mode 100644 index 00000000000..089200afa47 --- /dev/null +++ b/tests/functional/snapshots/data/seed_cn.sql @@ -0,0 +1,82 @@ +create table {database}.{schema}.seed ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + updated_at TIMESTAMP WITHOUT TIME ZONE +); + +create table {database}.{schema}.snapshot_expected ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + + -- snapshotting fields + updated_at TIMESTAMP WITHOUT TIME ZONE, + test_valid_from TIMESTAMP WITHOUT TIME ZONE, + test_valid_to TIMESTAMP WITHOUT TIME ZONE, + test_scd_id TEXT, + test_updated_at TIMESTAMP WITHOUT TIME ZONE +); + + +-- seed inserts +-- use the same email for two users to verify that duplicated check_cols values +-- are handled appropriately +insert into {database}.{schema}.seed (id, first_name, last_name, email, gender, ip_address, updated_at) values +(1, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), +(2, 'Arthur', 'Kelly', '(not provided)', 'Male', 
'62.56.24.215', '2015-10-28 16:22:15'), +(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'), +(4, 'Ralph', 'Turner', 'rturner3@hp.com', 'Male', '157.83.76.114', '2016-08-08 00:06:51'), +(5, 'Laura', 'Gonzales', 'lgonzales4@howstuffworks.com', 'Female', '30.54.105.168', '2016-09-01 08:25:38'), +(6, 'Katherine', 'Lopez', 'klopez5@yahoo.co.jp', 'Female', '169.138.46.89', '2016-08-30 18:52:11'), +(7, 'Jeremy', 'Hamilton', 'jhamilton6@mozilla.org', 'Male', '231.189.13.133', '2016-07-17 02:09:46'), +(8, 'Heather', 'Rose', 'hrose7@goodreads.com', 'Female', '87.165.201.65', '2015-12-29 22:03:56'), +(9, 'Gregory', 'Kelly', 'gkelly8@trellian.com', 'Male', '154.209.99.7', '2016-03-24 21:18:16'), +(10, 'Rachel', 'Lopez', 'rlopez9@themeforest.net', 'Female', '237.165.82.71', '2016-08-20 15:44:49'), +(11, 'Donna', 'Welch', 'dwelcha@shutterfly.com', 'Female', '103.33.110.138', '2016-02-27 01:41:48'), +(12, 'Russell', 'Lawrence', 'rlawrenceb@qq.com', 'Male', '189.115.73.4', '2016-06-11 03:07:09'), +(13, 'Michelle', 'Montgomery', 'mmontgomeryc@scientificamerican.com', 'Female', '243.220.95.82', '2016-06-18 16:27:19'), +(14, 'Walter', 'Castillo', 'wcastillod@pagesperso-orange.fr', 'Male', '71.159.238.196', '2016-10-06 01:55:44'), +(15, 'Robin', 'Mills', 'rmillse@vkontakte.ru', 'Female', '172.190.5.50', '2016-10-31 11:41:21'), +(16, 'Raymond', 'Holmes', 'rholmesf@usgs.gov', 'Male', '148.153.166.95', '2016-10-03 08:16:38'), +(17, 'Gary', 'Bishop', 'gbishopg@plala.or.jp', 'Male', '161.108.182.13', '2016-08-29 19:35:20'), +(18, 'Anna', 'Riley', 'arileyh@nasa.gov', 'Female', '253.31.108.22', '2015-12-11 04:34:27'), +(19, 'Sarah', 'Knight', 'sknighti@foxnews.com', 'Female', '222.220.3.177', '2016-09-26 00:49:06'), +(20, 'Phyllis', 'Fox', null, 'Female', '163.191.232.95', '2016-08-21 10:35:19'); + + +-- populate snapshot table +insert into {database}.{schema}.snapshot_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + test_valid_from, + test_valid_to, + test_updated_at, + test_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as test_valid_from, + null::timestamp as test_valid_to, + updated_at as test_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as test_scd_id +from {database}.{schema}.seed; diff --git a/tests/functional/simple_snapshot/data/seed_pg.sql b/tests/functional/snapshots/data/seed_pg.sql similarity index 100% rename from tests/functional/simple_snapshot/data/seed_pg.sql rename to tests/functional/snapshots/data/seed_pg.sql diff --git a/tests/functional/simple_snapshot/data/shared_macros.sql b/tests/functional/snapshots/data/shared_macros.sql similarity index 100% rename from tests/functional/simple_snapshot/data/shared_macros.sql rename to tests/functional/snapshots/data/shared_macros.sql diff --git a/tests/functional/simple_snapshot/data/update.sql b/tests/functional/snapshots/data/update.sql similarity index 100% rename from tests/functional/simple_snapshot/data/update.sql rename to tests/functional/snapshots/data/update.sql diff --git a/tests/functional/simple_snapshot/fixtures.py b/tests/functional/snapshots/fixtures.py similarity index 99% rename from tests/functional/simple_snapshot/fixtures.py rename to tests/functional/snapshots/fixtures.py index 6b3ecc2b101..a94f0c04875 100644 --- a/tests/functional/simple_snapshot/fixtures.py +++ b/tests/functional/snapshots/fixtures.py @@ -86,10 +86,9 @@ 
models__schema_yml = """ -version: 2 snapshots: - name: snapshot_actual - tests: + data_tests: - mutually_exclusive_ranges config: meta: @@ -97,10 +96,9 @@ """ models__schema_with_target_schema_yml = """ -version: 2 snapshots: - name: snapshot_actual - tests: + data_tests: - mutually_exclusive_ranges config: meta: diff --git a/tests/functional/simple_snapshot/test_basic_snapshot.py b/tests/functional/snapshots/test_basic_snapshot.py similarity index 96% rename from tests/functional/simple_snapshot/test_basic_snapshot.py rename to tests/functional/snapshots/test_basic_snapshot.py index ff4799f10ab..ac6c3831642 100644 --- a/tests/functional/simple_snapshot/test_basic_snapshot.py +++ b/tests/functional/snapshots/test_basic_snapshot.py @@ -1,20 +1,27 @@ import os from datetime import datetime -import pytz + import pytest -from dbt.tests.util import run_dbt, check_relations_equal, relation_from_name, write_file -from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__schema_with_target_schema_yml, +import pytz + +from dbt.tests.util import ( + check_relations_equal, + relation_from_name, + run_dbt, + write_file, +) +from tests.functional.snapshots.fixtures import ( + macros__test_no_overlaps_sql, + macros_custom_snapshot__custom_sql, models__ref_snapshot_sql, - seeds__seed_newcol_csv, + models__schema_with_target_schema_yml, + models__schema_yml, seeds__seed_csv, - snapshots_pg__snapshot_sql, + seeds__seed_newcol_csv, snapshots_pg__snapshot_no_target_schema_sql, - macros__test_no_overlaps_sql, - macros_custom_snapshot__custom_sql, - snapshots_pg_custom_namespaced__snapshot_sql, + snapshots_pg__snapshot_sql, snapshots_pg_custom__snapshot_sql, + snapshots_pg_custom_namespaced__snapshot_sql, ) snapshots_check_col__snapshot_sql = """ @@ -135,7 +142,7 @@ def project_config_update(self, unique_schema): return { "snapshots": { "test": { - "target_schema": unique_schema + "_alt", + "schema": "alt", } } } @@ -146,6 +153,8 @@ def test_target_schema(self, project): # ensure that the schema in the snapshot node is the same as target_schema snapshot_id = "snapshot.test.snapshot_actual" snapshot_node = manifest.nodes[snapshot_id] + # The schema field will be changed by the default "generate_schema_name" + # to append an underscore plus the configured schema of "alt".
assert snapshot_node.schema == f"{project.test_schema}_alt" assert ( snapshot_node.relation_name diff --git a/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py b/tests/functional/snapshots/test_changing_check_cols_snapshot.py similarity index 98% rename from tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py rename to tests/functional/snapshots/test_changing_check_cols_snapshot.py index 0aee4aedb99..fa5edb51410 100644 --- a/tests/functional/simple_snapshot/test_changing_check_cols_snapshot.py +++ b/tests/functional/snapshots/test_changing_check_cols_snapshot.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal + +from dbt.tests.util import check_relations_equal, run_dbt snapshot_sql = """ {% snapshot snapshot_check_cols_new_column %} diff --git a/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py b/tests/functional/snapshots/test_changing_strategy_snapshot.py similarity index 97% rename from tests/functional/simple_snapshot/test_changing_strategy_snapshot.py rename to tests/functional/snapshots/test_changing_strategy_snapshot.py index 73afa1fc244..e02df65938d 100644 --- a/tests/functional/simple_snapshot/test_changing_strategy_snapshot.py +++ b/tests/functional/snapshots/test_changing_strategy_snapshot.py @@ -1,7 +1,7 @@ import pytest -from dbt.tests.util import run_dbt -from tests.functional.simple_snapshot.fixtures import models_slow__gen_sql +from dbt.tests.util import run_dbt +from tests.functional.snapshots.fixtures import models_slow__gen_sql test_snapshots_changing_strategy__test_snapshot_sql = """ diff --git a/tests/functional/simple_snapshot/test_check_cols_snapshot.py b/tests/functional/snapshots/test_check_cols_snapshot.py similarity index 98% rename from tests/functional/simple_snapshot/test_check_cols_snapshot.py rename to tests/functional/snapshots/test_check_cols_snapshot.py index bb32f27721c..2b38d78ceab 100644 --- a/tests/functional/simple_snapshot/test_check_cols_snapshot.py +++ b/tests/functional/snapshots/test_check_cols_snapshot.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt snapshot_sql = """ {% snapshot check_cols_cycle %} @@ -100,7 +100,7 @@ def tests(): return {"my_test.sql": snapshot_test_sql} -def test_simple_snapshot(project): +def test_snapshots(project): results = run_dbt(["snapshot", "--vars", "version: 1"]) assert len(results) == 1 diff --git a/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py b/tests/functional/snapshots/test_check_cols_updated_at_snapshot.py similarity index 97% rename from tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py rename to tests/functional/snapshots/test_check_cols_updated_at_snapshot.py index c72fb1a1c73..73a76ffe716 100644 --- a/tests/functional/simple_snapshot/test_check_cols_updated_at_snapshot.py +++ b/tests/functional/snapshots/test_check_cols_updated_at_snapshot.py @@ -1,5 +1,6 @@ import pytest -from dbt.tests.util import run_dbt, check_relations_equal + +from dbt.tests.util import check_relations_equal, run_dbt snapshot_sql = """ {% snapshot snapshot_check_cols_updated_at_actual %} @@ -73,7 +74,7 @@ def project_config_update(): } -def test_simple_snapshot(project): +def test_snapshots(project): """ Test that the `dbt_updated_at` column reflects the `updated_at` timestamp expression in the config. 
diff --git a/tests/functional/simple_snapshot/test_comment_ending_snapshot.py b/tests/functional/snapshots/test_comment_ending_snapshot.py similarity index 99% rename from tests/functional/simple_snapshot/test_comment_ending_snapshot.py rename to tests/functional/snapshots/test_comment_ending_snapshot.py index 257afb042be..c0d482d9b7d 100644 --- a/tests/functional/simple_snapshot/test_comment_ending_snapshot.py +++ b/tests/functional/snapshots/test_comment_ending_snapshot.py @@ -1,6 +1,8 @@ +import os + import pytest + from dbt.tests.util import run_dbt -import os snapshots_with_comment_at_end__snapshot_sql = """ {% snapshot snapshot_actual %} diff --git a/tests/functional/simple_snapshot/test_cross_schema_snapshot.py b/tests/functional/snapshots/test_cross_schema_snapshot.py similarity index 95% rename from tests/functional/simple_snapshot/test_cross_schema_snapshot.py rename to tests/functional/snapshots/test_cross_schema_snapshot.py index 874f4ef2007..07fc8020773 100644 --- a/tests/functional/simple_snapshot/test_cross_schema_snapshot.py +++ b/tests/functional/snapshots/test_cross_schema_snapshot.py @@ -1,14 +1,15 @@ import os + import pytest + from dbt.tests.util import run_dbt -from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, +from tests.functional.snapshots.fixtures import ( macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, snapshots_pg__snapshot_sql, ) - NUM_SNAPSHOT_MODELS = 1 diff --git a/tests/functional/simple_snapshot/test_hard_delete_snapshot.py b/tests/functional/snapshots/test_hard_delete_snapshot.py similarity index 97% rename from tests/functional/simple_snapshot/test_hard_delete_snapshot.py rename to tests/functional/snapshots/test_hard_delete_snapshot.py index 4b4b9e281a6..93cb524c59a 100644 --- a/tests/functional/simple_snapshot/test_hard_delete_snapshot.py +++ b/tests/functional/snapshots/test_hard_delete_snapshot.py @@ -1,16 +1,17 @@ import os from datetime import datetime, timedelta -import pytz + import pytest -from dbt.tests.util import run_dbt, check_relations_equal -from dbt.tests.adapter.utils.test_current_timestamp import is_aware -from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, +import pytz + +from dbt.tests.util import check_relations_equal, run_dbt +from tests.functional.snapshots.fixtures import ( macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, snapshots_pg__snapshot_sql, ) - +from tests.functional.utils import is_aware # These tests uses the same seed data, containing 20 records of which we hard delete the last 10. # These deleted records set the dbt_valid_to to time the snapshot was ran. 
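# Illustrative standalone sketch (not part of the test files): how the snapshot metadata
# columns behave for a hard-deleted source record -- the existing snapshot row keeps its
# history and only has dbt_valid_to closed out at the time the snapshot ran.
from datetime import datetime, timezone

def close_out_hard_deleted(snapshot_row, snapshot_ran_at):
    # a record that disappeared from the source keeps dbt_valid_from; only dbt_valid_to changes
    return {**snapshot_row, "dbt_valid_to": snapshot_ran_at}

row = {"id": 20, "dbt_valid_from": datetime(2016, 1, 1, tzinfo=timezone.utc), "dbt_valid_to": None}
print(close_out_hard_deleted(row, datetime.now(timezone.utc)))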
diff --git a/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py b/tests/functional/snapshots/test_invalid_namespace_snapshot.py similarity index 96% rename from tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py rename to tests/functional/snapshots/test_invalid_namespace_snapshot.py index 40b91edf051..31060bbba3f 100644 --- a/tests/functional/simple_snapshot/test_invalid_namespace_snapshot.py +++ b/tests/functional/snapshots/test_invalid_namespace_snapshot.py @@ -1,13 +1,15 @@ import os + import pytest + from dbt.tests.util import run_dbt -from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, +from tests.functional.snapshots.fixtures import ( macros__test_no_overlaps_sql, macros_custom_snapshot__custom_sql, - seeds__seed_newcol_csv, + models__ref_snapshot_sql, + models__schema_yml, seeds__seed_csv, + seeds__seed_newcol_csv, ) NUM_SNAPSHOT_MODELS = 1 diff --git a/tests/functional/simple_snapshot/test_long_text_snapshot.py b/tests/functional/snapshots/test_long_text_snapshot.py similarity index 96% rename from tests/functional/simple_snapshot/test_long_text_snapshot.py rename to tests/functional/snapshots/test_long_text_snapshot.py index 332bc384f61..495bdca46b2 100644 --- a/tests/functional/simple_snapshot/test_long_text_snapshot.py +++ b/tests/functional/snapshots/test_long_text_snapshot.py @@ -1,9 +1,10 @@ import pytest + from dbt.tests.util import run_dbt -from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, +from tests.functional.snapshots.fixtures import ( macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, ) seed_longtext_sql = """ diff --git a/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py b/tests/functional/snapshots/test_missing_strategy_snapshot.py similarity index 64% rename from tests/functional/simple_snapshot/test_missing_strategy_snapshot.py rename to tests/functional/snapshots/test_missing_strategy_snapshot.py index dfb51f7992e..6f550d05a28 100644 --- a/tests/functional/simple_snapshot/test_missing_strategy_snapshot.py +++ b/tests/functional/snapshots/test_missing_strategy_snapshot.py @@ -1,20 +1,19 @@ import pytest + from dbt.tests.util import run_dbt -from dbt.exceptions import ParsingError -from tests.functional.simple_snapshot.fixtures import ( - models__schema_yml, - models__ref_snapshot_sql, +from dbt_common.dataclass_schema import ValidationError +from tests.functional.snapshots.fixtures import ( macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, ) snapshots_invalid__snapshot_sql = """ -{# make sure to never name this anything with `target_schema` in the name, or the test will be invalid! 
#} -{% snapshot missing_field_target_underscore_schema %} - {# missing the mandatory target_schema parameter #} +{% snapshot snapshot_actual %} + {# missing the mandatory strategy parameter #} {{ config( unique_key='id || ' ~ "'-'" ~ ' || first_name', - strategy='timestamp', updated_at='updated_at', ) }} @@ -43,7 +42,7 @@ def macros(): def test_missing_strategy(project): - with pytest.raises(ParsingError) as exc: + with pytest.raises(ValidationError) as exc: run_dbt(["compile"], expect_pass=False) - assert "Snapshots must be configured with a 'strategy'" in str(exc.value) + assert "Snapshots must be configured with a 'strategy' and 'unique_key'" in str(exc.value) diff --git a/tests/functional/simple_snapshot/test_renamed_source_snapshot.py b/tests/functional/snapshots/test_renamed_source_snapshot.py similarity index 97% rename from tests/functional/simple_snapshot/test_renamed_source_snapshot.py rename to tests/functional/snapshots/test_renamed_source_snapshot.py index 6e71ce7cad5..95441aeaaeb 100644 --- a/tests/functional/simple_snapshot/test_renamed_source_snapshot.py +++ b/tests/functional/snapshots/test_renamed_source_snapshot.py @@ -1,13 +1,13 @@ import pytest + from dbt.tests.util import run_dbt -from tests.functional.simple_snapshot.fixtures import ( - seeds__seed_newcol_csv, - seeds__seed_csv, +from tests.functional.snapshots.fixtures import ( macros__test_no_overlaps_sql, macros_custom_snapshot__custom_sql, + seeds__seed_csv, + seeds__seed_newcol_csv, ) - snapshots_checkall__snapshot_sql = """ {% snapshot my_snapshot %} {{ config(check_cols='all', unique_key='id', strategy='check', target_database=database, target_schema=schema) }} diff --git a/tests/functional/simple_snapshot/test_select_exclude_snapshot.py b/tests/functional/snapshots/test_select_exclude_snapshot.py similarity index 97% rename from tests/functional/simple_snapshot/test_select_exclude_snapshot.py rename to tests/functional/snapshots/test_select_exclude_snapshot.py index f763c838915..b460f1fbc25 100644 --- a/tests/functional/simple_snapshot/test_select_exclude_snapshot.py +++ b/tests/functional/snapshots/test_select_exclude_snapshot.py @@ -1,12 +1,14 @@ import os + import pytest -from dbt.tests.util import run_dbt, check_relations_equal, check_table_does_not_exist -from tests.functional.simple_snapshot.fixtures import ( - seeds__seed_newcol_csv, - seeds__seed_csv, - models__schema_yml, - models__ref_snapshot_sql, + +from dbt.tests.util import check_relations_equal, check_table_does_not_exist, run_dbt +from tests.functional.snapshots.fixtures import ( macros__test_no_overlaps_sql, + models__ref_snapshot_sql, + models__schema_yml, + seeds__seed_csv, + seeds__seed_newcol_csv, snapshots_pg__snapshot_sql, snapshots_select__snapshot_sql, snapshots_select_noconfig__snapshot_sql, diff --git a/tests/functional/simple_snapshot/test_slow_query_snapshot.py b/tests/functional/snapshots/test_slow_query_snapshot.py similarity index 95% rename from tests/functional/simple_snapshot/test_slow_query_snapshot.py rename to tests/functional/snapshots/test_slow_query_snapshot.py index 63dc939dfdd..ac01105c6cf 100644 --- a/tests/functional/simple_snapshot/test_slow_query_snapshot.py +++ b/tests/functional/snapshots/test_slow_query_snapshot.py @@ -1,6 +1,7 @@ import pytest + from dbt.tests.util import run_dbt -from tests.functional.simple_snapshot.fixtures import models_slow__gen_sql +from tests.functional.snapshots.fixtures import models_slow__gen_sql snapshots_slow__snapshot_sql = """ diff --git 
a/tests/functional/snapshots/test_snapshot_column_names.py b/tests/functional/snapshots/test_snapshot_column_names.py new file mode 100644 index 00000000000..85e9f425765 --- /dev/null +++ b/tests/functional/snapshots/test_snapshot_column_names.py @@ -0,0 +1,234 @@ +import os + +import pytest + +from dbt.tests.util import ( + check_relations_equal, + get_manifest, + run_dbt, + run_dbt_and_capture, + update_config_file, +) + +snapshot_actual_sql = """ +{% snapshot snapshot_actual %} + + {{ + config( + unique_key='id || ' ~ "'-'" ~ ' || first_name', + ) + }} + + select * from {{target.database}}.{{target.schema}}.seed + +{% endsnapshot %} +""" + +snapshots_yml = """ +snapshots: + - name: snapshot_actual + config: + strategy: timestamp + updated_at: updated_at + snapshot_meta_column_names: + dbt_valid_to: test_valid_to + dbt_valid_from: test_valid_from + dbt_scd_id: test_scd_id + dbt_updated_at: test_updated_at +""" + +snapshots_no_column_names_yml = """ +snapshots: + - name: snapshot_actual + config: + strategy: timestamp + updated_at: updated_at +""" + +ref_snapshot_sql = """ +select * from {{ ref('snapshot_actual') }} +""" + + +invalidate_sql = """ +-- update records 11 - 21. Change email and updated_at field +update {schema}.seed set + updated_at = updated_at + interval '1 hour', + email = case when id = 20 then 'pfoxj@creativecommons.org' else 'new_' || email end +where id >= 10 and id <= 20; + + +-- invalidate records 11 - 21 +update {schema}.snapshot_expected set + test_valid_to = updated_at + interval '1 hour' +where id >= 10 and id <= 20; + +""" + +update_sql = """ +-- insert v2 of the 11 - 21 records + +insert into {database}.{schema}.snapshot_expected ( + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + test_valid_from, + test_valid_to, + test_updated_at, + test_scd_id +) + +select + id, + first_name, + last_name, + email, + gender, + ip_address, + updated_at, + -- fields added by snapshotting + updated_at as test_valid_from, + null::timestamp as test_valid_to, + updated_at as test_updated_at, + md5(id || '-' || first_name || '|' || updated_at::text) as test_scd_id +from {database}.{schema}.seed +where id >= 10 and id <= 20; +""" + + +class TestSnapshotColumnNames: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshot_actual_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "snapshots.yml": snapshots_yml, + "ref_snapshot.sql": ref_snapshot_sql, + } + + def test_snapshot_column_names(self, project): + path = os.path.join(project.test_data_dir, "seed_cn.sql") + project.run_sql_file(path) + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + project.run_sql(invalidate_sql) + project.run_sql(update_sql) + + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + # run_dbt(["test"]) + check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) + + +class TestSnapshotColumnNamesFromDbtProject: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshot_actual_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "snapshots.yml": snapshots_no_column_names_yml, + "ref_snapshot.sql": ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "snapshots": { + "test": { + "+snapshot_meta_column_names": { + "dbt_valid_to": "test_valid_to", + "dbt_valid_from": "test_valid_from", + "dbt_scd_id": "test_scd_id", + "dbt_updated_at": "test_updated_at", + } + } + } + } + + def 
test_snapshot_column_names_from_project(self, project): + path = os.path.join(project.test_data_dir, "seed_cn.sql") + project.run_sql_file(path) + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + project.run_sql(invalidate_sql) + project.run_sql(update_sql) + + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + # run_dbt(["test"]) + check_relations_equal(project.adapter, ["snapshot_actual", "snapshot_expected"]) + + +class TestSnapshotInvalidColumnNames: + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshot_actual_sql} + + @pytest.fixture(scope="class") + def models(self): + return { + "snapshots.yml": snapshots_no_column_names_yml, + "ref_snapshot.sql": ref_snapshot_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "snapshots": { + "test": { + "+snapshot_meta_column_names": { + "dbt_valid_to": "test_valid_to", + "dbt_valid_from": "test_valid_from", + "dbt_scd_id": "test_scd_id", + "dbt_updated_at": "test_updated_at", + } + } + } + } + + def test_snapshot_invalid_column_names(self, project): + path = os.path.join(project.test_data_dir, "seed_cn.sql") + project.run_sql_file(path) + results = run_dbt(["snapshot"]) + assert len(results) == 1 + manifest = get_manifest(project.project_root) + snapshot_node = manifest.nodes["snapshot.test.snapshot_actual"] + snapshot_node.config.snapshot_meta_column_names == { + "dbt_valid_to": "test_valid_to", + "dbt_valid_from": "test_valid_from", + "dbt_scd_id": "test_scd_id", + "dbt_updated_at": "test_updated_at", + } + + project.run_sql(invalidate_sql) + project.run_sql(update_sql) + + # Change snapshot_meta_columns and look for an error + different_columns = { + "snapshots": { + "test": { + "+snapshot_meta_column_names": { + "dbt_valid_to": "test_valid_to", + "dbt_updated_at": "test_updated_at", + } + } + } + } + update_config_file(different_columns, "dbt_project.yml") + + results, log_output = run_dbt_and_capture(["snapshot"], expect_pass=False) + assert len(results) == 1 + assert "Compilation Error in snapshot snapshot_actual" in log_output + assert "Snapshot target is missing configured columns" in log_output diff --git a/tests/functional/snapshots/test_snapshot_config.py b/tests/functional/snapshots/test_snapshot_config.py new file mode 100644 index 00000000000..5124cf9c38b --- /dev/null +++ b/tests/functional/snapshots/test_snapshot_config.py @@ -0,0 +1,67 @@ +import pytest + +from dbt.tests.util import run_dbt, write_file + +orders_sql = """ +select 1 as id, 101 as user_id, 'pending' as status +""" + +snapshot_sql = """ +{% snapshot orders_snapshot %} + +{{ + config( + target_schema=schema, + strategy='check', + unique_key='id', + check_cols=['status'], + ) +}} + +select * from {{ ref('orders') }} + +{% endsnapshot %} +""" + +snapshot_no_config_sql = """ +{% snapshot orders_snapshot %} + +select * from {{ ref('orders') }} + +{% endsnapshot %} +""" + +snapshot_schema_yml = """ +snapshots: + - name: orders_snapshot + config: + target_schema: test + strategy: check + unique_key: id + check_cols: ['status'] +""" + + +class TestSnapshotConfig: + @pytest.fixture(scope="class") + def models(self): + return {"orders.sql": orders_sql} + + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot_orders.sql": snapshot_sql} + + def test_config(self, project): + run_dbt(["run"]) + results = run_dbt(["snapshot"]) + assert len(results) == 1 + + # try to parse with config in schema file + write_file( + snapshot_no_config_sql, 
project.project_root, "snapshots", "snapshot_orders.sql" + ) + write_file(snapshot_schema_yml, project.project_root, "snapshots", "snapshot.yml") + results = run_dbt(["parse"]) + + results = run_dbt(["snapshot"]) + assert len(results) == 1 diff --git a/tests/functional/snapshots/test_snapshot_timestamps.py b/tests/functional/snapshots/test_snapshot_timestamps.py new file mode 100644 index 00000000000..a0faa6d06a1 --- /dev/null +++ b/tests/functional/snapshots/test_snapshot_timestamps.py @@ -0,0 +1,72 @@ +import pytest + +from dbt.tests.util import run_dbt, run_dbt_and_capture + +create_source_sql = """ +create table {database}.{schema}.source_users ( + id INTEGER, + first_name VARCHAR(50), + last_name VARCHAR(50), + email VARCHAR(50), + gender VARCHAR(50), + ip_address VARCHAR(20), + updated_time TIMESTAMP WITH TIME ZONE +); +insert into {database}.{schema}.source_users (id, first_name, last_name, email, gender, ip_address, updated_time) values +(1, 'Judith', 'Kennedy', '(not provided)', 'Female', '54.60.24.128', '2015-12-24 12:19:28'), +(2, 'Arthur', 'Kelly', '(not provided)', 'Male', '62.56.24.215', '2015-10-28 16:22:15'), +(3, 'Rachel', 'Moreno', 'rmoreno2@msu.edu', 'Female', '31.222.249.23', '2016-04-05 02:05:30'); +""" + +model_users_sql = """ +select * from {{ source('test_source', 'source_users') }} +""" + +snapshot_sql = """ +{% snapshot users_snapshot %} + +select * from {{ ref('users') }} + +{% endsnapshot %} +""" + +source_schema_yml = """ +sources: + - name: test_source + loader: custom + schema: "{{ target.schema }}" + tables: + - name: source_users + loaded_at_field: updated_time +""" + +snapshot_schema_yml = """ +snapshots: + - name: users_snapshot + config: + target_schema: "{{ target.schema }}" + strategy: timestamp + unique_key: id + updated_at: updated_time +""" + + +class TestSnapshotConfig: + @pytest.fixture(scope="class") + def models(self): + return { + "users.sql": model_users_sql, + "source_schema.yml": source_schema_yml, + "snapshot_schema.yml": snapshot_schema_yml, + } + + @pytest.fixture(scope="class") + def snapshots(self): + return {"snapshot.sql": snapshot_sql} + + def test_timestamp_snapshot(self, project): + project.run_sql(create_source_sql) + run_dbt(["run"]) + results, log_output = run_dbt_and_capture(["snapshot"]) + assert len(results) == 1 + assert "Please update snapshot config" in log_output diff --git a/tests/functional/source_overrides/fixtures.py b/tests/functional/source_overrides/fixtures.py index 6d4b17960d7..ca6d4a03e66 100644 --- a/tests/functional/source_overrides/fixtures.py +++ b/tests/functional/source_overrides/fixtures.py @@ -1,6 +1,5 @@ import pytest - dupe_models__schema2_yml = """ version: 2 sources: @@ -12,20 +11,23 @@ error_after: {count: 3, period: day} tables: - name: my_table + freshness: null identifier: my_real_seed # on the override, the "color" column is only unique, it can be null! columns: - name: id - tests: + data_tests: - not_null - unique - name: color - tests: + data_tests: - unique - name: my_other_table + freshness: null identifier: my_real_other_seed - name: snapshot_freshness identifier: snapshot_freshness_base + freshness: error_after: {count: 1, period: day} @@ -42,20 +44,23 @@ error_after: {count: 3, period: day} tables: - name: my_table + freshness: null identifier: my_real_seed # on the override, the "color" column is only unique, it can be null! 
columns: - name: id - tests: + data_tests: - not_null - unique - name: color - tests: + data_tests: - unique - name: my_other_table + freshness: null identifier: my_real_other_seed - name: snapshot_freshness identifier: snapshot_freshness_base + loaded_at_field: updated_at freshness: error_after: {count: 1, period: day} @@ -86,25 +91,26 @@ error_after: {count: 3, period: hour} tables: - name: my_table + freshness: null identifier: my_seed columns: - name: id - tests: + data_tests: - unique - not_null - name: color - tests: + data_tests: - unique - not_null - name: my_other_table identifier: my_other_seed columns: - name: id - tests: + data_tests: - unique - not_null - name: letter - tests: + data_tests: - unique - not_null - name: snapshot_freshness @@ -316,20 +322,23 @@ error_after: {count: 3, period: day} tables: - name: my_table + freshness: null identifier: my_real_seed # on the override, the "color" column is only unique, it can be null! columns: - name: id - tests: + data_tests: - not_null - unique - name: color - tests: + data_tests: - unique - name: my_other_table + freshness: null identifier: my_real_other_seed - name: snapshot_freshness identifier: snapshot_freshness_base + loaded_at_field: updated_at freshness: error_after: {count: 1, period: day} diff --git a/tests/functional/source_overrides/test_simple_source_override.py b/tests/functional/source_overrides/test_simple_source_override.py index da1b4856e32..5505f8ca9ba 100644 --- a/tests/functional/source_overrides/test_simple_source_override.py +++ b/tests/functional/source_overrides/test_simple_source_override.py @@ -1,8 +1,9 @@ from datetime import datetime, timedelta + import pytest -from dbt.tests.util import run_dbt, update_config_file, check_relations_equal from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import check_relations_equal, run_dbt, update_config_file from tests.functional.source_overrides.fixtures import ( # noqa: F401 local_dependency, models__schema_yml, diff --git a/tests/functional/source_overrides/test_source_overrides_duplicate_model.py b/tests/functional/source_overrides/test_source_overrides_duplicate_model.py index e3cdebe4794..b07b04a7f63 100644 --- a/tests/functional/source_overrides/test_source_overrides_duplicate_model.py +++ b/tests/functional/source_overrides/test_source_overrides_duplicate_model.py @@ -1,12 +1,13 @@ import os -from dbt.exceptions import CompilationError + import pytest -from dbt.tests.util import run_dbt +from dbt.exceptions import CompilationError from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import run_dbt from tests.functional.source_overrides.fixtures import ( # noqa: F401 - dupe_models__schema2_yml, dupe_models__schema1_yml, + dupe_models__schema2_yml, local_dependency, ) diff --git a/tests/functional/sources/common_source_setup.py b/tests/functional/sources/common_source_setup.py index ed68dedf5d4..8608754855d 100644 --- a/tests/functional/sources/common_source_setup.py +++ b/tests/functional/sources/common_source_setup.py @@ -1,19 +1,20 @@ import os + import pytest import yaml -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt, run_dbt_and_capture from tests.functional.sources.fixtures import ( - models_schema_yml, - models_view_model_sql, - models_ephemeral_model_sql, models_descendant_model_sql, + models_ephemeral_model_sql, models_multi_source_model_sql, models_nonsource_descendant_sql, - seeds_source_csv, - seeds_other_table_csv, + models_schema_yml, + models_view_model_sql, 
seeds_expected_multi_source_csv, seeds_other_source_table_csv, + seeds_other_table_csv, + seeds_source_csv, ) @@ -57,10 +58,17 @@ def project_config_update(self): }, } - def run_dbt_with_vars(self, project, cmd, *args, **kwargs): + def _extend_cmd_with_vars(self, project, cmd): vars_dict = { "test_run_schema": project.test_schema, "test_loaded_at": project.adapter.quote("updated_at"), } cmd.extend(["--vars", yaml.safe_dump(vars_dict)]) + + def run_dbt_with_vars(self, project, cmd, *args, **kwargs): + self._extend_cmd_with_vars(project, cmd) return run_dbt(cmd, *args, **kwargs) + + def run_dbt_and_capture_with_vars(self, project, cmd, *args, **kwargs): + self._extend_cmd_with_vars(project, cmd) + return run_dbt_and_capture(cmd, *args, **kwargs) diff --git a/tests/functional/sources/fixtures.py b/tests/functional/sources/fixtures.py index 66aa058ff2e..b40b1869541 100644 --- a/tests/functional/sources/fixtures.py +++ b/tests/functional/sources/fixtures.py @@ -64,12 +64,12 @@ - name: descendant_model columns: - name: favorite_color - tests: + data_tests: - relationships: to: source('test_source', 'test_table') field: favorite_color - name: id - tests: + data_tests: - unique - not_null @@ -97,21 +97,21 @@ description: The favorite color - name: id description: The user ID - tests: + data_tests: - unique - not_null tags: - id_column - name: first_name description: The first name of the user - tests: [] + data_tests: [] - name: email description: The email address of the user - name: ip_address description: The last IP address the user logged in from - name: updated_at description: The last update time for this user - tests: + data_tests: - relationships: # do this as a table-level test, just to test out that aspect column_name: favorite_color @@ -119,9 +119,10 @@ field: favorite_color - name: other_test_table identifier: other_table + freshness: null columns: - name: id - tests: + data_tests: - not_null - unique tags: @@ -174,7 +175,7 @@ tables: - name: test_table identifier: source - tests: + data_tests: - relationships: # this is invalid (list of 3 1-key dicts instead of a single 3-key dict) - column_name: favorite_color @@ -351,7 +352,7 @@ identifier: source columns: - name: favorite_color - tests: + data_tests: - relationships: to: ref('model') # this will get rendered as its literal @@ -455,3 +456,19 @@ {{ return(load_result('collect_freshness').table) }} {% endmacro %} """ + + +freshness_via_metadata_schema_yml = """version: 2 +sources: + - name: test_source + loader: custom + freshness: + warn_after: {count: 10, period: hour} + error_after: {count: 1, period: day} + schema: my_schema + quoting: + identifier: True + tables: + - name: test_table + identifier: source +""" diff --git a/tests/functional/sources/test_name_chars.py b/tests/functional/sources/test_name_chars.py new file mode 100644 index 00000000000..9e58687cf53 --- /dev/null +++ b/tests/functional/sources/test_name_chars.py @@ -0,0 +1,31 @@ +from dbt.tests.util import get_manifest, run_dbt, write_file +from tests.fixtures.jaffle_shop import JaffleShopProject + +# Note: in an actual file (as opposed to a string that we write into a files) +# there would only be a single backslash. 
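# Illustrative aside (not part of the fixture below): once that string is written to disk,
# each inner quote is preceded by a single backslash, and YAML parsing yields a table name
# that itself contains double quotes.
import yaml

yaml_line_on_disk = '- name: "\\"/test/orders\\""'  # the line as it appears in the written sources.yml
assert yaml.safe_load(yaml_line_on_disk) == [{"name": '"/test/orders"'}]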
+sources_yml = """ +sources: + - name: something_else + database: raw + schema: jaffle_shop + tables: + - name: "\\"/test/orders\\"" + - name: customers +""" + + +class TestNameChars(JaffleShopProject): + def test_quotes_in_table_names(self, project): + # Write out a sources definition that includes a table name with quotes and a forward slash + # Note: forward slashes are not legal in filenames in Linux (or Windows), + # so we won't see forward slashes in model names, because they come from file names. + write_file(sources_yml, project.project_root, "models", "sources.yml") + manifest = run_dbt(["parse"]) + assert len(manifest.sources) == 2 + assert 'source.jaffle_shop.something_else."/test/orders"' in manifest.sources.keys() + # We've written out the manifest.json artifact, we want to ensure + # that it can be read in again (the json is valid). + # Note: the key in the json actually looks like: "source.jaffle_shop.something_else.\"/test/orders\"" + new_manifest = get_manifest(project.project_root) + assert new_manifest + assert 'source.jaffle_shop.something_else."/test/orders"' in new_manifest.sources.keys() diff --git a/tests/functional/sources/test_simple_source.py b/tests/functional/sources/test_simple_source.py index cd08647f367..3d84dd8a167 100644 --- a/tests/functional/sources/test_simple_source.py +++ b/tests/functional/sources/test_simple_source.py @@ -1,23 +1,22 @@ import os + import pytest import yaml -from dbt.exceptions import ParsingError +from dbt.exceptions import ParsingError from dbt.tests.util import ( - run_dbt, - update_config_file, check_relations_equal, check_table_does_not_exist, + run_dbt, + update_config_file, ) -from tests.functional.sources.common_source_setup import ( - BaseSourcesTest, -) +from tests.functional.sources.common_source_setup import BaseSourcesTest from tests.functional.sources.fixtures import ( macros_macro_sql, - malformed_models_schema_yml, malformed_models_descendant_model_sql, - malformed_schema_tests_schema_yml, + malformed_models_schema_yml, malformed_schema_tests_model_sql, + malformed_schema_tests_schema_yml, ) diff --git a/tests/functional/sources/test_source_configs.py b/tests/functional/sources/test_source_configs.py index 27c4c3d8b37..1ceca5d0522 100644 --- a/tests/functional/sources/test_source_configs.py +++ b/tests/functional/sources/test_source_configs.py @@ -1,16 +1,15 @@ import pytest -from hologram import ValidationError -from dbt.contracts.graph.model_config import SourceConfig - -from dbt.tests.util import run_dbt, update_config_file, get_manifest +from dbt.artifacts.resources import SourceConfig +from dbt.tests.util import get_manifest, run_dbt, update_config_file +from dbt_common.dataclass_schema import ValidationError from tests.functional.sources.fixtures import ( - basic_source_schema_yml, - disabled_source_level_schema_yml, - disabled_source_table_schema_yml, all_configs_everywhere_schema_yml, all_configs_not_table_schema_yml, all_configs_project_source_schema_yml, + basic_source_schema_yml, + disabled_source_level_schema_yml, + disabled_source_table_schema_yml, invalid_config_source_schema_yml, ) diff --git a/tests/functional/sources/test_source_fresher_state.py b/tests/functional/sources/test_source_fresher_state.py index 460a44e1a92..e1756b220e0 100644 --- a/tests/functional/sources/test_source_fresher_state.py +++ b/tests/functional/sources/test_source_fresher_state.py @@ -1,20 +1,19 @@ -import os import json +import os import shutil -import pytest from datetime import datetime, timedelta -from dbt.exceptions 
import DbtInternalError - +import pytest -from dbt.tests.util import AnyStringWith, AnyFloat import dbt.version +from dbt.contracts.results import FreshnessExecutionResultArtifact +from dbt.tests.util import AnyFloat, AnyStringWith +from dbt_common.exceptions import DbtInternalError from tests.functional.sources.common_source_setup import BaseSourcesTest - from tests.functional.sources.fixtures import ( error_models_schema_yml, - models_newly_added_model_sql, models_newly_added_error_model_sql, + models_newly_added_model_sql, ) @@ -81,6 +80,10 @@ def _assert_freshness_results(self, path, state): with open(path) as fp: data = json.load(fp) + try: + FreshnessExecutionResultArtifact.validate(data) + except Exception: + raise pytest.fail("FreshnessExecutionResultArtifact did not validate") assert set(data) == {"metadata", "results", "elapsed_time"} assert "generated_at" in data["metadata"] assert isinstance(data["elapsed_time"], float) @@ -621,7 +624,7 @@ def test_intentional_failure_no_previous_state(self, project): with pytest.raises(DbtInternalError) as excinfo: self.run_dbt_with_vars( project, - ["run", "-s", "source_status:fresher", "--defer", "--state", "previous_state"], + ["run", "-s", "source_status:fresher", "--state", "previous_state"], ) assert "No previous state comparison freshness results in sources.json" in str( excinfo.value diff --git a/tests/functional/sources/test_source_freshness.py b/tests/functional/sources/test_source_freshness.py index 17eca2d3522..2f42a3aaa56 100644 --- a/tests/functional/sources/test_source_freshness.py +++ b/tests/functional/sources/test_source_freshness.py @@ -1,19 +1,25 @@ -import os import json -import pytest +import os from datetime import datetime, timedelta +import pytest +import yaml + import dbt.version +from dbt import deprecations +from dbt.artifacts.schemas.freshness import FreshnessResult +from dbt.artifacts.schemas.results import FreshnessStatus +from dbt.cli.main import dbtRunner +from dbt.tests.util import AnyFloat, AnyStringWith from tests.functional.sources.common_source_setup import BaseSourcesTest from tests.functional.sources.fixtures import ( - error_models_schema_yml, + collect_freshness_macro_override_previous_return_signature, error_models_model_sql, + error_models_schema_yml, filtered_models_schema_yml, + freshness_via_metadata_schema_yml, override_freshness_models_schema_yml, - collect_freshness_macro_override_previous_return_signature, ) -from dbt.tests.util import AnyStringWith, AnyFloat -from dbt import deprecations class SuccessfulSourceFreshnessTest(BaseSourcesTest): @@ -122,6 +128,14 @@ def _assert_freshness_results(self, path, state): } ] + def _assert_project_hooks_called(self, logs: str): + assert "Running 1 on-run-start hook" in logs + assert "Running 1 on-run-end hook" in logs + + def _assert_project_hooks_not_called(self, logs: str): + assert "Running 1 on-run-start hook" not in logs + assert "Running 1 on-run-end hook" not in logs + class TestSourceFreshness(SuccessfulSourceFreshnessTest): def test_source_freshness(self, project): @@ -366,12 +380,201 @@ def macros(self): def test_source_freshness(self, project): # ensure that the deprecation warning is raised + vars_dict = { + "test_run_schema": project.test_schema, + "test_loaded_at": project.adapter.quote("updated_at"), + } + events = [] + dbtRunner(callbacks=[events.append]).invoke( + ["source", "freshness", "--vars", yaml.safe_dump(vars_dict)] + ) + matches = list([e for e in events if e.info.name == "CollectFreshnessReturnSignature"]) + assert matches + + 
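# Illustrative helper sketch (not part of the original tests): several of the classes below
# rely on the same pattern -- collect the structured events emitted during a dbtRunner
# invocation, then assert on their names and levels.
from dbt.cli.main import dbtRunner

def invoke_and_collect_event_names(args):
    events = []
    dbtRunner(callbacks=[events.append]).invoke(args)
    return [e.info.name for e in events]

# e.g. parsing an unsupported metadata-freshness config on the default postgres test adapter
# should surface a "FreshnessConfigProblem" warning event:
# assert "FreshnessConfigProblem" in invoke_and_collect_event_names(["parse"])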
+class TestMetadataFreshnessFails: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": freshness_via_metadata_schema_yml} + + def test_metadata_freshness_unsupported_parse_warning(self, project): + """Since the default test adapter (postgres) does not support metadata + based source freshness checks, trying to use that mechanism should + result in a parse-time warning.""" + got_warning = False + + def warning_probe(e): + nonlocal got_warning + if e.info.name == "FreshnessConfigProblem" and e.info.level == "warn": + got_warning = True + + runner = dbtRunner(callbacks=[warning_probe]) + runner.invoke(["parse"]) + + assert got_warning + + def test_metadata_freshness_unsupported_error_when_run(self, project): + + runner = dbtRunner() + result = runner.invoke(["source", "freshness"]) + assert isinstance(result.result, FreshnessResult) + assert len(result.result.results) == 1 + freshness_result = result.result.results[0] + assert freshness_result.status == FreshnessStatus.RuntimeErr + assert "Could not compute freshness for source test_table" in freshness_result.message + + +class TestSourceFreshnessProjectHooksNotRun(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "on-run-start": ["{{ log('on-run-start hooks called') }}"], + "on-run-end": ["{{ log('on-run-end hooks called') }}"], + "flags": { + "source_freshness_run_project_hooks": False, + }, + } + + @pytest.fixture(scope="class") + def global_deprecations(self): + deprecations.reset_deprecations() + yield deprecations.reset_deprecations() + + def test_hooks_do_run_for_source_freshness( + self, + project, + global_deprecations, + ): assert deprecations.active_deprecations == set() - self.run_dbt_with_vars( + _, log_output = self.run_dbt_and_capture_with_vars( project, - ["source", "freshness"], + [ + "source", + "freshness", + ], expect_pass=False, ) - expected = {"collect-freshness-return-signature"} + assert "on-run-start hooks called" not in log_output + assert "on-run-end hooks called" not in log_output + expected = {"source-freshness-project-hooks"} assert expected == deprecations.active_deprecations + + +class TestHooksInSourceFreshness(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "on-run-start": ["{{ log('on-run-start hooks called') }}"], + "on-run-end": ["{{ log('on-run-end hooks called') }}"], + "flags": { + "source_freshness_run_project_hooks": True, + }, + } + + def test_hooks_do_run_for_source_freshness( + self, + project, + ): + _, log_output = self.run_dbt_and_capture_with_vars( + project, + [ + "source", + "freshness", + ], + expect_pass=False, + ) + + self._assert_project_hooks_called(log_output) + + +class TestHooksInSourceFreshnessError: + @pytest.fixture(scope="class") + def models(self): + return { + "schema.yml": error_models_schema_yml, + "model.sql": error_models_model_sql, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "on-run-start": ["select fake_column from table_does_not_exist"], + "flags": { + "source_freshness_run_project_hooks": True, + }, + } + + def test_hooks_do_not_run_for_source_freshness( + self, + project, + ): + run_result_error = None + + def run_result_error_probe(e): + nonlocal run_result_error + if ( + e.info.name == "RunResultError" + and e.info.level == "error" + and "on-run-start" in e.info.msg + ): + run_result_error = e.info.msg + + 
runner = dbtRunner(callbacks=[run_result_error_probe]) + runner.invoke(["source", "freshness"]) + assert 'relation "table_does_not_exist" does not exist' in run_result_error + + +class TestHooksInSourceFreshnessDisabled(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "on-run-start": ["{{ log('on-run-start hooks called') }}"], + "on-run-end": ["{{ log('on-run-end hooks called') }}"], + "flags": { + "source_freshness_run_project_hooks": False, + }, + } + + def test_hooks_do_not_run_for_source_freshness( + self, + project, + ): + _, log_output = self.run_dbt_and_capture_with_vars( + project, + [ + "source", + "freshness", + ], + expect_pass=False, + ) + self._assert_project_hooks_not_called(log_output) + + +class TestHooksInSourceFreshnessDefault(SuccessfulSourceFreshnessTest): + @pytest.fixture(scope="class") + def project_config_update(self): + return { + "config-version": 2, + "on-run-start": ["{{ log('on-run-start hooks called') }}"], + "on-run-end": ["{{ log('on-run-end hooks called') }}"], + } + + def test_hooks_do_not_run_for_source_freshness( + self, + project, + ): + _, log_output = self.run_dbt_and_capture_with_vars( + project, + [ + "source", + "freshness", + ], + expect_pass=False, + ) + # default behaviour - no hooks run in source freshness + self._assert_project_hooks_not_called(log_output) diff --git a/tests/functional/sources/test_source_loaded_at_field.py b/tests/functional/sources/test_source_loaded_at_field.py new file mode 100644 index 00000000000..b89b8ddd372 --- /dev/null +++ b/tests/functional/sources/test_source_loaded_at_field.py @@ -0,0 +1,136 @@ +import pytest + +from dbt.exceptions import YamlParseDictError +from dbt.tests.util import get_manifest, run_dbt, write_file + +loaded_at_field_null_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 + loaded_at_field: null +""" + +loaded_at_field_blank_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 + loaded_at_field: null +""" + +loaded_at_field_missing_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 +""" + +loaded_at_field_defined_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 + loaded_at_field: updated_at_another_place +""" + +loaded_at_field_empty_string_schema_yml = """ +sources: + - name: test_source + freshness: + warn_after: + count: 1 + period: day + error_after: + count: 4 + period: day + loaded_at_field: updated_at + tables: + - name: table1 + loaded_at_field: "" +""" + + +class TestParsingLoadedAtField: + @pytest.fixture(scope="class") + def models(self): + return {"schema.yml": loaded_at_field_null_schema_yml} + + def test_loaded_at_field(self, project): + # test setting loaded_at_field to null explicitly at table level + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + assert "source.test.test_source.table1" in manifest.sources + assert manifest.sources.get("source.test.test_source.table1").loaded_at_field is None 
+ + # test setting loaded_at_field at source level, do not set at table level + # end up with source level loaded_at_field + write_file( + loaded_at_field_missing_schema_yml, project.project_root, "models", "schema.yml" + ) + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.table1" in manifest.sources + assert ( + manifest.sources.get("source.test.test_source.table1").loaded_at_field == "updated_at" + ) + + # test setting loaded_at_field to nothing, should override Source value for None + write_file(loaded_at_field_blank_schema_yml, project.project_root, "models", "schema.yml") + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + + assert "source.test.test_source.table1" in manifest.sources + assert manifest.sources.get("source.test.test_source.table1").loaded_at_field is None + + # test setting loaded_at_field at table level to a value - it should override source level + write_file( + loaded_at_field_defined_schema_yml, project.project_root, "models", "schema.yml" + ) + run_dbt(["parse"]) + manifest = get_manifest(project.project_root) + assert "source.test.test_source.table1" in manifest.sources + assert ( + manifest.sources.get("source.test.test_source.table1").loaded_at_field + == "updated_at_another_place" + ) + + # test setting loaded_at_field at table level to an empty string - should error + write_file( + loaded_at_field_empty_string_schema_yml, project.project_root, "models", "schema.yml" + ) + with pytest.raises(YamlParseDictError): + run_dbt(["parse"]) diff --git a/tests/functional/statements/test_statements.py b/tests/functional/statements/test_statements.py index 87933af20fa..9e3d5005759 100644 --- a/tests/functional/statements/test_statements.py +++ b/tests/functional/statements/test_statements.py @@ -1,7 +1,8 @@ import pathlib + import pytest -from dbt.tests.util import run_dbt, check_relations_equal, write_file +from dbt.tests.util import check_relations_equal, run_dbt, write_file from tests.functional.statements.fixtures import ( models__statement_actual, models__statement_duplicated_load, diff --git a/tests/functional/test_empty.py b/tests/functional/test_empty.py new file mode 100644 index 00000000000..d284fdcc3e5 --- /dev/null +++ b/tests/functional/test_empty.py @@ -0,0 +1,82 @@ +import pytest + +from dbt.tests.util import relation_from_name, run_dbt + +model_input_sql = """ +select 1 as id +""" + +ephemeral_model_input_sql = """ +{{ config(materialized='ephemeral') }} +select 2 as id +""" + +raw_source_csv = """id +3 +""" + + +model_sql = """ +select * +from {{ ref('model_input') }} +union all +select * +from {{ ref('ephemeral_model_input') }} +union all +select * +from {{ source('seed_sources', 'raw_source') }} +""" + + +schema_sources_yml = """ +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_source +""" + + +class TestEmptyFlag: + @pytest.fixture(scope="class") + def seeds(self): + return { + "raw_source.csv": raw_source_csv, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "model_input.sql": model_input_sql, + "ephemeral_model_input.sql": ephemeral_model_input_sql, + "model.sql": model_sql, + "sources.yml": schema_sources_yml, + } + + def assert_row_count(self, project, relation_name: str, expected_row_count: int): + relation = relation_from_name(project.adapter, relation_name) + result = project.run_sql(f"select count(*) as num_rows from {relation}", fetch="one") + assert result[0] == expected_row_count + + def 
test_run_with_empty(self, project): + # create source from seed + run_dbt(["seed"]) + + # run without empty - 3 expected rows in output - 1 from each input + run_dbt(["run"]) + self.assert_row_count(project, "model", 3) + + # run with empty - 0 expected rows in output + run_dbt(["run", "--empty"]) + self.assert_row_count(project, "model", 0) + + # build without empty - 3 expected rows in output - 1 from each input + run_dbt(["build"]) + self.assert_row_count(project, "model", 3) + + # build with empty - 0 expected rows in output + run_dbt(["build", "--empty"]) + self.assert_row_count(project, "model", 0) + + # ensure dbt compile supports --empty flag + run_dbt(["compile", "--empty"]) diff --git a/tests/functional/test_selection/fixtures.py b/tests/functional/test_selection/fixtures.py index 48c3f40c62d..84f4c532e5f 100644 --- a/tests/functional/test_selection/fixtures.py +++ b/tests/functional/test_selection/fixtures.py @@ -1,6 +1,4 @@ import pytest -from dbt.tests.fixtures.project import write_project_files - tests__cf_a_b_sql = """ select * from {{ ref('model_a') }} @@ -32,7 +30,7 @@ identifier: model_b columns: - name: fun - tests: + data_tests: - unique models: @@ -40,7 +38,7 @@ columns: - name: fun tags: [column_level_tag] - tests: + data_tests: - unique - relationships: to: ref('model_b') @@ -84,13 +82,3 @@ def models(): "model_b.sql": models__model_b_sql, "model_a.sql": models__model_a_sql, } - - -@pytest.fixture(scope="class") -def project_files( - project_root, - tests, - models, -): - write_project_files(project_root, "tests", tests) - write_project_files(project_root, "models", models) diff --git a/tests/functional/test_selection/test_selection_expansion.py b/tests/functional/test_selection/test_selection_expansion.py index 3e6f1f82ae3..d163588303c 100644 --- a/tests/functional/test_selection/test_selection_expansion.py +++ b/tests/functional/test_selection/test_selection_expansion.py @@ -1,11 +1,7 @@ import pytest from dbt.tests.util import run_dbt -from tests.functional.test_selection.fixtures import ( # noqa: F401 - tests, - models, - project_files, -) +from tests.functional.test_selection.fixtures import models, tests # noqa: F401 class TestSelectionExpansion: diff --git a/tests/functional/test_singular_tests.py b/tests/functional/test_singular_tests.py new file mode 100644 index 00000000000..a4b9d05b510 --- /dev/null +++ b/tests/functional/test_singular_tests.py @@ -0,0 +1,34 @@ +import pytest + +from dbt.tests.util import run_dbt + +single_test_sql = """ +{{ config(warn_if = '>0', error_if ="> 10") }} + +select 1 as issue +""" + + +class TestSingularTestWarnError: + @pytest.fixture(scope="class") + def tests(self): + return {"single_test.sql": single_test_sql} + + def test_singular_test_warn_error(self, project): + results = run_dbt(["--warn-error", "test"], expect_pass=False) + assert results.results[0].status == "fail" + + def test_singular_test_warn_error_options(self, project): + results = run_dbt( + ["--warn-error-options", "{'include': 'all'}", "test"], expect_pass=False + ) + assert results.results[0].status == "fail" + + def test_singular_test_equals_warn_error(self, project): + results = run_dbt(["--warn-error", "test"], expect_pass=False) + warn_error_result = results.results[0].status + + results = run_dbt( + ["--warn-error-options", "{'include': 'all'}", "test"], expect_pass=False + ) + assert warn_error_result == results.results[0].status diff --git a/tests/functional/threading/test_thread_count.py b/tests/functional/threading/test_thread_count.py index 
c31f5ed6312..9c94356e630 100644 --- a/tests/functional/threading/test_thread_count.py +++ b/tests/functional/threading/test_thread_count.py @@ -1,6 +1,6 @@ import pytest -from dbt.tests.util import run_dbt +from dbt.tests.util import run_dbt models__do_nothing__sql = """ with x as (select pg_sleep(1)) select 1 diff --git a/tests/functional/time_spines/fixtures.py b/tests/functional/time_spines/fixtures.py new file mode 100644 index 00000000000..0f67488ff11 --- /dev/null +++ b/tests/functional/time_spines/fixtures.py @@ -0,0 +1,105 @@ +models_people_sql = """ +select 1 as id, 'Drew' as first_name, 'Banin' as last_name, 'yellow' as favorite_color, true as loves_dbt, 5 as tenure, current_timestamp as created_at +union all +select 2 as id, 'Jeremy' as first_name, 'Cohen' as last_name, 'indigo' as favorite_color, true as loves_dbt, 4 as tenure, current_timestamp as created_at +union all +select 3 as id, 'Callum' as first_name, 'McCann' as last_name, 'emerald' as favorite_color, true as loves_dbt, 0 as tenure, current_timestamp as created_at +""" + +semantic_model_people_yml = """ +version: 2 + +semantic_models: + - name: semantic_people + model: ref('people') + dimensions: + - name: favorite_color + type: categorical + - name: created_at + type: TIME + type_params: + time_granularity: day + measures: + - name: years_tenure + agg: SUM + expr: tenure + - name: people + agg: count + expr: id + entities: + - name: id + type: primary + defaults: + agg_time_dimension: created_at +""" + +metricflow_time_spine_sql = """ +SELECT to_date('02/20/2023, 'mm/dd/yyyy') as date_day +""" + +metricflow_time_spine_second_sql = """ +SELECT to_datetime('02/20/2023, 'mm/dd/yyyy hh:mm:ss') as ts_second +""" + +valid_time_spines_yml = """ +version: 2 + +models: + - name: metricflow_time_spine_second + time_spine: + standard_granularity_column: ts_second + columns: + - name: ts_second + granularity: second + - name: metricflow_time_spine + time_spine: + standard_granularity_column: date_day + custom_granularities: + - name: retail_month + - name: martian_year + column_name: martian__year_xyz + columns: + - name: date_day + granularity: day + - name: retail_month + - name: martian__year_xyz + +""" + +missing_time_spine_yml = """ +models: + - name: metricflow_time_spine + columns: + - name: ts_second + granularity: second +""" + +time_spine_missing_granularity_yml = """ +models: + - name: metricflow_time_spine_second + time_spine: + standard_granularity_column: ts_second + columns: + - name: ts_second +""" + +time_spine_missing_standard_column_yml = """ +models: + - name: metricflow_time_spine_second + time_spine: + standard_granularity_column: ts_second + columns: + - name: date_day +""" + +time_spine_missing_custom_column_yml = """ +models: + - name: metricflow_time_spine_second + time_spine: + standard_granularity_column: date_day + custom_granularities: + - name: retail_month + columns: + - name: date_day + granularity: day +""" diff --git a/tests/functional/time_spines/test_time_spines.py b/tests/functional/time_spines/test_time_spines.py new file mode 100644 index 00000000000..03063d347be --- /dev/null +++ b/tests/functional/time_spines/test_time_spines.py @@ -0,0 +1,233 @@ +from typing import Set + +import pytest + +from dbt.cli.main import dbtRunner +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.semantic_manifest import SemanticManifest +from dbt.exceptions import ParsingError +from dbt.tests.util import get_manifest +from dbt_semantic_interfaces.type_enums import TimeGranularity 
+from tests.functional.time_spines.fixtures import ( + metricflow_time_spine_second_sql, + metricflow_time_spine_sql, + models_people_sql, + semantic_model_people_yml, + time_spine_missing_custom_column_yml, + time_spine_missing_granularity_yml, + time_spine_missing_standard_column_yml, + valid_time_spines_yml, +) + + +class TestValidTimeSpines: + """Tests that YAML using current time spine configs parses as expected.""" + + @pytest.fixture(scope="class") + def models(self): + return { + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "metricflow_time_spine_second.sql": metricflow_time_spine_second_sql, + "time_spines.yml": valid_time_spines_yml, + "semantic_model_people.yml": semantic_model_people_yml, + "people.sql": models_people_sql, + } + + def test_time_spines(self, project): + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + + manifest = get_manifest(project.project_root) + assert manifest + + # Test that models and columns are set as expected + time_spine_models = { + id.split(".")[-1]: node for id, node in manifest.nodes.items() if node.time_spine + } + day_model_name = "metricflow_time_spine" + second_model_name = "metricflow_time_spine_second" + day_column_name = "date_day" + second_column_name = "ts_second" + model_names_to_col_names = { + day_model_name: day_column_name, + second_model_name: second_column_name, + } + model_names_to_granularities = { + day_model_name: TimeGranularity.DAY, + second_model_name: TimeGranularity.SECOND, + } + assert len(time_spine_models) == 2 + expected_time_spine_aliases = {second_model_name, day_model_name} + assert set(time_spine_models.keys()) == expected_time_spine_aliases + for model in time_spine_models.values(): + assert ( + model.time_spine.standard_granularity_column + == model_names_to_col_names[model.name] + ) + if model.name == day_model_name: + assert len(model.time_spine.custom_granularities) == 2 + assert { + custom_granularity.name + for custom_granularity in model.time_spine.custom_granularities + } == {"retail_month", "martian_year"} + for custom_granularity in model.time_spine.custom_granularities: + if custom_granularity.name == "martian_year": + assert custom_granularity.column_name == "martian__year_xyz" + else: + assert len(model.time_spine.custom_granularities) == 0 + assert len(model.columns) > 0 + assert ( + list(model.columns.values())[0].granularity + == model_names_to_granularities[model.name] + ) + + # Test that project configs are set as expected in semantic manifest + semantic_manifest = SemanticManifest(manifest) + assert semantic_manifest.validate() + project_config = semantic_manifest._get_pydantic_semantic_manifest().project_configuration + # Legacy config + assert len(project_config.time_spine_table_configurations) == 1 + legacy_time_spine_config = project_config.time_spine_table_configurations[0] + assert legacy_time_spine_config.column_name == day_column_name + assert legacy_time_spine_config.location.replace('"', "").split(".")[-1] == day_model_name + assert legacy_time_spine_config.grain == TimeGranularity.DAY + # Current configs + assert len(project_config.time_spines) == 2 + sl_time_spine_aliases: Set[str] = set() + for sl_time_spine in project_config.time_spines: + alias = sl_time_spine.node_relation.alias + sl_time_spine_aliases.add(alias) + assert sl_time_spine.primary_column.name == model_names_to_col_names[alias] + assert ( + sl_time_spine.primary_column.time_granularity + == model_names_to_granularities[alias] 
+ ) + assert sl_time_spine_aliases == expected_time_spine_aliases + + +class TestValidLegacyTimeSpine: + """Tests that YAML using only legacy time spine config parses as expected.""" + + @pytest.fixture(scope="class") + def models(self): + return { + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "semantic_model_people.yml": semantic_model_people_yml, + "people.sql": models_people_sql, + } + + def test_time_spines(self, project): + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert result.success + assert isinstance(result.result, Manifest) + + manifest = get_manifest(project.project_root) + assert manifest + + # Test that project configs are set as expected in semantic manifest + semantic_manifest = SemanticManifest(manifest) + assert semantic_manifest.validate() + project_config = semantic_manifest._get_pydantic_semantic_manifest().project_configuration + # Legacy config + assert len(project_config.time_spine_table_configurations) == 1 + legacy_time_spine_config = project_config.time_spine_table_configurations[0] + assert legacy_time_spine_config.column_name == "date_day" + assert ( + legacy_time_spine_config.location.replace('"', "").split(".")[-1] + == "metricflow_time_spine" + ) + assert legacy_time_spine_config.grain == TimeGranularity.DAY + # Current configs + assert len(project_config.time_spines) == 0 + + +class TestMissingTimeSpine: + """Tests that YAML with semantic models but no time spines errors.""" + + @pytest.fixture(scope="class") + def models(self): + return { + "semantic_model_people.yml": semantic_model_people_yml, + "people.sql": models_people_sql, + } + + def test_time_spines(self, project): + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert isinstance(result.exception, ParsingError) + assert ( + "The semantic layer requires a time spine model with granularity DAY or smaller" + in result.exception.msg + ) + + +class TestTimeSpineStandardColumnMissing: + """Tests that YAML with time spine standard granularity column not in model errors.""" + + @pytest.fixture(scope="class") + def models(self): + return { + "semantic_model_people.yml": semantic_model_people_yml, + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "metricflow_time_spine_second.sql": metricflow_time_spine_second_sql, + "time_spines.yml": time_spine_missing_standard_column_yml, + } + + def test_time_spines(self, project): + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert isinstance(result.exception, ParsingError) + assert ( + "Time spine standard granularity column must be defined on the model." + in result.exception.msg + ) + + +class TestTimeSpineCustomColumnMissing: + """Tests that YAML with time spine custom granularity column not in model errors.""" + + @pytest.fixture(scope="class") + def models(self): + return { + "semantic_model_people.yml": semantic_model_people_yml, + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "metricflow_time_spine_second.sql": metricflow_time_spine_second_sql, + "time_spines.yml": time_spine_missing_custom_column_yml, + } + + def test_time_spines(self, project): + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert isinstance(result.exception, ParsingError) + assert ( + "Time spine custom granularity columns do not exist in the model." 
+ in result.exception.msg + ) + + +class TestTimeSpineGranularityMissing: + """Tests that YAML with time spine column without granularity errors.""" + + @pytest.fixture(scope="class") + def models(self): + return { + "semantic_model_people.yml": semantic_model_people_yml, + "people.sql": models_people_sql, + "metricflow_time_spine.sql": metricflow_time_spine_sql, + "metricflow_time_spine_second.sql": metricflow_time_spine_second_sql, + "time_spines.yml": time_spine_missing_granularity_yml, + } + + def test_time_spines(self, project): + runner = dbtRunner() + result = runner.invoke(["parse"]) + assert isinstance(result.exception, ParsingError) + assert ( + "Time spine standard granularity column must have a granularity defined." + in result.exception.msg + ) diff --git a/tests/functional/timezones/test_timezones.py b/tests/functional/timezones/test_timezones.py index a4627fb63c3..1e2de2585f3 100644 --- a/tests/functional/timezones/test_timezones.py +++ b/tests/functional/timezones/test_timezones.py @@ -1,9 +1,9 @@ import os + import pytest from dbt.tests.util import run_dbt - # Canada/Saskatchewan does not observe DST so the time diff won't change depending on when it is in the year model_sql = """ {{ diff --git a/tests/functional/unit_testing/fixtures.py b/tests/functional/unit_testing/fixtures.py new file mode 100644 index 00000000000..83e98677f20 --- /dev/null +++ b/tests/functional/unit_testing/fixtures.py @@ -0,0 +1,1141 @@ +import pytest + +my_model_vars_sql = """ +SELECT +a+b as c, +concat(string_a, string_b) as string_c, +not_testing, date_a, +{{ dbt.string_literal(type_numeric()) }} as macro_call, +{{ dbt.string_literal(var('my_test')) }} as var_call, +{{ dbt.string_literal(env_var('MY_TEST', 'default')) }} as env_var_call, +{{ dbt.string_literal(invocation_id) }} as invocation_id +FROM {{ ref('my_model_a')}} my_model_a +JOIN {{ ref('my_model_b' )}} my_model_b +ON my_model_a.id = my_model_b.id +""" + +my_model_sql = """ +SELECT +a+b as c, +concat(string_a, string_b) as string_c, +not_testing, date_a +FROM {{ ref('my_model_a')}} my_model_a +JOIN {{ ref('my_model_b' )}} my_model_b +ON my_model_a.id = my_model_b.id +""" + +my_model_a_sql = """ +SELECT +1 as a, +1 as id, +2 as not_testing, +'a' as string_a, +DATE '2020-01-02' as date_a +""" + +my_model_b_sql = """ +SELECT +2 as b, +1 as id, +2 as c, +'b' as string_b +""" + +my_model_check_null_sql = """ +SELECT +CASE + WHEN a IS null THEN True + ELSE False +END a_is_null +FROM {{ ref('my_model_a') }} +""" + +test_my_model_a_yml = """ +models: + - name: my_model_a + columns: + - name: a + tests: + - not_null + - name: id + tests: + - not_null +""" + +test_my_model_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: + - {c: 2} + + - name: test_my_model_empty + model: my_model + given: + - input: ref('my_model_a') + rows: [] + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: [] + + - name: test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + + - name: 
test_my_model_string_concat + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, string_a: a} + - input: ref('my_model_b') + rows: + - {id: 1, string_b: b} + expect: + rows: + - {string_c: ab} + config: + tags: test_this +""" + +test_my_model_pass_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: + - {c: 3} +""" + + +test_my_model_simple_fixture_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: + - {c: 2} + + - name: test_depends_on_fixture + model: my_model + given: + - input: ref('my_model_a') + rows: [] + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + expect: + rows: [] + + - name: test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + + - name: test_has_string_c_ab + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, string_a: a} + - input: ref('my_model_b') + rows: + - {id: 1, string_b: b} + expect: + rows: + - {string_c: ab} + config: + tags: test_this +""" + + +datetime_test = """ + - name: test_my_model_datetime + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, date_a: "2020-01-01"} + - input: ref('my_model_b') + rows: + - {id: 1} + expect: + rows: + - {date_a: "2020-01-01"} +""" + +event_sql = """ +select DATE '2020-01-01' as event_time, 1 as event +union all +select DATE '2020-01-02' as event_time, 2 as event +union all +select DATE '2020-01-03' as event_time, 3 as event +""" + +datetime_test_invalid_format_key = """ + - name: test_my_model_datetime + model: my_model + given: + - input: ref('my_model_a') + format: xxxx + rows: + - {id: 1, date_a: "2020-01-01"} + - input: ref('my_model_b') + rows: + - {id: 1} + expect: + rows: + - {date_a: "2020-01-01"} +""" + +datetime_test_invalid_csv_values = """ + - name: test_my_model_datetime + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: + - {id: 1, date_a: "2020-01-01"} + - input: ref('my_model_b') + rows: + - {id: 1} + expect: + rows: + - {date_a: "2020-01-01"} +""" + +datetime_test_invalid_csv_file_values = """ + - name: test_my_model_datetime + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: + - {id: 1, date_a: "2020-01-01"} + - input: ref('my_model_b') + rows: + - {id: 1} + expect: + rows: + - {date_a: "2020-01-01"} +""" + +event_sql = """ +select DATE '2020-01-01' as event_time, 1 as event +union all +select DATE '2020-01-02' as event_time, 2 as event +union all +select DATE '2020-01-03' as event_time, 3 as event +""" + +my_incremental_model_sql = """ +{{ + config( + materialized='incremental' + ) +}} + +select * from {{ ref('events') }} +{% if is_incremental() %} +where event_time > (select max(event_time) from {{ this }}) +{% endif %} +""" + +test_my_model_incremental_yml_basic = """ +unit_tests: + - name: incremental_false + model: my_incremental_model + 
overrides: + macros: + is_incremental: false + given: + - input: ref('events') + rows: + - {event_time: "2020-01-01", event: 1} + expect: + rows: + - {event_time: "2020-01-01", event: 1} + - name: incremental_true + model: my_incremental_model + overrides: + macros: + is_incremental: true + given: + - input: ref('events') + rows: + - {event_time: "2020-01-01", event: 1} + - {event_time: "2020-01-02", event: 2} + - {event_time: "2020-01-03", event: 3} + - input: this + rows: + - {event_time: "2020-01-01", event: 1} + expect: + rows: + - {event_time: "2020-01-02", event: 2} + - {event_time: "2020-01-03", event: 3} +""" + +test_my_model_incremental_yml_no_override = """ +unit_tests: + - name: incremental_false + model: my_incremental_model + given: + - input: ref('events') + rows: + - {event_time: "2020-01-01", event: 1} + expect: + rows: + - {event_time: "2020-01-01", event: 1} +""" + +test_my_model_incremental_yml_wrong_override = """ +unit_tests: + - name: incremental_false + model: my_incremental_model + overrides: + macros: + is_incremental: foobar + given: + - input: ref('events') + rows: + - {event_time: "2020-01-01", event: 1} + expect: + rows: + - {event_time: "2020-01-01", event: 1} +""" + +test_my_model_incremental_yml_no_this_input = """ +unit_tests: + - name: incremental_true + model: my_incremental_model + overrides: + macros: + is_incremental: true + given: + - input: ref('events') + rows: + - {event_time: "2020-01-01", event: 1} + - {event_time: "2020-01-02", event: 2} + - {event_time: "2020-01-03", event: 3} + expect: + rows: + - {event_time: "2020-01-02", event: 2} + - {event_time: "2020-01-03", event: 3} +""" + +# -- inline csv tests + +test_my_model_csv_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + rows: | + c + 2 + + - name: test_my_model_empty + model: my_model + given: + - input: ref('my_model_a') + rows: [] + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + rows: [] + - name: test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + - name: test_my_model_string_concat + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,string_a + 1,a + - input: ref('my_model_b') + format: csv + rows: | + id,string_b + 1,b + expect: + format: csv + rows: | + string_c + ab + config: + tags: test_this +""" + +# -- csv file tests +test_my_model_file_csv_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_numeric_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + expect: + format: csv + fixture: test_my_model_basic_fixture + + - name: test_my_model_empty + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_empty_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + expect: + format: csv + fixture: test_my_model_a_empty_fixture + + - name: 
test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_numeric_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + + - name: test_my_model_string_concat + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_b_fixture + expect: + format: csv + fixture: test_my_model_concat_fixture + config: + tags: test_this +""" + +test_my_model_fixture_csv = """id,b +1,2 +2,2 +""" + +test_my_model_a_fixture_csv = """id,string_a +1,a +""" + +test_my_model_a_with_null_fixture_csv = """id,a +1, +2,3 +""" + +test_my_model_a_empty_fixture_csv = """ +""" + +test_my_model_a_numeric_fixture_csv = """id,a +1,1 +""" + +test_my_model_b_fixture_csv = """id,string_b +1,b +""" + +test_my_model_basic_fixture_csv = """c +2 +""" + +test_my_model_concat_fixture_csv = """string_c +ab +""" + +# -- mixed inline and file csv +test_my_model_mixed_csv_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + fixture: test_my_model_basic_fixture + + - name: test_my_model_empty + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_empty_fixture + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + fixture: test_my_model_a_empty_fixture + + - name: test_my_model_overrides + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + fixture: test_my_model_fixture + overrides: + macros: + type_numeric: override + invocation_id: 123 + vars: + my_test: var_override + env_vars: + MY_TEST: env_var_override + expect: + rows: + - {macro_call: override, var_call: var_override, env_var_call: env_var_override, invocation_id: 123} + + - name: test_my_model_string_concat + model: my_model + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_fixture + - input: ref('my_model_b') + format: csv + fixture: test_my_model_b_fixture + expect: + format: csv + rows: | + string_c + ab + config: + tags: test_this +""" + +# unit tests with errors + +# -- fixture file doesn't exist +test_my_model_missing_csv_yml = """ +unit_tests: + - name: test_missing_csv_file + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + fixture: fake_fixture +""" + +test_my_model_duplicate_csv_yml = """ +unit_tests: + - name: test_missing_csv_file + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + fixture: test_my_model_basic_fixture +""" + +test_model_a_b_yml = """ +unit_tests: + - name: my_test_name + model: my_model_a + given: [] + expect: + rows: + - {a: 1, id: 1, not_testing: 2, string_a: "a", date_a: "2020-01-02"} + + - name: my_test_name + model: my_model_b + given: 
[] + expect: + rows: + - {b: 2, id: 1, c: 2, string_b: "b"} +""" + +test_model_a_with_duplicate_test_name_yml = """ +unit_tests: + - name: my_test_name + model: my_model_a + given: [] + expect: + rows: + - {a: 1, id: 1, not_testing: 2, string_a: "a", date_a: "2020-01-02"} + + - name: my_test_name + model: my_model_a + given: [] + expect: + rows: + - {a: 1, id: 1, not_testing: 2, string_a: "a", date_a: "2020-01-02"} +""" + +test_my_model_yml_invalid = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: "a"} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: + - {c: 3} +""" + +test_my_model_yml_invalid_ref = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_x') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: + - {c: 3} +""" + +# -- unit testing versioned models +my_model_v1_sql = """ +SELECT +a, +b, +a+b as c, +concat(string_a, string_b) as string_c, +not_testing, date_a +FROM {{ ref('my_model_a')}} my_model_a +JOIN {{ ref('my_model_b' )}} my_model_b +ON my_model_a.id = my_model_b.id +""" + +my_model_v2_sql = """ +SELECT +a, +b, +a+b as c, +concat(string_a, string_b) as string_c, +date_a +FROM {{ ref('my_model_a')}} my_model_a +JOIN {{ ref('my_model_b' )}} my_model_b +ON my_model_a.id = my_model_b.id +""" + +my_model_v3_sql = """ +SELECT +a, +b, +a+b as c, +concat(string_a, string_b) as string_c +FROM {{ ref('my_model_a')}} my_model_a +JOIN {{ ref('my_model_b' )}} my_model_b +ON my_model_a.id = my_model_b.id +""" + +my_model_versioned_yml = """ +models: + - name: my_model + latest_version: 1 + access: public + config: + contract: + enforced: true + columns: + - name: a + data_type: integer + - name: b + data_type: integer + - name: c + data_type: integer + - name: string_c + data_type: string + - name: not_testing + data_type: integer + - name: date_a + data_type: date + versions: + - v: 1 + - v: 2 + columns: + # This means: use the 'columns' list from above, but exclude not_testing + - include: "all" + exclude: + - not_testing + - v: 3 + # now exclude another column + columns: + - include: all + exclude: + - not_testing + - date_a +""" + +my_model_versioned_no_2_yml = """ +models: + - name: my_model + latest_version: 1 + access: public + config: + contract: + enforced: true + columns: + - name: a + data_type: integer + - name: b + data_type: integer + - name: c + data_type: integer + - name: string_c + data_type: string + - name: not_testing + data_type: integer + - name: date_a + data_type: date + versions: + - v: 1 + - v: 3 + # now exclude another column + columns: + - include: all + exclude: + - not_testing + - date_a +""" + +test_my_model_all_versions_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + 2,3 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + rows: | + a,b,c + 1,2,3 + 3,2,5 +""" + +test_my_model_exclude_versions_yml = """ +unit_tests: + - name: test_my_model + model: my_model + versions: + exclude: + - 2 + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + 2,3 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + rows: | + a,b,c + 1,2,3 + 3,2,5 +""" + +test_my_model_include_versions_yml = """ +unit_tests: + - name: test_my_model + model: my_model + 
versions: + include: + - 2 + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + 2,3 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + rows: | + a,b,c + 1,2,3 + 3,2,5 +""" + +test_my_model_include_exclude_versions_yml = """ +unit_tests: + - name: test_my_model + model: my_model + versions: + include: + - 2 + exclude: + - 3 + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1,1 + 2,3 + - input: ref('my_model_b') + format: csv + rows: | + id,b + 1,2 + 2,2 + expect: + format: csv + rows: | + a,b,c + 1,2,3 + 3,2,5 +""" + +test_my_model_include_unversioned_yml = """ +unit_tests: + - name: test_my_model + model: my_model + versions: + include: + - 2 + given: + - input: ref('my_model_a') + rows: + - {id: 1, a: 1} + - input: ref('my_model_b') + rows: + - {id: 1, b: 2} + - {id: 2, b: 2} + expect: + rows: + - {c: 2} +""" + +my_model_version_ref_sql = """ + select * from {{ ref('my_model', version=2) }} +""" + +test_my_model_version_ref_yml = """ +unit_tests: + - name: test_my_model_version_ref + model: my_model_version_ref + given: + - input: ref('my_model', version=2) + rows: + - {c: 2} + expect: + rows: + - {c: 2} +""" + + +# -- unit testing external models +top_level_domains_sql = """ +SELECT 'example.com' AS tld +UNION ALL +SELECT 'gmail.com' AS tld +""" + +valid_emails_sql = """ +WITH +accounts AS ( + SELECT user_id, email, email_top_level_domain + FROM {{ ref('external_package', 'external_model')}} +), +top_level_domains AS ( + SELECT tld FROM {{ ref('top_level_domains')}} +), +joined AS ( + SELECT + accounts.user_id as user_id, + top_level_domains.tld as tld + FROM accounts + LEFT OUTER JOIN top_level_domains + ON accounts.email_top_level_domain = top_level_domains.tld +) + +SELECT + joined.user_id as user_id, + CASE WHEN joined.tld IS NULL THEN FALSE ELSE TRUE END AS is_valid_email_address +from joined +""" + +external_package__accounts_seed_csv = """user_id,email,email_top_level_domain +1,"example@example.com","example.com" +""" + +external_package__external_model_sql = """ +SELECT user_id, email, email_top_level_domain FROM {{ ref('accounts_seed') }} +""" + + +external_package_project_yml = """ +name: external_package +version: '1.0' +config-version: 2 + +model-paths: ["models"] # paths to models +analysis-paths: ["analyses"] # path with analysis files which are compiled, but not run +target-path: "target" # path for compiled code +clean-targets: ["target"] # directories removed by the clean task +test-paths: ["tests"] # where to store test results +seed-paths: ["seeds"] # load CSVs from this directory with `dbt seed` +macro-paths: ["macros"] # where to find macros + +profile: user + +models: + external_package: +""" + + +@pytest.fixture(scope="class") +def external_package(): + return { + "dbt_project.yml": external_package_project_yml, + "seeds": {"accounts_seed.csv": external_package__accounts_seed_csv}, + "models": { + "external_model.sql": external_package__external_model_sql, + }, + } + + +model_select_1_sql = """ +select 1 as id +""" + +model_select_2_sql = """ +select 2 as id +""" + +test_expect_2_yml = """ +unit_tests: + - name: test_my_model + model: my_model + given: [] + expect: + rows: + - {id: 2} +""" + + +test_my_model_csv_null_yml = """ +unit_tests: + - name: test_my_model_check_null + model: my_model_check_null + given: + - input: ref('my_model_a') + format: csv + rows: | + id,a + 1, + 2,3 + expect: + format: csv + rows: | + a_is_null + True + False +""" + 
+test_my_model_file_csv_null_yml = """ +unit_tests: + - name: test_my_model_check_null + model: my_model_check_null + given: + - input: ref('my_model_a') + format: csv + fixture: test_my_model_a_with_null_fixture + expect: + format: csv + rows: | + a_is_null + True + False +""" diff --git a/tests/functional/unit_testing/test_csv_fixtures.py b/tests/functional/unit_testing/test_csv_fixtures.py new file mode 100644 index 00000000000..11a910edfb6 --- /dev/null +++ b/tests/functional/unit_testing/test_csv_fixtures.py @@ -0,0 +1,299 @@ +import pytest +from fixtures import ( + datetime_test, + datetime_test_invalid_csv_values, + datetime_test_invalid_format_key, + my_model_a_sql, + my_model_b_sql, + my_model_check_null_sql, + my_model_sql, + test_my_model_a_empty_fixture_csv, + test_my_model_a_fixture_csv, + test_my_model_a_numeric_fixture_csv, + test_my_model_a_with_null_fixture_csv, + test_my_model_b_fixture_csv, + test_my_model_basic_fixture_csv, + test_my_model_concat_fixture_csv, + test_my_model_csv_null_yml, + test_my_model_csv_yml, + test_my_model_duplicate_csv_yml, + test_my_model_file_csv_null_yml, + test_my_model_file_csv_yml, + test_my_model_fixture_csv, + test_my_model_missing_csv_yml, + test_my_model_mixed_csv_yml, +) + +from dbt.exceptions import DuplicateResourceNameError, ParsingError, YamlParseDictError +from dbt.tests.util import rm_file, run_dbt, write_file + + +class TestUnitTestsWithInlineCSV: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_csv_yml + datetime_test, + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + # Check error with invalid format key + write_file( + test_my_model_csv_yml + datetime_test_invalid_format_key, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(YamlParseDictError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + # Check error with csv format defined but dict on rows + write_file( + test_my_model_csv_yml + datetime_test_invalid_csv_values, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + +class TestUnitTestsWithFileCSV: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_file_csv_yml + datetime_test, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "test_my_model_fixture.csv": test_my_model_fixture_csv, + "test_my_model_a_fixture.csv": test_my_model_a_fixture_csv, + "test_my_model_b_fixture.csv": test_my_model_b_fixture_csv, + "test_my_model_basic_fixture.csv": test_my_model_basic_fixture_csv, + "test_my_model_a_numeric_fixture.csv": test_my_model_a_numeric_fixture_csv, + "test_my_model_a_empty_fixture.csv": test_my_model_a_empty_fixture_csv, + "test_my_model_concat_fixture.csv": test_my_model_concat_fixture_csv, + } + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + manifest = run_dbt(["parse"]) # Note: this manifest is deserialized from msgpack + fixture = 
manifest.fixtures["fixture.test.test_my_model_a_fixture"] + fixture_source_file = manifest.files[fixture.file_id] + assert fixture_source_file.fixture == "fixture.test.test_my_model_a_fixture" + assert fixture_source_file.unit_tests == [ + "unit_test.test.my_model.test_my_model_string_concat" + ] + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + # Check partial parsing remove fixture file + rm_file(project.project_root, "tests", "fixtures", "test_my_model_a_fixture.csv") + with pytest.raises( + ParsingError, + match="File not found for fixture 'test_my_model_a_fixture' in unit tests", + ): + run_dbt(["test", "--select", "my_model"], expect_pass=False) + # put back file and check that it works + write_file( + test_my_model_a_fixture_csv, + project.project_root, + "tests", + "fixtures", + "test_my_model_a_fixture.csv", + ) + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + # Now update file + write_file( + test_my_model_a_fixture_csv + "2,2", + project.project_root, + "tests", + "fixtures", + "test_my_model_a_fixture.csv", + ) + manifest = run_dbt(["parse"]) + fixture = manifest.fixtures["fixture.test.test_my_model_a_fixture"] + fixture_source_file = manifest.files[fixture.file_id] + assert "2,2" in fixture_source_file.contents + assert fixture.rows == [{"id": "1", "string_a": "a"}, {"id": "2", "string_a": "2"}] + + # Check error with invalid format key + write_file( + test_my_model_file_csv_yml + datetime_test_invalid_format_key, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(YamlParseDictError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + # Check error with csv format defined but dict on rows + write_file( + test_my_model_file_csv_yml + datetime_test_invalid_csv_values, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + +class TestUnitTestsWithMixedCSV: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_mixed_csv_yml + datetime_test, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "test_my_model_fixture.csv": test_my_model_fixture_csv, + "test_my_model_a_fixture.csv": test_my_model_a_fixture_csv, + "test_my_model_b_fixture.csv": test_my_model_b_fixture_csv, + "test_my_model_basic_fixture.csv": test_my_model_basic_fixture_csv, + "test_my_model_a_numeric_fixture.csv": test_my_model_a_numeric_fixture_csv, + "test_my_model_a_empty_fixture.csv": test_my_model_a_empty_fixture_csv, + "test_my_model_concat_fixture.csv": test_my_model_concat_fixture_csv, + } + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + # Check error with invalid format key + write_file( + test_my_model_mixed_csv_yml + datetime_test_invalid_format_key, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(YamlParseDictError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + # Check error with csv format defined but dict on rows + write_file( + test_my_model_mixed_csv_yml + 
datetime_test_invalid_csv_values, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(ParsingError): + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + + +class TestUnitTestsInlineCSVEmptyValueIsNull: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": my_model_a_sql, + "my_model_check_null.sql": my_model_check_null_sql, + "test_my_model_csv_null.yml": test_my_model_csv_null_yml, + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 2 + + # Select by model name + results = run_dbt(["test", "--select", "my_model_check_null"], expect_pass=True) + assert len(results) == 1 + + +class TestUnitTestsFileCSVEmptyValueIsNull: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": my_model_a_sql, + "my_model_check_null.sql": my_model_check_null_sql, + "test_my_model_file_csv_null.yml": test_my_model_file_csv_null_yml, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "test_my_model_a_with_null_fixture.csv": test_my_model_a_with_null_fixture_csv, + } + } + + def test_unit_test(self, project): + results = run_dbt(["run"]) + assert len(results) == 2 + + # Select by model name + results = run_dbt(["test", "--select", "my_model_check_null"], expect_pass=True) + assert len(results) == 1 + + +class TestUnitTestsMissingCSVFile: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_missing_csv_yml, + } + + def test_missing(self, project): + with pytest.raises(ParsingError): + run_dbt(["run"]) + + +class TestUnitTestsDuplicateCSVFile: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_duplicate_csv_yml, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "one-folder": { + "test_my_model_basic_fixture.csv": test_my_model_basic_fixture_csv, + }, + "another-folder": { + "test_my_model_basic_fixture.csv": test_my_model_basic_fixture_csv, + }, + } + } + + def test_duplicate(self, project): + with pytest.raises(DuplicateResourceNameError): + run_dbt(["run"]) diff --git a/tests/functional/unit_testing/test_sql_format.py b/tests/functional/unit_testing/test_sql_format.py new file mode 100644 index 00000000000..700008eaf9c --- /dev/null +++ b/tests/functional/unit_testing/test_sql_format.py @@ -0,0 +1,246 @@ +import pytest + +from dbt.tests.util import run_dbt + +wizards_csv = """id,w_name,email,email_tld,phone,world +1,Albus Dumbledore,a.dumbledore@gmail.com,gmail.com,813-456-9087,1 +2,Gandalf,gandy811@yahoo.com,yahoo.com,551-329-8367,2 +3,Winifred Sanderson,winnie@hocuspocus.com,hocuspocus.com,,6 +4,Marnie Piper,cromwellwitch@gmail.com,gmail.com,,5 +5,Grace Goheen,grace.goheen@dbtlabs.com,dbtlabs.com,,3 +6,Glinda,glinda_good@hotmail.com,hotmail.com,912-458-3289,4 +""" + +top_level_email_domains_csv = """tld +gmail.com +yahoo.com +hocuspocus.com +dbtlabs.com +hotmail.com +""" + +worlds_csv = """id,name +1,The Wizarding World +2,Middle-earth +3,dbt Labs +4,Oz +5,Halloweentown +6,Salem +""" + +stg_wizards_sql = """ +select + id as wizard_id, + w_name as wizard_name, + email, + email_tld as email_top_level_domain, + phone as phone_number, + world as world_id +from {{ ref('wizards') }} +""" + 
+stg_worlds_sql = """ +select + id as world_id, + name as world_name +from {{ ref('worlds') }} +""" + +dim_wizards_sql = """ +with wizards as ( + + select * from {{ ref('stg_wizards') }} + +), + +worlds as ( + + select * from {{ ref('stg_worlds') }} + +), + +accepted_email_domains as ( + + select * from {{ ref('top_level_email_domains') }} + +), + +check_valid_emails as ( + + select + wizards.wizard_id, + wizards.wizard_name, + wizards.email, + wizards.phone_number, + wizards.world_id, + + coalesce ( + wizards.email ~ '^[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}$' + = true + and accepted_email_domains.tld is not null, + false) as is_valid_email_address + + from wizards + left join accepted_email_domains + on wizards.email_top_level_domain = lower(accepted_email_domains.tld) + +) + +select + check_valid_emails.wizard_id, + check_valid_emails.wizard_name, + check_valid_emails.email, + check_valid_emails.is_valid_email_address, + check_valid_emails.phone_number, + worlds.world_name +from check_valid_emails +left join worlds + on check_valid_emails.world_id = worlds.world_id +""" + +orig_schema_yml = """ +unit_tests: + - name: test_valid_email_address + model: dim_wizards + given: + - input: ref('stg_wizards') + rows: + - {email: cool@example.com, email_top_level_domain: example.com} + - {email: cool@unknown.com, email_top_level_domain: unknown.com} + - {email: badgmail.com, email_top_level_domain: gmail.com} + - {email: missingdot@gmailcom, email_top_level_domain: gmail.com} + - input: ref('top_level_email_domains') + rows: + - {tld: example.com} + - {tld: gmail.com} + - input: ref('stg_worlds') + rows: [] + expect: + rows: + - {email: cool@example.com, is_valid_email_address: true} + - {email: cool@unknown.com, is_valid_email_address: false} + - {email: badgmail.com, is_valid_email_address: false} + - {email: missingdot@gmailcom, is_valid_email_address: false} +""" + +schema_yml = """ +unit_tests: + - name: test_valid_email_address + model: dim_wizards + given: + - input: ref('stg_wizards') + format: sql + rows: | + select 1 as wizard_id, 'joe' as wizard_name, 'cool@example.com' as email, 'example.com' as email_top_level_domain, '123' as phone_number, 1 as world_id union all + select 2 as wizard_id, 'don' as wizard_name, 'cool@unknown.com' as email, 'unknown.com' as email_top_level_domain, '456' as phone_number, 2 as world_id union all + select 3 as wizard_id, 'mary' as wizard_name, 'badgmail.com' as email, 'gmail.com' as email_top_level_domain, '789' as phone_number, 3 as world_id union all + select 4 as wizard_id, 'jane' as wizard_name, 'missingdot@gmailcom' as email, 'gmail.com' as email_top_level_domain, '102' as phone_number, 4 as world_id + - input: ref('top_level_email_domains') + format: sql + rows: | + select 'example.com' as tld union all + select 'gmail.com' as tld + - input: ref('stg_worlds') + rows: [] + expect: + format: sql + rows: | + select 1 as wizard_id, 'joe' as wizard_name, 'cool@example.com' as email, true as is_valid_email_address, '123' as phone_number, null as world_name union all + select 2 as wizard_id, 'don' as wizard_name, 'cool@unknown.com' as email, false as is_valid_email_address, '456' as phone_number, null as world_name union all + select 3 as wizard_id, 'mary' as wizard_name, 'badgmail.com' as email, false as is_valid_email_address, '789' as phone_number, null as world_name union all + select 4 as wizard_id, 'jane' as wizard_name, 'missingdot@gmailcom' as email, false as is_valid_email_address, '102' as phone_number, null as world_name +""" + + +class 
TestSQLFormat: + @pytest.fixture(scope="class") + def seeds(self): + return { + "wizards.csv": wizards_csv, + "top_level_email_domains.csv": top_level_email_domains_csv, + "worlds.csv": worlds_csv, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "stg_wizards.sql": stg_wizards_sql, + "stg_worlds.sql": stg_worlds_sql, + "dim_wizards.sql": dim_wizards_sql, + "schema.yml": schema_yml, + } + + def test_sql_format(self, project): + results = run_dbt(["build"]) + assert len(results) == 7 + + +stg_wizards_fixture_sql = """ + select 1 as wizard_id, 'joe' as wizard_name, 'cool@example.com' as email, 'example.com' as email_top_level_domain, '123' as phone_number, 1 as world_id union all + select 2 as wizard_id, 'don' as wizard_name, 'cool@unknown.com' as email, 'unknown.com' as email_top_level_domain, '456' as phone_number, 2 as world_id union all + select 3 as wizard_id, 'mary' as wizard_name, 'badgmail.com' as email, 'gmail.com' as email_top_level_domain, '789' as phone_number, 3 as world_id union all + select 4 as wizard_id, 'jane' as wizard_name, 'missingdot@gmailcom' as email, 'gmail.com' as email_top_level_domain, '102' as phone_number, 4 as world_id +""" + +top_level_email_domains_fixture_sql = """ + select 'example.com' as tld union all + select 'gmail.com' as tld +""" + +test_valid_email_address_fixture_sql = """ + select 1 as wizard_id, 'joe' as wizard_name, 'cool@example.com' as email, true as is_valid_email_address, '123' as phone_number, null as world_name union all + select 2 as wizard_id, 'don' as wizard_name, 'cool@unknown.com' as email, false as is_valid_email_address, '456' as phone_number, null as world_name union all + select 3 as wizard_id, 'mary' as wizard_name, 'badgmail.com' as email, false as is_valid_email_address, '789' as phone_number, null as world_name union all + select 4 as wizard_id, 'jane' as wizard_name, 'missingdot@gmailcom' as email, false as is_valid_email_address, '102' as phone_number, null as world_name +""" + +fixture_schema_yml = """ +unit_tests: + - name: test_valid_email_address + model: dim_wizards + given: + - input: ref('stg_wizards') + format: sql + fixture: stg_wizards_fixture + - input: ref('top_level_email_domains') + format: sql + fixture: top_level_email_domains_fixture + - input: ref('stg_worlds') + rows: [] + expect: + format: sql + fixture: test_valid_email_address_fixture +""" + + +class TestSQLFormatFixtures: + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "test_valid_email_address_fixture.sql": test_valid_email_address_fixture_sql, + "top_level_email_domains_fixture.sql": top_level_email_domains_fixture_sql, + "stg_wizards_fixture.sql": stg_wizards_fixture_sql, + } + } + + @pytest.fixture(scope="class") + def seeds(self): + return { + "wizards.csv": wizards_csv, + "top_level_email_domains.csv": top_level_email_domains_csv, + "worlds.csv": worlds_csv, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "stg_wizards.sql": stg_wizards_sql, + "stg_worlds.sql": stg_worlds_sql, + "dim_wizards.sql": dim_wizards_sql, + "schema.yml": fixture_schema_yml, + } + + def test_sql_format_fixtures(self, project): + results = run_dbt(["build"]) + assert len(results) == 7 diff --git a/tests/functional/unit_testing/test_state.py b/tests/functional/unit_testing/test_state.py new file mode 100644 index 00000000000..cf87f49b406 --- /dev/null +++ b/tests/functional/unit_testing/test_state.py @@ -0,0 +1,151 @@ +import os +import shutil +from copy import deepcopy + +import pytest +from 
fixtures import ( + model_select_1_sql, + model_select_2_sql, + my_model_a_sql, + my_model_b_sql, + my_model_vars_sql, + test_expect_2_yml, +) +from fixtures import test_my_model_b_fixture_csv as test_my_model_fixture_csv_modified +from fixtures import test_my_model_fixture_csv, test_my_model_simple_fixture_yml + +from dbt.tests.util import run_dbt, write_config_file, write_file + + +class UnitTestState: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_vars_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_simple_fixture_yml, + } + + @pytest.fixture(scope="class") + def tests(self): + return { + "fixtures": { + "test_my_model_fixture.csv": test_my_model_fixture_csv, + } + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"vars": {"my_test": "my_test_var"}} + + def copy_state(self, project_root): + state_path = os.path.join(project_root, "state") + if not os.path.exists(state_path): + os.makedirs(state_path) + shutil.copyfile( + f"{project_root}/target/manifest.json", f"{project_root}/state/manifest.json" + ) + shutil.copyfile( + f"{project_root}/target/run_results.json", f"{project_root}/state/run_results.json" + ) + + +class TestUnitTestStateModified(UnitTestState): + def test_state_modified(self, project): + run_dbt(["run"]) + run_dbt(["test"], expect_pass=False) + self.copy_state(project.project_root) + + # no changes + results = run_dbt(["test", "--select", "state:modified", "--state", "state"]) + assert len(results) == 0 + + # change underlying fixture file + write_file( + test_my_model_fixture_csv_modified, + project.project_root, + "tests", + "fixtures", + "test_my_model_fixture.csv", + ) + results = run_dbt( + ["test", "--select", "state:modified", "--state", "state"], expect_pass=True + ) + assert len(results) == 1 + assert results[0].node.name.endswith("test_depends_on_fixture") + # reset changes + self.copy_state(project.project_root) + + # change unit test definition of a single unit test + with_changes = test_my_model_simple_fixture_yml.replace("{string_c: ab}", "{string_c: bc}") + write_config_file(with_changes, project.project_root, "models", "test_my_model.yml") + results = run_dbt( + ["test", "--select", "state:modified", "--state", "state"], expect_pass=False + ) + assert len(results) == 1 + assert results[0].node.name.endswith("test_has_string_c_ab") + + # change underlying model logic + write_config_file( + test_my_model_simple_fixture_yml, project.project_root, "models", "test_my_model.yml" + ) + write_file( + my_model_vars_sql.replace("a+b as c,", "a + b as c,"), + project.project_root, + "models", + "my_model.sql", + ) + results = run_dbt( + ["test", "--select", "state:modified", "--state", "state"], expect_pass=False + ) + assert len(results) == 4 + + +class TestUnitTestRetry(UnitTestState): + def test_unit_test_retry(self, project): + run_dbt(["run"]) + run_dbt(["test"], expect_pass=False) + self.copy_state(project.project_root) + + results = run_dbt(["retry"], expect_pass=False) + assert len(results) == 1 + + +class TestUnitTestDeferState(UnitTestState): + @pytest.fixture(scope="class") + def other_schema(self, unique_schema): + return unique_schema + "_other" + + @pytest.fixture(scope="class") + def profiles_config_update(self, dbt_profile_target, unique_schema, other_schema): + outputs = {"default": dbt_profile_target, "otherschema": deepcopy(dbt_profile_target)} + outputs["default"]["schema"] = unique_schema + 
outputs["otherschema"]["schema"] = other_schema + return {"test": {"outputs": outputs, "target": "default"}} + + def test_unit_test_defer_state(self, project): + run_dbt(["run", "--target", "otherschema"]) + self.copy_state(project.project_root) + results = run_dbt(["test", "--defer", "--state", "state"], expect_pass=False) + assert len(results) == 4 + assert sorted([r.status for r in results]) == ["fail", "pass", "pass", "pass"] + + +class TestUnitTestDeferDoesntOverwrite(UnitTestState): + @pytest.fixture(scope="class") + def models(self): + return {"my_model.sql": model_select_1_sql, "test_my_model.yml": test_expect_2_yml} + + def test_unit_test_defer_state(self, project): + run_dbt(["test"], expect_pass=False) + self.copy_state(project.project_root) + write_file( + model_select_2_sql, + project.project_root, + "models", + "my_model.sql", + ) + results = run_dbt(["test", "--defer", "--state", "state"]) + assert len(results) == 1 + assert sorted([r.status for r in results]) == ["pass"] diff --git a/tests/functional/unit_testing/test_unit_testing.py b/tests/functional/unit_testing/test_unit_testing.py new file mode 100644 index 00000000000..53cfc84f4bf --- /dev/null +++ b/tests/functional/unit_testing/test_unit_testing.py @@ -0,0 +1,515 @@ +import os +from unittest import mock + +import pytest +from fixtures import ( # noqa: F401 + datetime_test, + event_sql, + external_package, + external_package__accounts_seed_csv, + my_incremental_model_sql, + my_model_a_sql, + my_model_b_sql, + my_model_sql, + my_model_vars_sql, + test_my_model_incremental_yml_basic, + test_my_model_incremental_yml_no_override, + test_my_model_incremental_yml_no_this_input, + test_my_model_incremental_yml_wrong_override, + test_my_model_yml, + test_my_model_yml_invalid, + test_my_model_yml_invalid_ref, + top_level_domains_sql, + valid_emails_sql, +) + +from dbt.contracts.results import NodeStatus +from dbt.exceptions import DuplicateResourceNameError, ParsingError +from dbt.plugins.manifest import ModelNodeArgs, PluginNodes +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import ( + file_exists, + get_manifest, + read_file, + run_dbt, + run_dbt_and_capture, + write_file, +) +from tests.unit.utils import normalize + + +class TestUnitTests: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_vars_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_yml + datetime_test, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"vars": {"my_test": "my_test_var"}} + + def test_basic(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + results = run_dbt( + ["build", "--select", "my_model", "--resource-types", "model unit_test"], + expect_pass=False, + ) + assert len(results) == 6 + for result in results: + if result.node.unique_id == "model.test.my_model": + result.status == NodeStatus.Skipped + + # Run build command but specify no unit tests + results = run_dbt( + ["build", "--select", "my_model", "--exclude-resource-types", "unit_test"], + expect_pass=True, + ) + assert len(results) == 1 + + # Exclude unit tests with environment variable for build command + os.environ["DBT_EXCLUDE_RESOURCE_TYPES"] = "unit_test" + results = run_dbt(["build", "--select", "my_model"], expect_pass=True) + assert len(results) == 1 + + # 
Exclude unit tests with environment variable for test command + results = run_dbt(["test", "--select", "my_model"], expect_pass=True) + assert len(results) == 0 + + # Exclude unit tests with environment variable for list command + results = run_dbt(["list", "--select", "my_model"], expect_pass=True) + assert len(results) == 1 + + del os.environ["DBT_EXCLUDE_RESOURCE_TYPES"] + + # Test select by test name + results = run_dbt(["test", "--select", "test_name:test_my_model_string_concat"]) + assert len(results) == 1 + + # Select, method not specified + results = run_dbt(["test", "--select", "test_my_model_overrides"]) + assert len(results) == 1 + + # Select using tag + results = run_dbt(["test", "--select", "tag:test_this"]) + assert len(results) == 1 + + # Partial parsing... remove test + write_file(test_my_model_yml, project.project_root, "models", "test_my_model.yml") + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 4 + + # Partial parsing... put back removed test + write_file( + test_my_model_yml + datetime_test, project.project_root, "models", "test_my_model.yml" + ) + results = run_dbt(["test", "--select", "my_model"], expect_pass=False) + assert len(results) == 5 + + manifest = get_manifest(project.project_root) + assert len(manifest.unit_tests) == 5 + # Every unit test has a depends_on to the model it tests + for unit_test_definition in manifest.unit_tests.values(): + assert unit_test_definition.depends_on.nodes[0] == "model.test.my_model" + + # Check for duplicate unit test name + # this doesn't currently pass with partial parsing because of the root problem + # described in https://github.com/dbt-labs/dbt-core/issues/8982 + write_file( + test_my_model_yml + datetime_test + datetime_test, + project.project_root, + "models", + "test_my_model.yml", + ) + with pytest.raises(DuplicateResourceNameError): + run_dbt(["run", "--no-partial-parse", "--select", "my_model"]) + + +class TestUnitTestIncrementalModelBasic: + @pytest.fixture(scope="class") + def models(self): + return { + "my_incremental_model.sql": my_incremental_model_sql, + "events.sql": event_sql, + "schema.yml": test_my_model_incremental_yml_basic, + } + + def test_basic(self, project): + results = run_dbt(["run"]) + assert len(results) == 2 + + # Select by model name + results = run_dbt(["test", "--select", "my_incremental_model"], expect_pass=True) + assert len(results) == 2 + + +class TestUnitTestIncrementalModelNoOverride: + @pytest.fixture(scope="class") + def models(self): + return { + "my_incremental_model.sql": my_incremental_model_sql, + "events.sql": event_sql, + "schema.yml": test_my_model_incremental_yml_no_override, + } + + def test_no_override(self, project): + with pytest.raises( + ParsingError, + match="Boolean override for 'is_incremental' must be provided for unit test 'incremental_false' in model 'my_incremental_model'", + ): + run_dbt(["parse"]) + + +class TestUnitTestIncrementalModelWrongOverride: + @pytest.fixture(scope="class") + def models(self): + return { + "my_incremental_model.sql": my_incremental_model_sql, + "events.sql": event_sql, + "schema.yml": test_my_model_incremental_yml_wrong_override, + } + + def test_str_override(self, project): + with pytest.raises( + ParsingError, + match="Boolean override for 'is_incremental' must be provided for unit test 'incremental_false' in model 'my_incremental_model'", + ): + run_dbt(["parse"]) + + +class TestUnitTestIncrementalModelNoThisInput: + @pytest.fixture(scope="class") + def models(self): + return { + 
"my_incremental_model.sql": my_incremental_model_sql, + "events.sql": event_sql, + "schema.yml": test_my_model_incremental_yml_no_this_input, + } + + def test_no_this_input(self, project): + with pytest.raises( + ParsingError, + match="Unit test 'incremental_true' for incremental model 'my_incremental_model' must have a 'this' input", + ): + run_dbt(["parse"]) + + +my_new_model = """ +select +my_favorite_seed.id, +a + b as c +from {{ ref('my_favorite_seed') }} as my_favorite_seed +inner join {{ ref('my_favorite_model') }} as my_favorite_model +on my_favorite_seed.id = my_favorite_model.id +""" + +my_favorite_model = """ +select +2 as id, +3 as b +""" + +seed_my_favorite_seed = """id,a +1,5 +2,4 +3,3 +4,2 +5,1 +""" + +schema_yml_explicit_seed = """ +unit_tests: + - name: t + model: my_new_model + given: + - input: ref('my_favorite_seed') + rows: + - {id: 1, a: 10} + - input: ref('my_favorite_model') + rows: + - {id: 1, b: 2} + expect: + rows: + - {id: 1, c: 12} +""" + +schema_yml_implicit_seed = """ +unit_tests: + - name: t + model: my_new_model + given: + - input: ref('my_favorite_seed') + - input: ref('my_favorite_model') + rows: + - {id: 1, b: 2} + expect: + rows: + - {id: 1, c: 7} +""" + +schema_yml_nonexistent_seed = """ +unit_tests: + - name: t + model: my_new_model + given: + - input: ref('my_second_favorite_seed') + - input: ref('my_favorite_model') + rows: + - {id: 1, b: 2} + expect: + rows: + - {id: 1, c: 7} +""" + + +class TestUnitTestExplicitSeed: + @pytest.fixture(scope="class") + def seeds(self): + return {"my_favorite_seed.csv": seed_my_favorite_seed} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_new_model.sql": my_new_model, + "my_favorite_model.sql": my_favorite_model, + "schema.yml": schema_yml_explicit_seed, + } + + def test_explicit_seed(self, project): + run_dbt(["seed"]) + run_dbt(["run"]) + + # Select by model name + results = run_dbt(["test", "--select", "my_new_model"], expect_pass=True) + assert len(results) == 1 + + +class TestUnitTestImplicitSeed: + @pytest.fixture(scope="class") + def seeds(self): + return {"my_favorite_seed.csv": seed_my_favorite_seed} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_new_model.sql": my_new_model, + "my_favorite_model.sql": my_favorite_model, + "schema.yml": schema_yml_implicit_seed, + } + + def test_implicit_seed(self, project): + run_dbt(["seed"]) + run_dbt(["run"]) + + # Select by model name + results = run_dbt(["test", "--select", "my_new_model"], expect_pass=True) + assert len(results) == 1 + + +class TestUnitTestNonexistentSeed: + @pytest.fixture(scope="class") + def seeds(self): + return {"my_favorite_seed.csv": seed_my_favorite_seed} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_new_model.sql": my_new_model, + "my_favorite_model.sql": my_favorite_model, + "schema.yml": schema_yml_nonexistent_seed, + } + + def test_nonexistent_seed(self, project): + with pytest.raises( + ParsingError, match="Unable to find seed 'test.my_second_favorite_seed' for unit tests" + ): + run_dbt(["test", "--select", "my_new_model"], expect_pass=False) + + +class TestUnitTestInvalidInputConfiguration: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_yml_invalid, + } + + def test_invalid_input_configuration(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + # A data type in a given row is 
incorrect, and we'll get a runtime error + run_dbt(["test"], expect_pass=False) + + # Test invalid model ref. Parsing error InvalidUnitTestGivenInput + write_file( + test_my_model_yml_invalid_ref, project.project_root, "models", "test_my_model.yml" + ) + results = run_dbt(["test"], expect_pass=False) + result = results.results[0] + assert "not found in the manifest" in result.message + + +unit_test_ext_node_yml = """ +unit_tests: + - name: unit_test_ext_node + model: valid_emails + given: + - input: ref('external_package', 'external_model') + rows: + - {user_id: 1, email: cool@example.com, email_top_level_domain: example.com} + - {user_id: 2, email: cool@unknown.com, email_top_level_domain: unknown.com} + - {user_id: 3, email: badgmail.com, email_top_level_domain: gmail.com} + - {user_id: 4, email: missingdot@gmailcom, email_top_level_domain: gmail.com} + - input: ref('top_level_domains') + rows: + - {tld: example.com} + - {tld: gmail.com} + expect: + rows: + - {user_id: 1, is_valid_email_address: true} + - {user_id: 2, is_valid_email_address: false} + - {user_id: 3, is_valid_email_address: true} + - {user_id: 4, is_valid_email_address: true} +""" + + +class TestUnitTestExternalPackageNode: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root, external_package): # noqa: F811 + write_project_files(project_root, "external_package", external_package) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "external_package"}]} + + @pytest.fixture(scope="class") + def models(self): + return { + "top_level_domains.sql": top_level_domains_sql, + "valid_emails.sql": valid_emails_sql, + "unit_test_ext_node.yml": unit_test_ext_node_yml, + } + + def test_unit_test_ext_nodes( + self, + project, + ): + # `deps` to install the external package + run_dbt(["deps"], expect_pass=True) + # `seed` need so a table exists for `external_model` to point to + run_dbt(["seed"], expect_pass=True) + # `run` needed to ensure `top_level_domains` exists in database for column getting step + run_dbt(["run"], expect_pass=True) + results = run_dbt(["test", "--select", "valid_emails"], expect_pass=True) + assert len(results) == 1 + + +class TestUnitTestExternalProjectNode: + @pytest.fixture(scope="class") + def external_model_node(self, unique_schema): + return ModelNodeArgs( + name="external_model", + package_name="external_package", + identifier="external_node_seed", + schema=unique_schema, + ) + + @pytest.fixture(scope="class") + def seeds(self): + return {"external_node_seed.csv": external_package__accounts_seed_csv} + + @pytest.fixture(scope="class") + def models(self): + return { + "top_level_domains.sql": top_level_domains_sql, + "valid_emails.sql": valid_emails_sql, + "unit_test_ext_node.yml": unit_test_ext_node_yml, + } + + @mock.patch("dbt.plugins.get_plugin_manager") + def test_unit_test_ext_nodes( + self, + get_plugin_manager, + project, + external_model_node, + ): + # initial plugin - one external model + external_nodes = PluginNodes() + external_nodes.add_model(external_model_node) + get_plugin_manager.return_value.get_nodes.return_value = external_nodes + + # `seed` need so a table exists for `external_model` to point to + run_dbt(["seed"], expect_pass=True) + # `run` needed to ensure `top_level_domains` exists in database for column getting step + run_dbt(["run"], expect_pass=True) + results = run_dbt(["test", "--select", "valid_emails"], expect_pass=True) + assert len(results) == 1 + + +subfolder_model_a_sql = """select 1 as id, 'blue' as color""" 
+ +subfolder_model_b_sql = """ +select + id, + color +from {{ ref('model_a') }} +""" + +subfolder_my_model_yml = """ +unit_tests: + - name: my_unit_test + model: model_b + given: + - input: ref('model_a') + rows: + - { id: 1, color: 'blue' } + expect: + rows: + - { id: 1, color: 'red' } +""" + + +class TestUnitTestSubfolderPath: + @pytest.fixture(scope="class") + def models(self): + return { + "subfolder": { + "model_a.sql": subfolder_model_a_sql, + "model_b.sql": subfolder_model_b_sql, + "my_model.yml": subfolder_my_model_yml, + } + } + + def test_subfolder_unit_test(self, project): + results, output = run_dbt_and_capture(["build"], expect_pass=False) + + # Test that input fixture doesn't overwrite the original model + assert ( + read_file("target/compiled/test/models/subfolder/model_a.sql").strip() + == subfolder_model_a_sql.strip() + ) + + # Test that correct path is written in logs + assert ( + normalize( + "target/compiled/test/models/subfolder/my_model.yml/models/subfolder/my_unit_test.sql" + ) + in output + ) + assert file_exists( + normalize( + "target/compiled/test/models/subfolder/my_model.yml/models/subfolder/my_unit_test.sql" + ) + ) diff --git a/tests/functional/unit_testing/test_ut_adapter_hooks.py b/tests/functional/unit_testing/test_ut_adapter_hooks.py new file mode 100644 index 00000000000..a2f496752e2 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_adapter_hooks.py @@ -0,0 +1,75 @@ +from unittest import mock + +import pytest + +from dbt.tests.util import run_dbt, run_dbt_and_capture +from dbt_common.exceptions import CompilationError +from tests.functional.unit_testing.fixtures import ( + my_model_a_sql, + my_model_b_sql, + my_model_sql, + test_my_model_pass_yml, +) + + +class BaseUnitTestAdapterHook: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_pass_yml, + } + + +class TestUnitTestAdapterPreHook(BaseUnitTestAdapterHook): + def test_unit_test_runs_adapter_pre_hook_passes(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + mock_pre_model_hook = mock.Mock() + with mock.patch.object(type(project.adapter), "pre_model_hook", mock_pre_model_hook): + results = run_dbt(["test", "--select", "test_name:test_my_model"], expect_pass=True) + + assert len(results) == 1 + mock_pre_model_hook.assert_called_once() + + def test_unit_test_runs_adapter_pre_hook_fails(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + mock_pre_model_hook = mock.Mock() + mock_pre_model_hook.side_effect = CompilationError("exception from adapter.pre_model_hook") + with mock.patch.object(type(project.adapter), "pre_model_hook", mock_pre_model_hook): + (_, log_output) = run_dbt_and_capture( + ["test", "--select", "test_name:test_my_model"], expect_pass=False + ) + assert "exception from adapter.pre_model_hook" in log_output + + +class TestUnitTestAdapterPostHook(BaseUnitTestAdapterHook): + def test_unit_test_runs_adapter_post_hook_pass(self, project): + results = run_dbt(["run"]) + assert len(results) == 3 + + mock_post_model_hook = mock.Mock() + with mock.patch.object(type(project.adapter), "post_model_hook", mock_post_model_hook): + results = run_dbt(["test", "--select", "test_name:test_my_model"], expect_pass=True) + + assert len(results) == 1 + mock_post_model_hook.assert_called_once() + + def test_unit_test_runs_adapter_post_hook_fails(self, project): + results = run_dbt(["run"]) + assert 
len(results) == 3 + + mock_post_model_hook = mock.Mock() + mock_post_model_hook.side_effect = CompilationError( + "exception from adapter.post_model_hook" + ) + with mock.patch.object(type(project.adapter), "post_model_hook", mock_post_model_hook): + (_, log_output) = run_dbt_and_capture( + ["test", "--select", "test_name:test_my_model"], expect_pass=False + ) + assert "exception from adapter.post_model_hook" in log_output diff --git a/tests/functional/unit_testing/test_ut_dependency.py b/tests/functional/unit_testing/test_ut_dependency.py new file mode 100644 index 00000000000..32e5a0111b7 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_dependency.py @@ -0,0 +1,115 @@ +import pytest + +from dbt.tests.fixtures.project import write_project_files +from dbt.tests.util import get_unique_ids_in_results, run_dbt + +local_dependency__dbt_project_yml = """ + +name: 'local_dep' +version: '1.0' + +seeds: + quote_columns: False + +""" + +local_dependency__schema_yml = """ +sources: + - name: seed_source + schema: "{{ var('schema_override', target.schema) }}" + tables: + - name: "seed" + columns: + - name: id + data_tests: + - unique + +unit_tests: + - name: test_dep_model_id + model: dep_model + given: + - input: ref('seed') + rows: + - {id: 1, name: Joe} + expect: + rows: + - {name_id: Joe_1} + + +""" + +local_dependency__dep_model_sql = """ +select name || '_' || id as name_id from {{ ref('seed') }} + +""" + +local_dependency__seed_csv = """id,name +1,Mary +2,Sam +3,John +""" + +my_model_sql = """ +select * from {{ ref('dep_model') }} +""" + +my_model_schema_yml = """ +unit_tests: + - name: test_my_model_name_id + model: my_model + given: + - input: ref('dep_model') + rows: + - {name_id: Joe_1} + expect: + rows: + - {name_id: Joe_1} +""" + + +class TestUnitTestingInDependency: + @pytest.fixture(scope="class", autouse=True) + def setUp(self, project_root): + local_dependency_files = { + "dbt_project.yml": local_dependency__dbt_project_yml, + "models": { + "schema.yml": local_dependency__schema_yml, + "dep_model.sql": local_dependency__dep_model_sql, + }, + "seeds": {"seed.csv": local_dependency__seed_csv}, + } + write_project_files(project_root, "local_dependency", local_dependency_files) + + @pytest.fixture(scope="class") + def packages(self): + return {"packages": [{"local": "local_dependency"}]} + + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "schema.yml": my_model_schema_yml, + } + + def test_unit_test_in_dependency(self, project): + run_dbt(["deps"]) + run_dbt(["seed"]) + results = run_dbt(["run"]) + assert len(results) == 2 + + results = run_dbt(["test"]) + assert len(results) == 3 + unique_ids = get_unique_ids_in_results(results) + assert "unit_test.local_dep.dep_model.test_dep_model_id" in unique_ids + + results = run_dbt(["test", "--select", "test_type:unit"]) + # two unit tests, 1 in root package, one in local_dep package + assert len(results) == 2 + + results = run_dbt(["test", "--select", "local_dep"]) + # 2 tests in local_dep package + assert len(results) == 2 + + results = run_dbt(["test", "--select", "test"]) + # 1 test in root package + assert len(results) == 1 diff --git a/tests/functional/unit_testing/test_ut_diffing.py b/tests/functional/unit_testing/test_ut_diffing.py new file mode 100644 index 00000000000..dd23ea25fdb --- /dev/null +++ b/tests/functional/unit_testing/test_ut_diffing.py @@ -0,0 +1,113 @@ +import pytest + +from dbt.tests.util import run_dbt + +my_input_model = """ +SELECT 1 as id, 'some string' as 
status +""" + +my_model = """ +SELECT * FROM {{ ref("my_input_model") }} +""" + +test_my_model_order_insensitive = """ +unit_tests: + - name: unordered_no_nulls + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 1, "status": 'B'} + - {"id": 2, "status": 'B'} + - {"id": 3, "status": 'A'} + expect: + rows: + - {"id": 3, "status": 'A'} + - {"id": 2, "status": 'B'} + - {"id": 1, "status": 'B'} + + - name: unordered_with_nulls + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + - {"id": 3, "status": 'A'} + expect: + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + + - name: unordered_with_nulls_2 + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + expect: + rows: + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + - {"id": 3, "status": 'A'} + + - name: unordered_with_nulls_mixed_columns + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": 1, "status": } + expect: + rows: + - {"id": 1, "status": } + - {"id": , "status": 'B'} + - {"id": 3, "status": 'A'} + + - name: unordered_with_null + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + expect: + rows: + - {"id": , "status": 'B'} + - {"id": 3, "status": 'A'} + + - name: ordered_with_nulls + model: my_model + given: + - input: ref("my_input_model") + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} + expect: + rows: + - {"id": 3, "status": 'A'} + - {"id": , "status": 'B'} + - {"id": , "status": 'B'} +""" + + +class TestUnitTestingDiffIsOrderAgnostic: + @pytest.fixture(scope="class") + def models(self): + return { + "my_input_model.sql": my_input_model, + "my_model.sql": my_model, + "test_my_model.yml": test_my_model_order_insensitive, + } + + def test_unit_testing_diff_is_order_insensitive(self, project): + run_dbt(["run"]) + + # Select by model name + results = run_dbt(["test", "--select", "my_model"], expect_pass=True) + assert len(results) == 6 diff --git a/tests/functional/unit_testing/test_ut_ephemeral.py b/tests/functional/unit_testing/test_ut_ephemeral.py new file mode 100644 index 00000000000..cb2de2d2596 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_ephemeral.py @@ -0,0 +1,84 @@ +import pytest + +from dbt.contracts.results import RunStatus, TestStatus +from dbt.tests.util import run_dbt, write_file + +ephemeral_model_sql = """ +{{ config(materialized="ephemeral") }} +select 1 as id, 'Emily' as first_name +""" + +nested_ephemeral_model_sql = """ +{{ config(materialized="ephemeral") }} +select * from {{ ref('ephemeral_model') }} +""" + +customers_sql = """ +select * from {{ ref('nested_ephemeral_model') }} +""" + +test_sql_format_yml = """ +unit_tests: + - name: test_customers + model: customers + given: + - input: ref('nested_ephemeral_model') + format: sql + rows: | + select 1 as id, 'Emily' as first_name + expect: + rows: + - {id: 1, first_name: Emily} +""" + +failing_test_sql_format_yml = """ + - name: fail_test_customers + model: customers + given: + - input: ref('nested_ephemeral_model') + format: sql + rows: | + select 1 as id, 'Emily' as first_name + expect: + rows: + - {id: 1, first_name: Joan} +""" + + +class TestUnitTestEphemeralInput: + @pytest.fixture(scope="class") + 
def models(self): + return { + "customers.sql": customers_sql, + "ephemeral_model.sql": ephemeral_model_sql, + "nested_ephemeral_model.sql": nested_ephemeral_model_sql, + "tests.yml": test_sql_format_yml, + } + + def test_ephemeral_input(self, project): + results = run_dbt(["run"]) + len(results) == 1 + + results = run_dbt(["test", "--select", "test_type:unit"]) + assert len(results) == 1 + + results = run_dbt(["build"]) + assert len(results) == 2 + result_unique_ids = [result.node.unique_id for result in results] + assert len(result_unique_ids) == 2 + assert "unit_test.test.customers.test_customers" in result_unique_ids + + # write failing unit test + write_file( + test_sql_format_yml + failing_test_sql_format_yml, + project.project_root, + "models", + "tests.yml", + ) + results = run_dbt(["build"], expect_pass=False) + for result in results: + if result.node.unique_id == "model.test.customers": + assert result.status == RunStatus.Skipped + elif result.node.unique_id == "unit_test.test.customers.fail_test_customers": + assert result.status == TestStatus.Fail + assert len(results) == 3 diff --git a/tests/functional/unit_testing/test_ut_list.py b/tests/functional/unit_testing/test_ut_list.py new file mode 100644 index 00000000000..0b4f263909b --- /dev/null +++ b/tests/functional/unit_testing/test_ut_list.py @@ -0,0 +1,83 @@ +import json +import os + +import pytest +from fixtures import ( # noqa: F401 + datetime_test, + my_model_a_sql, + my_model_b_sql, + my_model_vars_sql, + test_my_model_yml, +) + +from dbt.tests.util import run_dbt + + +class TestUnitTestList: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_vars_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_yml + datetime_test, + } + + @pytest.fixture(scope="class") + def project_config_update(self): + return {"vars": {"my_test": "my_test_var"}} + + def test_unit_test_list(self, project): + # make sure things are working + results = run_dbt(["run"]) + assert len(results) == 3 + results = run_dbt(["test"], expect_pass=False) + assert len(results) == 5 + + results = run_dbt(["list"]) + expected = [ + "test.my_model", + "test.my_model_a", + "test.my_model_b", + "unit_test:test.test_my_model", + "unit_test:test.test_my_model_datetime", + "unit_test:test.test_my_model_empty", + "unit_test:test.test_my_model_overrides", + "unit_test:test.test_my_model_string_concat", + ] + assert sorted(results) == sorted(expected) + + results = run_dbt(["list", "--select", "test_type:unit"]) + assert len(results) == 5 + + # Check json result + results = run_dbt(["list", "--select", "test_type:unit", "--output", "json"]) + expected_test_my_model = { + "name": "test_my_model", + "resource_type": "unit_test", + "package_name": "test", + "original_file_path": os.path.join("models", "test_my_model.yml"), + "unique_id": "unit_test.test.my_model.test_my_model", + "depends_on": {"macros": [], "nodes": ["model.test.my_model"]}, + "config": {"tags": [], "meta": {}}, + } + for result in results: + json_result = json.loads(result) + if "name" in json_result and json_result["name"] == "test_my_model": + assert json_result == expected_test_my_model + + results = run_dbt( + [ + "list", + "--select", + "test_type:unit", + "--output", + "json", + "--output-keys", + "unique_id", + "model", + ] + ) + for result in results: + json_result = json.loads(result) + assert json_result["model"] == "my_model" diff --git a/tests/functional/unit_testing/test_ut_names.py 
b/tests/functional/unit_testing/test_ut_names.py new file mode 100644 index 00000000000..a2de3764da4 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_names.py @@ -0,0 +1,72 @@ +import pytest +from fixtures import ( + my_model_a_sql, + my_model_b_sql, + test_model_a_b_yml, + test_model_a_with_duplicate_test_name_yml, +) + +from dbt.exceptions import DuplicateResourceNameError +from dbt.tests.util import run_dbt, run_dbt_and_capture + + +class TestUnitTestDuplicateTestNamesAcrossModels: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_model_a_b.yml": test_model_a_b_yml, + } + + def test_duplicate_test_names_across_models(self, project): + results = run_dbt(["run"]) + assert len(results) == 2 + + # Select duplicate tests + results, log_output = run_dbt_and_capture(["test"], expect_pass=True) + assert len(results) == 2 + assert {"model.test.my_model_a", "model.test.my_model_b"} == { + result.node.tested_node_unique_id for result in results + } + assert "my_model_a::my_test_name" in log_output + assert "my_model_b::my_test_name" in log_output + + # Test select duplicates by by test name + results = run_dbt(["test", "--select", "test_name:my_test_name"]) + assert len(results) == 2 + assert {"model.test.my_model_a", "model.test.my_model_b"} == { + result.node.tested_node_unique_id for result in results + } + assert "my_model_a::my_test_name" in log_output + assert "my_model_b::my_test_name" in log_output + + results = run_dbt(["test", "--select", "my_model_a,test_name:my_test_name"]) + assert len(results) == 1 + assert results[0].node.tested_node_unique_id == "model.test.my_model_a" + + results = run_dbt(["test", "--select", "my_model_b,test_name:my_test_name"]) + assert len(results) == 1 + assert results[0].node.tested_node_unique_id == "model.test.my_model_b" + + # Test select by model name + results = run_dbt(["test", "--select", "my_model_a"]) + assert len(results) == 1 + assert results[0].node.tested_node_unique_id == "model.test.my_model_a" + + results = run_dbt(["test", "--select", "my_model_b"]) + assert len(results) == 1 + assert results[0].node.tested_node_unique_id == "model.test.my_model_b" + + +class TestUnitTestDuplicateTestNamesWithinModel: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": my_model_a_sql, + "test_model_a.yml": test_model_a_with_duplicate_test_name_yml, + } + + def test_duplicate_test_names_within_model(self, project): + with pytest.raises(DuplicateResourceNameError): + run_dbt(["run"]) diff --git a/tests/functional/unit_testing/test_ut_overrides.py b/tests/functional/unit_testing/test_ut_overrides.py new file mode 100644 index 00000000000..466ceadc437 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_overrides.py @@ -0,0 +1,69 @@ +import pytest + +from dbt.tests.util import run_dbt + +my_model_with_macros = """ +SELECT +{{ current_timestamp() }} as global_current_timestamp, +{{ dbt.current_timestamp() }} as dbt_current_timestamp, +{{ dbt.type_int() }} as dbt_type_int, +{{ my_macro() }} as user_defined_my_macro, +{{ dbt_utils.generate_surrogate_key() }} as package_defined_macro +""" + +test_my_model_with_macros = """ +unit_tests: + - name: test_macro_overrides + model: my_model_with_macros + overrides: + macros: + current_timestamp: "'current_timestamp_override'" + dbt.type_int: "'dbt_macro_override'" + my_macro: "'global_user_defined_macro_override'" + dbt_utils.generate_surrogate_key: "'package_macro_override'" + given: 
[] + expect: + rows: + - global_current_timestamp: "current_timestamp_override" + dbt_current_timestamp: "current_timestamp_override" + dbt_type_int: "dbt_macro_override" + user_defined_my_macro: "global_user_defined_macro_override" + package_defined_macro: "package_macro_override" +""" + +MY_MACRO_SQL = """ +{% macro my_macro() -%} + {{ test }} +{%- endmacro %} +""" + + +class TestUnitTestingMacroOverrides: + @pytest.fixture(scope="class") + def packages(self): + return { + "packages": [ + { + "package": "dbt-labs/dbt_utils", + "version": "1.1.1", + }, + ] + } + + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_with_macros.sql": my_model_with_macros, + "test_my_model_with_macros.yml": test_my_model_with_macros, + } + + @pytest.fixture(scope="class") + def macros(self): + return {"my_macro.sql": MY_MACRO_SQL} + + def test_macro_overrides(self, project): + run_dbt(["deps"]) + + # Select by model name + results = run_dbt(["test", "--select", "my_model_with_macros"], expect_pass=True) + assert len(results) == 1 diff --git a/tests/functional/unit_testing/test_ut_resource_types.py b/tests/functional/unit_testing/test_ut_resource_types.py new file mode 100644 index 00000000000..09f64bdd061 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_resource_types.py @@ -0,0 +1,91 @@ +import pytest +from fixtures import ( # noqa: F401 + my_model_a_sql, + my_model_b_sql, + my_model_sql, + test_my_model_a_yml, + test_my_model_pass_yml, +) + +from dbt.tests.util import run_dbt + +EXPECTED_MODELS = [ + "test.my_model", + "test.my_model_a", + "test.my_model_b", +] + +EXPECTED_DATA_TESTS = [ + "test.not_null_my_model_a_a", + "test.not_null_my_model_a_id", +] + +EXPECTED_UNIT_TESTS = [ + "unit_test:test.test_my_model", +] + + +class TestUnitTestResourceTypes: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model.sql": my_model_sql, + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "test_my_model.yml": test_my_model_pass_yml, + "test_my_model_a.yml": test_my_model_a_yml, + } + + def test_unit_test_list(self, project): + results = run_dbt(["run"]) + + # unit tests + results = run_dbt(["list", "--resource-type", "unit_test"]) + assert sorted(results) == EXPECTED_UNIT_TESTS + + results = run_dbt(["list", "--exclude-resource-types", "model", "test"]) + assert sorted(results) == EXPECTED_UNIT_TESTS + + results = run_dbt(["test", "--resource-type", "unit_test"]) + assert len(results) == len(EXPECTED_UNIT_TESTS) + + results = run_dbt(["test", "--exclude-resource-types", "model", "test"]) + assert len(results) == len(EXPECTED_UNIT_TESTS) + + # data tests + results = run_dbt(["list", "--resource-type", "test"]) + assert sorted(results) == EXPECTED_DATA_TESTS + + results = run_dbt(["list", "--exclude-resource-types", "unit_test", "model"]) + assert sorted(results) == EXPECTED_DATA_TESTS + + results = run_dbt(["test", "--resource-type", "test"]) + assert len(results) == len(EXPECTED_DATA_TESTS) + + results = run_dbt(["test", "--exclude-resource-types", "unit_test", "model"]) + assert len(results) == len(EXPECTED_DATA_TESTS) + + results = run_dbt(["build", "--resource-type", "test"]) + assert len(results) == len(EXPECTED_DATA_TESTS) + + results = run_dbt(["build", "--exclude-resource-types", "unit_test", "model"]) + assert len(results) == len(EXPECTED_DATA_TESTS) + + # models + results = run_dbt(["list", "--resource-type", "model"]) + assert sorted(results) == EXPECTED_MODELS + + results = run_dbt(["list", "--exclude-resource-type", "unit_test", 
"test"]) + assert sorted(results) == EXPECTED_MODELS + + results = run_dbt(["test", "--resource-type", "model"]) + assert len(results) == 0 + + results = run_dbt(["test", "--exclude-resource-types", "unit_test", "test"]) + assert len(results) == 0 + + results = run_dbt(["build", "--resource-type", "model"]) + assert len(results) == len(EXPECTED_MODELS) + + results = run_dbt(["build", "--exclude-resource-type", "unit_test", "test"]) + assert len(results) == len(EXPECTED_MODELS) diff --git a/tests/functional/unit_testing/test_ut_snapshot_dependency.py b/tests/functional/unit_testing/test_ut_snapshot_dependency.py new file mode 100644 index 00000000000..a76fe1a39f4 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_snapshot_dependency.py @@ -0,0 +1,164 @@ +import pytest + +from dbt.contracts.results import RunStatus, TestStatus +from dbt.tests.util import run_dbt + +raw_customers_csv = """id,first_name,last_name,email,gender,ip_address,updated_at +1,'Judith','Kennedy','(not provided)','Female','54.60.24.128','2015-12-24 12:19:28' +2,'Arthur','Kelly','(not provided)','Male','62.56.24.215','2015-10-28 16:22:15' +3,'Rachel','Moreno','rmoreno2@msu.edu','Female','31.222.249.23','2016-04-05 02:05:30' +4,'Ralph','Turner','rturner3@hp.com','Male','157.83.76.114','2016-08-08 00:06:51' +5,'Laura','Gonzales','lgonzales4@howstuffworks.com','Female','30.54.105.168','2016-09-01 08:25:38' +6,'Katherine','Lopez','klopez5@yahoo.co.jp','Female','169.138.46.89','2016-08-30 18:52:11' +7,'Jeremy','Hamilton','jhamilton6@mozilla.org','Male','231.189.13.133','2016-07-17 02:09:46' +""" + +top_level_domains_csv = """id,domain +3,'msu.edu' +4,'hp.com' +5,'howstuffworks.com' +6,'yahoo.co.jp' +7,'mozilla.org' +""" + +snapshots_users__snapshot_sql = """ +{% snapshot snapshot_users %} + + {{ + config( + target_database=var('target_database', database), + target_schema=schema, + unique_key='id || ' ~ "'-'" ~ ' || first_name', + strategy='check', + check_cols=['email'], + ) + }} + select *, split_part(email, '@', 2) as domain from {{target.database}}.{{schema}}.raw_customers + +{% endsnapshot %} +""" + +unit_test_yml = """ +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: top_level_domains + columns: + - name: id + - name: domain + +unit_tests: + - name: test_is_valid_email_address + model: customers + given: + - input: ref('snapshot_users') + rows: + - {id: 1, email: cool@example.com, domain: example.com} + - {id: 2, email: cool@unknown.com, domain: unknown.com} + - {id: 3, email: badgmail.com, domain: gmailcom} + - {id: 4, email: missingdot@gmailcom, domain: gmailcom} + - input: source('seed_sources', 'top_level_domains') + rows: + - {domain: example.com} + - {domain: gmail.com} + expect: + rows: + - {id: 1, is_valid_email_address: true} + - {id: 2, is_valid_email_address: false} + - {id: 3, is_valid_email_address: false} + - {id: 4, is_valid_email_address: false} + + - name: fail_is_valid_email_address + model: customers + given: + - input: ref('snapshot_users') + rows: + - {id: 1, email: cool@example.com, domain: example.com} + - input: source('seed_sources', 'top_level_domains') + rows: + - {domain: example.com} + - {domain: gmail.com} + expect: + rows: + - {id: 1, is_valid_email_address: false} +""" + +customers_sql = """ +with snapshot_users as ( +select * from {{ ref('snapshot_users') }} +), + +top_level_domains as ( +select * from {{ source('seed_sources', 'top_level_domains') }} +), +matched_values as ( + select + snapshot_users.*, + case when exists ( + select 1 from 
top_level_domains
+            where top_level_domains.domain = snapshot_users.domain
+        ) then true else false end as is_valid_email_address
+    from
+        snapshot_users
+)
+
+select * from matched_values
+"""
+
+
+class TestUnitTestSnapshotDependency:
+    @pytest.fixture(scope="class")
+    def seeds(self):
+        return {
+            "raw_customers.csv": raw_customers_csv,
+            "top_level_domains.csv": top_level_domains_csv,
+        }
+
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "customers.sql": customers_sql,
+            "unit_tests.yml": unit_test_yml,
+        }
+
+    @pytest.fixture(scope="class")
+    def snapshots(self):
+        return {
+            "snapshot_users.sql": snapshots_users__snapshot_sql,
+        }
+
+    def test_snapshot_dependency(self, project):
+        seed_results = run_dbt(["seed"])
+        assert len(seed_results) == 2
+        snapshot_results = run_dbt(["snapshot"])
+        assert len(snapshot_results) == 1
+        model_results = run_dbt(["run"])
+        assert len(model_results) == 1
+
+        # test passing unit test
+        results = run_dbt(["test", "--select", "test_name:test_is_valid_email_address"])
+        assert len(results) == 1
+
+        # test failing unit test
+        results = run_dbt(
+            ["test", "--select", "test_name:fail_is_valid_email_address"], expect_pass=False
+        )
+        assert len(results) == 1
+        assert results[0].status == TestStatus.Fail
+
+        # test all with build
+        results = run_dbt(["build"], expect_pass=False)
+
+        for result in results:
+            if result.node.unique_id == "unit_test.test.customers.fail_is_valid_email_address":
+                # This will always fail, regardless of execution order
+                assert result.status == TestStatus.Fail
+            elif result.node.unique_id == "unit_test.test.customers.test_is_valid_email_address":
+                # There's no guarantee that the order of the results will be the same. If the
+                # failed test runs first this one gets skipped. If this runs first it passes.
+ assert result.status in [TestStatus.Pass, TestStatus.Skipped] + elif result.node.unique_id == "model.test.customers": + # This is always skipped because one test always fails + assert result.status == RunStatus.Skipped + assert len(results) == 6 diff --git a/tests/functional/unit_testing/test_ut_sources.py b/tests/functional/unit_testing/test_ut_sources.py new file mode 100644 index 00000000000..61a45f70f76 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_sources.py @@ -0,0 +1,104 @@ +import pytest + +from dbt.contracts.results import RunStatus, TestStatus +from dbt.tests.util import run_dbt, write_file + +raw_customers_csv = """id,first_name,last_name,email +1,Michael,Perez,mperez0@chronoengine.com +2,Shawn,Mccoy,smccoy1@reddit.com +3,Kathleen,Payne,kpayne2@cargocollective.com +4,Jimmy,Cooper,jcooper3@cargocollective.com +5,Katherine,Rice,krice4@typepad.com +6,Sarah,Ryan,sryan5@gnu.org +7,Martin,Mcdonald,mmcdonald6@opera.com +8,Frank,Robinson,frobinson7@wunderground.com +9,Jennifer,Franklin,jfranklin8@mail.ru +10,Henry,Welch,hwelch9@list-manage.com +""" + +schema_sources_yml = """ +sources: + - name: seed_sources + schema: "{{ target.schema }}" + tables: + - name: raw_customers + columns: + - name: id + data_tests: + - not_null: + severity: "{{ 'error' if target.name == 'prod' else 'warn' }}" + - unique + - name: first_name + - name: last_name + - name: email +unit_tests: + - name: test_customers + model: customers + given: + - input: source('seed_sources', 'raw_customers') + rows: + - {id: 1, first_name: Emily} + expect: + rows: + - {id: 1, first_name: Emily} +""" + +customers_sql = """ +select * from {{ source('seed_sources', 'raw_customers') }} +""" + +failing_test_schema_yml = """ + - name: fail_test_customers + model: customers + given: + - input: source('seed_sources', 'raw_customers') + rows: + - {id: 1, first_name: Emily} + expect: + rows: + - {id: 1, first_name: Joan} +""" + + +class TestUnitTestSourceInput: + @pytest.fixture(scope="class") + def seeds(self): + return { + "raw_customers.csv": raw_customers_csv, + } + + @pytest.fixture(scope="class") + def models(self): + return { + "customers.sql": customers_sql, + "sources.yml": schema_sources_yml, + } + + def test_source_input(self, project): + results = run_dbt(["seed"]) + results = run_dbt(["run"]) + len(results) == 1 + + results = run_dbt(["test", "--select", "test_type:unit"]) + assert len(results) == 1 + + results = run_dbt(["build"]) + assert len(results) == 5 + result_unique_ids = [result.node.unique_id for result in results] + assert len(result_unique_ids) == 5 + assert "unit_test.test.customers.test_customers" in result_unique_ids + + # write failing unit test + write_file( + schema_sources_yml + failing_test_schema_yml, + project.project_root, + "models", + "sources.yml", + ) + results = run_dbt(["build"], expect_pass=False) + for result in results: + if result.node.unique_id == "model.test.customers": + assert result.status == RunStatus.Skipped + elif result.node.unique_id == "unit_test.test.customers.fail_test_customers": + assert result.status == TestStatus.Fail + assert len(results) == 6 diff --git a/tests/functional/unit_testing/test_ut_versions.py b/tests/functional/unit_testing/test_ut_versions.py new file mode 100644 index 00000000000..059688ce473 --- /dev/null +++ b/tests/functional/unit_testing/test_ut_versions.py @@ -0,0 +1,165 @@ +import pytest + +from dbt.exceptions import ParsingError, YamlParseDictError +from dbt.tests.util import get_unique_ids_in_results, run_dbt, write_file +from 
tests.functional.unit_testing.fixtures import ( + my_model_a_sql, + my_model_b_sql, + my_model_sql, + my_model_v1_sql, + my_model_v2_sql, + my_model_v3_sql, + my_model_version_ref_sql, + my_model_versioned_no_2_yml, + my_model_versioned_yml, + test_my_model_all_versions_yml, + test_my_model_exclude_versions_yml, + test_my_model_include_exclude_versions_yml, + test_my_model_include_unversioned_yml, + test_my_model_include_versions_yml, + test_my_model_version_ref_yml, +) + + +# test with no version specified, then add an exclude version, then switch +# to include version and make sure the right unit tests are generated for each +class TestVersions: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "my_model_v1.sql": my_model_v1_sql, + "my_model_v2.sql": my_model_v2_sql, + "my_model_v3.sql": my_model_v3_sql, + "schema.yml": my_model_versioned_yml, + "unit_tests.yml": test_my_model_all_versions_yml, + } + + def test_versions(self, project): + results = run_dbt(["run"]) + assert len(results) == 5 + + # "my_model" has three versions: 1, 2, 3 + # There is a single unit_test which doesn't specify a version, + # so it should run for all versions. + results = run_dbt(["test"]) + assert len(results) == 3 + unique_ids = get_unique_ids_in_results(results) + expected_ids = [ + "unit_test.test.my_model.test_my_model_v1", + "unit_test.test.my_model.test_my_model_v2", + "unit_test.test.my_model.test_my_model_v3", + ] + assert sorted(expected_ids) == sorted(unique_ids) + + # Select tests for a single versioned model + results = run_dbt(["test", "--select", "my_model.v2"]) + assert len(results) == 1 + unique_ids = get_unique_ids_in_results(results) + assert unique_ids == ["unit_test.test.my_model.test_my_model_v2"] + + # select tests for all my_models + results = run_dbt(["test", "--select", "my_model"]) + assert len(results) == 3 + unique_ids = get_unique_ids_in_results(results) + assert sorted(expected_ids) == sorted(unique_ids) + + # with an exclude version specified, should create a separate unit test + # for each version except the excluded version (v2) + write_file( + test_my_model_exclude_versions_yml, project.project_root, "models", "unit_tests.yml" + ) + + results = run_dbt(["test"]) + assert len(results) == 2 + unique_ids = get_unique_ids_in_results(results) + # v2 model should be excluded + expected_ids = [ + "unit_test.test.my_model.test_my_model_v1", + "unit_test.test.my_model.test_my_model_v3", + ] + assert sorted(expected_ids) == sorted(unique_ids) + + # test with an include version specified, should create a single unit test for + # only the version specified (2) + write_file( + test_my_model_include_versions_yml, project.project_root, "models", "unit_tests.yml" + ) + + results = run_dbt(["test"]) + assert len(results) == 1 + unique_ids = get_unique_ids_in_results(results) + # v2 model should be only one included + expected_ids = [ + "unit_test.test.my_model.test_my_model_v2", + ] + assert sorted(expected_ids) == sorted(unique_ids) + + # Change to remove version 2 of model and get an error + write_file(my_model_versioned_no_2_yml, project.project_root, "models", "schema.yml") + with pytest.raises(ParsingError): + run_dbt(["test"]) + + +# test with an include and exclude version specified, should raise an error +class TestIncludeExcludeSpecified: + @pytest.fixture(scope="class") + def models(self): + return { + "my_model_a.sql": my_model_a_sql, + "my_model_b.sql": my_model_b_sql, + "my_model_v1.sql": 
my_model_v1_sql,
+            "my_model_v2.sql": my_model_v2_sql,
+            "my_model_v3.sql": my_model_v3_sql,
+            "schema.yml": my_model_versioned_yml,
+            "unit_tests.yml": test_my_model_include_exclude_versions_yml,
+        }
+
+    def test_include_exclude_specified(self, project):
+        with pytest.raises(YamlParseDictError):
+            run_dbt(["parse"])
+
+
+# test with an include for an unversioned model, should error
+class TestIncludeUnversioned:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "my_model_a.sql": my_model_a_sql,
+            "my_model_b.sql": my_model_b_sql,
+            "my_model.sql": my_model_sql,
+            "unit_tests.yml": test_my_model_include_unversioned_yml,
+        }
+
+    def test_include_unversioned(self, project):
+        with pytest.raises(ParsingError):
+            run_dbt(["parse"])
+
+
+# test specifying the fixture version with {{ ref(name, version) }}
+class TestVersionedFixture:
+    @pytest.fixture(scope="class")
+    def models(self):
+        return {
+            "my_model_a.sql": my_model_a_sql,
+            "my_model_b.sql": my_model_b_sql,
+            "my_model_v1.sql": my_model_v1_sql,
+            "my_model_v2.sql": my_model_v2_sql,
+            "my_model_v3.sql": my_model_v3_sql,
+            "my_model_version_ref.sql": my_model_version_ref_sql,
+            "schema.yml": my_model_versioned_yml,
+            "unit_tests.yml": test_my_model_version_ref_yml,
+        }
+
+    def test_versioned_fixture(self, project):
+        results = run_dbt(["run"])
+        assert len(results) == 6
+
+        results = run_dbt(["test"])
+        assert len(results) == 1
+
+        unique_ids = get_unique_ids_in_results(results)
+        # only the unit test that pins the fixture version should be included
+        expected_ids = ["unit_test.test.my_model_version_ref.test_my_model_version_ref"]
+        assert expected_ids == unique_ids
diff --git a/tests/functional/utils.py b/tests/functional/utils.py
new file mode 100644
index 00000000000..93c79cf5345
--- /dev/null
+++ b/tests/functional/utils.py
@@ -0,0 +1,19 @@
+import os
+from contextlib import contextmanager
+from datetime import datetime
+from pathlib import Path
+from typing import Optional
+
+
+@contextmanager
+def up_one(return_path: Optional[Path] = None):
+    current_path = Path.cwd()
+    os.chdir("../")
+    try:
+        yield
+    finally:
+        os.chdir(return_path or current_path)
+
+
+def is_aware(dt: datetime) -> bool:
+    return dt.tzinfo is not None and dt.tzinfo.utcoffset(dt) is not None
diff --git a/tests/unit/README.md b/tests/unit/README.md
index 7fa09fa1d42..e063e370b48 100644
--- a/tests/unit/README.md
+++ b/tests/unit/README.md
@@ -1 +1,21 @@
 # Unit test README
+
+
+### The Why
+We need to ensure that we can go from objects to dictionaries and back without any
+changes. If some property or property value of an object gets dropped, added, or modified
+while transitioning between its different possible representations, that is problematic.
+
+### The How
+The easiest way to ensure things don't get dropped, added, or modified is to start
+with an object, dictify it, move back to an object, and then assert that everything
+is equivalent. There are many potential edge cases, though: optional fields, lists of
+things, and so on. To address this we use hypothesis, which builds multiple
+versions of the object we're interested in testing and runs each generated version
+of the object through the test. This gives us confidence that, for any allowable
+configuration of an object, state is not changed when moving back and forth between the
+Python object and the serialized version (a minimal sketch of this pattern appears at the end of this README).
+
+### The What
+
+- We test concrete classes in the codebase and do not test abstract classes as they are implementation details.
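The round-trip pattern described under "The How" can be illustrated with a small, self-contained sketch. Nothing below is part of this diff: the `Example` dataclass and its `to_dict`/`from_dict` helpers are hypothetical stand-ins for the artifact classes these tests actually exercise.

```python
from dataclasses import asdict, dataclass, field
from typing import List, Optional

from hypothesis import given
from hypothesis import strategies as st


@dataclass
class Example:
    """Hypothetical stand-in for a dbt artifact resource."""

    name: str
    tags: List[str] = field(default_factory=list)
    alias: Optional[str] = None

    def to_dict(self) -> dict:
        return asdict(self)

    @classmethod
    def from_dict(cls, data: dict) -> "Example":
        return cls(**data)


# hypothesis generates many Example instances (empty lists, None vs. set optionals, odd
# strings) and the property asserts that object -> dict -> object is lossless for all of them.
@given(
    st.builds(
        Example,
        name=st.text(),
        tags=st.lists(st.text()),
        alias=st.none() | st.text(),
    )
)
def test_round_trip_is_lossless(obj: Example) -> None:
    assert Example.from_dict(obj.to_dict()) == obj
```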
[reference](https://enterprisecraftsmanship.com/posts/how-to-unit-test-an-abstract-class/) diff --git a/tests/unit/artifacts/test_base_resource.py b/tests/unit/artifacts/test_base_resource.py new file mode 100644 index 00000000000..6809d524cd1 --- /dev/null +++ b/tests/unit/artifacts/test_base_resource.py @@ -0,0 +1,58 @@ +from dataclasses import dataclass + +import pytest + +from dbt.artifacts.resources.base import BaseResource +from dbt.artifacts.resources.types import NodeType + + +@dataclass +class BaseResourceWithDefaultField(BaseResource): + field_with_default: bool = True + + +class TestMinorSchemaChange: + @pytest.fixture + def base_resource(self): + return BaseResource( + name="test", + resource_type=NodeType.Model, + package_name="test_package", + path="test_path", + original_file_path="test_original_file_path", + unique_id="test_unique_id", + ) + + @pytest.fixture + def base_resource_new_default_field(self): + return BaseResourceWithDefaultField( + name="test", + resource_type=NodeType.Model, + package_name="test_package", + path="test_path", + original_file_path="test_original_file_path", + unique_id="test_unique_id", + field_with_default=False, + ) + + def test_serializing_new_default_field_is_backward_compatabile( + self, base_resource_new_default_field + ): + # old code (using old class) can create an instance of itself given new data (new class) + BaseResource.from_dict(base_resource_new_default_field.to_dict()) + + def test_serializing_new_default_field_is_forward_compatible(self, base_resource): + # new code (using new class) can create an instance of itself given old data (old class) + BaseResourceWithDefaultField.from_dict(base_resource.to_dict()) + + def test_serializing_removed_default_field_is_backward_compatabile(self, base_resource): + # old code (using old class with default field) can create an instance of itself given new data (class w/o default field) + old_resource = BaseResourceWithDefaultField.from_dict(base_resource.to_dict()) + # set to the default value when not provided in data + assert old_resource.field_with_default is True + + def test_serializing_removed_default_field_is_forward_compatible( + self, base_resource_new_default_field + ): + # new code (using class without default field) can create an instance of itself given old data (class with old field) + BaseResource.from_dict(base_resource_new_default_field.to_dict()) diff --git a/tests/unit/test_cli_flags.py b/tests/unit/cli/test_flags.py similarity index 67% rename from tests/unit/test_cli_flags.py rename to tests/unit/cli/test_flags.py index 8ff85dc144c..5169cae048b 100644 --- a/tests/unit/test_cli_flags.py +++ b/tests/unit/cli/test_flags.py @@ -1,25 +1,24 @@ -import pytest - -import click -from multiprocessing import get_context from pathlib import Path from typing import List, Optional +import click +import pytest + from dbt.cli.exceptions import DbtUsageException from dbt.cli.flags import Flags from dbt.cli.main import cli from dbt.cli.types import Command -from dbt.contracts.project import UserConfig -from dbt.exceptions import DbtInternalError -from dbt.helper_types import WarnErrorOptions +from dbt.contracts.project import ProjectFlags from dbt.tests.util import rm_file, write_file +from dbt_common.exceptions import DbtInternalError +from dbt_common.helper_types import WarnErrorOptions class TestFlags: def make_dbt_context( self, context_name: str, args: List[str], parent: Optional[click.Context] = None ) -> click.Context: - ctx = cli.make_context(context_name, args, parent) + ctx = 
cli.make_context(context_name, args.copy(), parent) return ctx @pytest.fixture(scope="class") @@ -27,17 +26,20 @@ def run_context(self) -> click.Context: return self.make_dbt_context("run", ["run"]) @pytest.fixture - def user_config(self) -> UserConfig: - return UserConfig() + def project_flags(self) -> ProjectFlags: + return ProjectFlags() + + def test_cli_args_unmodified(self): + args = ["--target", "my_target"] + args_before = args.copy() + self.make_dbt_context("context", args) + + assert args == args_before def test_which(self, run_context): flags = Flags(run_context) assert flags.WHICH == "run" - def test_mp_context(self, run_context): - flags = Flags(run_context) - assert flags.MP_CONTEXT == get_context("spawn") - @pytest.mark.parametrize("param", cli.params) def test_cli_group_flags_from_params(self, run_context, param): flags = Flags(run_context) @@ -110,35 +112,42 @@ def test_anonymous_usage_state( flags = Flags(run_context) assert flags.SEND_ANONYMOUS_USAGE_STATS == expected_anonymous_usage_stats - def test_empty_user_config_uses_default(self, run_context, user_config): - flags = Flags(run_context, user_config) + def test_resource_types(self, monkeypatch): + monkeypatch.setenv("DBT_RESOURCE_TYPES", "model") + build_context = self.make_dbt_context("build", ["build"]) + build_context.params["resource_types"] = ("unit_test",) + flags = Flags(build_context) + assert flags.resource_types == ("unit_test",) + + def test_empty_project_flags_uses_default(self, run_context, project_flags): + flags = Flags(run_context, project_flags) assert flags.USE_COLORS == run_context.params["use_colors"] - def test_none_user_config_uses_default(self, run_context): + def test_none_project_flags_uses_default(self, run_context): flags = Flags(run_context, None) assert flags.USE_COLORS == run_context.params["use_colors"] - def test_prefer_user_config_to_default(self, run_context, user_config): - user_config.use_colors = False + def test_prefer_project_flags_to_default(self, run_context, project_flags): + project_flags.use_colors = False # ensure default value is not the same as user config - assert run_context.params["use_colors"] is not user_config.use_colors + assert run_context.params["use_colors"] is not project_flags.use_colors - flags = Flags(run_context, user_config) - assert flags.USE_COLORS == user_config.use_colors + flags = Flags(run_context, project_flags) + assert flags.USE_COLORS == project_flags.use_colors - def test_prefer_param_value_to_user_config(self): - user_config = UserConfig(use_colors=False) + def test_prefer_param_value_to_project_flags(self): + project_flags = ProjectFlags(use_colors=False) context = self.make_dbt_context("run", ["--use-colors", "True", "run"]) - flags = Flags(context, user_config) + flags = Flags(context, project_flags) assert flags.USE_COLORS - def test_prefer_env_to_user_config(self, monkeypatch, user_config): - user_config.use_colors = False + def test_prefer_env_to_project_flags(self, monkeypatch, project_flags): + project_flags.use_colors = False monkeypatch.setenv("DBT_USE_COLORS", "True") context = self.make_dbt_context("run", ["run"]) - flags = Flags(context, user_config) + flags = Flags(context, project_flags) assert flags.USE_COLORS def test_mutually_exclusive_options_passed_separately(self): @@ -163,14 +172,14 @@ def test_mutually_exclusive_options_from_cli(self): Flags(context) @pytest.mark.parametrize("warn_error", [True, False]) - def test_mutually_exclusive_options_from_user_config(self, warn_error, user_config): - user_config.warn_error = 
warn_error + def test_mutually_exclusive_options_from_project_flags(self, warn_error, project_flags): + project_flags.warn_error = warn_error context = self.make_dbt_context( "run", ["--warn-error-options", '{"include": "all"}', "run"] ) with pytest.raises(DbtUsageException): - Flags(context, user_config) + Flags(context, project_flags) @pytest.mark.parametrize("warn_error", ["True", "False"]) def test_mutually_exclusive_options_from_envvar(self, warn_error, monkeypatch): @@ -182,14 +191,16 @@ def test_mutually_exclusive_options_from_envvar(self, warn_error, monkeypatch): Flags(context) @pytest.mark.parametrize("warn_error", [True, False]) - def test_mutually_exclusive_options_from_cli_and_user_config(self, warn_error, user_config): - user_config.warn_error = warn_error + def test_mutually_exclusive_options_from_cli_and_project_flags( + self, warn_error, project_flags + ): + project_flags.warn_error = warn_error context = self.make_dbt_context( "run", ["--warn-error-options", '{"include": "all"}', "run"] ) with pytest.raises(DbtUsageException): - Flags(context, user_config) + Flags(context, project_flags) @pytest.mark.parametrize("warn_error", ["True", "False"]) def test_mutually_exclusive_options_from_cli_and_envvar(self, warn_error, monkeypatch): @@ -202,15 +213,15 @@ def test_mutually_exclusive_options_from_cli_and_envvar(self, warn_error, monkey Flags(context) @pytest.mark.parametrize("warn_error", ["True", "False"]) - def test_mutually_exclusive_options_from_user_config_and_envvar( - self, user_config, warn_error, monkeypatch + def test_mutually_exclusive_options_from_project_flags_and_envvar( + self, project_flags, warn_error, monkeypatch ): - user_config.warn_error = warn_error + project_flags.warn_error = warn_error monkeypatch.setenv("DBT_WARN_ERROR_OPTIONS", '{"include": "all"}') context = self.make_dbt_context("run", ["run"]) with pytest.raises(DbtUsageException): - Flags(context, user_config) + Flags(context, project_flags) @pytest.mark.parametrize( "cli_colors,cli_colors_file,flag_colors,flag_colors_file", @@ -319,10 +330,10 @@ def test_log_format_interaction( assert flags.LOG_FORMAT_FILE == flag_log_format_file def test_log_settings_from_config(self): - """Test that values set in UserConfig for log settings will set flags as expected""" + """Test that values set in ProjectFlags for log settings will set flags as expected""" context = self.make_dbt_context("run", ["run"]) - config = UserConfig(log_format="json", log_level="warn", use_colors=False) + config = ProjectFlags(log_format="json", log_level="warn", use_colors=False) flags = Flags(context, config) @@ -334,11 +345,11 @@ def test_log_settings_from_config(self): assert flags.USE_COLORS_FILE is False def test_log_file_settings_from_config(self): - """Test that values set in UserConfig for log *file* settings will set flags as expected, leaving the console + """Test that values set in ProjectFlags for log *file* settings will set flags as expected, leaving the console logging flags with their default values""" context = self.make_dbt_context("run", ["run"]) - config = UserConfig(log_format_file="json", log_level_file="warn", use_colors_file=False) + config = ProjectFlags(log_format_file="json", log_level_file="warn", use_colors_file=False) flags = Flags(context, config) @@ -356,6 +367,43 @@ def test_duplicate_flags_raises_error(self): with pytest.raises(DbtUsageException): Flags(context) + def test_global_flag_at_child_context(self): + parent_context_a = self.make_dbt_context("parent_context_a", ["--no-use-colors"]) + 
child_context_a = self.make_dbt_context("child_context_a", ["run"], parent_context_a) + flags_a = Flags(child_context_a) + + parent_context_b = self.make_dbt_context("parent_context_b", ["run"]) + child_context_b = self.make_dbt_context( + "child_context_b", ["--no-use-colors"], parent_context_b + ) + flags_b = Flags(child_context_b) + + assert flags_a.USE_COLORS == flags_b.USE_COLORS + + def test_global_flag_with_env_var(self, monkeypatch): + # The environment variable is used for whichever parent or child + # does not have a cli command. + # Test that "child" global flag overrides env var + monkeypatch.setenv("DBT_QUIET", "0") + parent_context = self.make_dbt_context("parent", ["--no-use-colors"]) + child_context = self.make_dbt_context("child", ["--quiet"], parent_context) + flags = Flags(child_context) + assert flags.QUIET is True + + # Test that "parent" global flag overrides env var + parent_context = self.make_dbt_context("parent", ["--quiet"]) + child_context = self.make_dbt_context("child", ["--no-use-colors"], parent_context) + flags = Flags(child_context) + assert flags.QUIET is True + + def test_set_project_only_flags(self, project_flags, run_context): + flags = Flags(run_context, project_flags) + + for project_only_flag, project_only_flag_value in project_flags.project_only_flags.items(): + assert getattr(flags, project_only_flag) == project_only_flag_value + # sanity check: ensure project_only_flag is not part of the click context + assert project_only_flag not in run_context.params + def _create_flags_from_dict(self, cmd, d): write_file("", "profiles.yml") result = Flags.from_dict(cmd, d) @@ -370,16 +418,18 @@ def test_from_dict__run(self): } result = self._create_flags_from_dict(Command.RUN, args_dict) assert "model_one" in result.select[0] - assert "model_two" in result.select[0] + assert "model_two" in result.select[1] def test_from_dict__build(self): args_dict = { "print": True, "state": "some/path", + "defer_state": None, } result = self._create_flags_from_dict(Command.BUILD, args_dict) assert result.print is True assert "some/path" in str(result.state) + assert result.defer_state is None def test_from_dict__seed(self): args_dict = {"use_colors": False, "exclude": ["model_three"]} @@ -391,3 +441,43 @@ def test_from_dict__which_fails(self): args_dict = {"which": "some bad command"} with pytest.raises(DbtInternalError, match=r"does not match value of which"): self._create_flags_from_dict(Command.RUN, args_dict) + + def test_from_dict_0_value(self): + args_dict = {"log_file_max_bytes": 0} + flags = Flags.from_dict(Command.RUN, args_dict) + assert flags.LOG_FILE_MAX_BYTES == 0 + + +def test_project_flag_defaults(): + flags = ProjectFlags() + # From # 9183: Let's add a unit test that ensures that: + # every attribute of ProjectFlags that has a corresponding click option + # in params.py should be set to None by default (except for anon user + # tracking). Going forward, flags can have non-None defaults if they + # do not have a corresponding CLI option/env var. These will be used + # to control backwards incompatible interface or behaviour changes. 
+ + # List of all flags except send_anonymous_usage_stats + project_flags = [ + "cache_selected_only", + "debug", + "fail_fast", + "indirect_selection", + "log_format", + "log_format_file", + "log_level", + "log_level_file", + "partial_parse", + "populate_cache", + "printer_width", + "static_parser", + "use_colors", + "use_colors_file", + "use_experimental_parser", + "version_check", + "warn_error", + "warn_error_options", + "write_json", + ] + for flag in project_flags: + assert getattr(flags, flag) is None diff --git a/tests/unit/test_cli.py b/tests/unit/cli/test_main.py similarity index 100% rename from tests/unit/test_cli.py rename to tests/unit/cli/test_main.py diff --git a/tests/unit/test_option_types.py b/tests/unit/cli/test_option_types.py similarity index 95% rename from tests/unit/test_option_types.py rename to tests/unit/cli/test_option_types.py index 67d3c5e941f..1067f64a3c3 100644 --- a/tests/unit/test_option_types.py +++ b/tests/unit/cli/test_option_types.py @@ -1,5 +1,5 @@ -from click import Option, BadParameter import pytest +from click import BadParameter, Option from dbt.cli.option_types import YAML diff --git a/tests/unit/clients/__init__.py b/tests/unit/clients/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/clients/test_jinja.py b/tests/unit/clients/test_jinja.py new file mode 100644 index 00000000000..5f15d9e3f44 --- /dev/null +++ b/tests/unit/clients/test_jinja.py @@ -0,0 +1,416 @@ +from contextlib import contextmanager + +import pytest +import yaml + +from dbt.clients.jinja import get_rendered, get_template +from dbt_common.exceptions import JinjaRenderingError + + +@contextmanager +def returns(value): + yield value + + +@contextmanager +def raises(value): + with pytest.raises(value) as exc: + yield exc + + +def expected_id(arg): + if isinstance(arg, list): + return "_".join(arg) + + +jinja_tests = [ + # strings + ( + """foo: bar""", + returns("bar"), + returns("bar"), + ), + ( + '''foo: "bar"''', + returns("bar"), + returns("bar"), + ), + ( + '''foo: "'bar'"''', + returns("'bar'"), + returns("'bar'"), + ), + ( + """foo: '"bar"'""", + returns('"bar"'), + returns('"bar"'), + ), + ( + '''foo: "{{ 'bar' | as_text }}"''', + returns("bar"), + returns("bar"), + ), + ( + '''foo: "{{ 'bar' | as_bool }}"''', + returns("bar"), + raises(JinjaRenderingError), + ), + ( + '''foo: "{{ 'bar' | as_number }}"''', + returns("bar"), + raises(JinjaRenderingError), + ), + ( + '''foo: "{{ 'bar' | as_native }}"''', + returns("bar"), + returns("bar"), + ), + # ints + ( + """foo: 1""", + returns("1"), + returns("1"), + ), + ( + '''foo: "1"''', + returns("1"), + returns("1"), + ), + ( + '''foo: "'1'"''', + returns("'1'"), + returns("'1'"), + ), + ( + """foo: '"1"'""", + returns('"1"'), + returns('"1"'), + ), + ( + '''foo: "{{ 1 }}"''', + returns("1"), + returns("1"), + ), + ( + '''foo: "{{ '1' }}"''', + returns("1"), + returns("1"), + ), + ( + '''foo: "'{{ 1 }}'"''', + returns("'1'"), + returns("'1'"), + ), + ( + '''foo: "'{{ '1' }}'"''', + returns("'1'"), + returns("'1'"), + ), + ( + '''foo: "{{ 1 | as_text }}"''', + returns("1"), + returns("1"), + ), + ( + '''foo: "{{ 1 | as_bool }}"''', + returns("1"), + raises(JinjaRenderingError), + ), + ( + '''foo: "{{ 1 | as_number }}"''', + returns("1"), + returns(1), + ), + ( + '''foo: "{{ 1 | as_native }}"''', + returns("1"), + returns(1), + ), + ( + '''foo: "{{ '1' | as_text }}"''', + returns("1"), + returns("1"), + ), + ( + '''foo: "{{ '1' | as_bool }}"''', + returns("1"), + raises(JinjaRenderingError), + ), 
+ ( + '''foo: "{{ '1' | as_number }}"''', + returns("1"), + returns(1), + ), + ( + '''foo: "{{ '1' | as_native }}"''', + returns("1"), + returns(1), + ), + # booleans. + # Note the discrepancy with true vs True: `true` is recognized by jinja but + # not literal_eval, but `True` is recognized by ast.literal_eval. + # For extra fun, yaml recognizes both. + # unquoted true + ( + '''foo: "{{ True }}"''', + returns("True"), + returns("True"), + ), + ( + '''foo: "{{ True | as_text }}"''', + returns("True"), + returns("True"), + ), + ( + '''foo: "{{ True | as_bool }}"''', + returns("True"), + returns(True), + ), + ( + '''foo: "{{ True | as_number }}"''', + returns("True"), + raises(JinjaRenderingError), + ), + ( + '''foo: "{{ True | as_native }}"''', + returns("True"), + returns(True), + ), + # unquoted true + ( + '''foo: "{{ true }}"''', + returns("True"), + returns("True"), + ), + ( + '''foo: "{{ true | as_text }}"''', + returns("True"), + returns("True"), + ), + ( + '''foo: "{{ true | as_bool }}"''', + returns("True"), + returns(True), + ), + ( + '''foo: "{{ true | as_number }}"''', + returns("True"), + raises(JinjaRenderingError), + ), + ( + '''foo: "{{ true | as_native }}"''', + returns("True"), + returns(True), + ), + ( + '''foo: "{{ 'true' | as_text }}"''', + returns("true"), + returns("true"), + ), + # quoted 'true' + ( + '''foo: "'{{ true }}'"''', + returns("'True'"), + returns("'True'"), + ), # jinja true -> python True -> str(True) -> "True" -> quoted + ( + '''foo: "'{{ true | as_text }}'"''', + returns("'True'"), + returns("'True'"), + ), + ( + '''foo: "'{{ true | as_bool }}'"''', + returns("'True'"), + returns("'True'"), + ), + ( + '''foo: "'{{ true | as_number }}'"''', + returns("'True'"), + returns("'True'"), + ), + ( + '''foo: "'{{ true | as_native }}'"''', + returns("'True'"), + returns("'True'"), + ), + # unquoted True + ( + '''foo: "{{ True }}"''', + returns("True"), + returns("True"), + ), + ( + '''foo: "{{ True | as_text }}"''', + returns("True"), + returns("True"), + ), # True -> string 'True' -> text -> str('True') -> 'True' + ( + '''foo: "{{ True | as_bool }}"''', + returns("True"), + returns(True), + ), + ( + '''foo: "{{ True | as_number }}"''', + returns("True"), + raises(JinjaRenderingError), + ), + ( + '''foo: "{{ True | as_native }}"''', + returns("True"), + returns(True), + ), + # quoted 'True' within rendering + ( + '''foo: "{{ 'True' | as_text }}"''', + returns("True"), + returns("True"), + ), + # 'True' -> string 'True' -> text -> str('True') -> 'True' + ( + '''foo: "{{ 'True' | as_bool }}"''', + returns("True"), + returns(True), + ), + # quoted 'True' outside rendering + ( + '''foo: "'{{ True }}'"''', + returns("'True'"), + returns("'True'"), + ), + ( + '''foo: "'{{ True | as_bool }}'"''', + returns("'True'"), + returns("'True'"), + ), + # yaml turns 'yes' into a boolean true + ( + """foo: yes""", + returns("True"), + returns("True"), + ), + ( + '''foo: "yes"''', + returns("yes"), + returns("yes"), + ), + # concatenation + ( + '''foo: "{{ (a_int + 100) | as_native }}"''', + returns("200"), + returns(200), + ), + ( + '''foo: "{{ (a_str ~ 100) | as_native }}"''', + returns("100100"), + returns(100100), + ), + ( + '''foo: "{{( a_int ~ 100) | as_native }}"''', + returns("100100"), + returns(100100), + ), + # multiple nodes -> always str + ( + '''foo: "{{ a_str | as_native }}{{ a_str | as_native }}"''', + returns("100100"), + returns("100100"), + ), + ( + '''foo: "{{ a_int | as_native }}{{ a_int | as_native }}"''', + returns("100100"), + returns("100100"), + ), + ( + 
'''foo: "'{{ a_int | as_native }}{{ a_int | as_native }}'"''', + returns("'100100'"), + returns("'100100'"), + ), + ( + """foo:""", + returns("None"), + returns("None"), + ), + ( + """foo: null""", + returns("None"), + returns("None"), + ), + ( + '''foo: ""''', + returns(""), + returns(""), + ), + ( + '''foo: "{{ '' | as_native }}"''', + returns(""), + returns(""), + ), + # very annoying, but jinja 'none' is yaml 'null'. + ( + '''foo: "{{ none | as_native }}"''', + returns("None"), + returns(None), + ), + # make sure we don't include comments in the output (see #2707) + ( + '''foo: "{# #}hello"''', + returns("hello"), + returns("hello"), + ), + ( + '''foo: "{% if false %}{% endif %}hello"''', + returns("hello"), + returns("hello"), + ), +] + + +@pytest.mark.parametrize("value,text_expectation,native_expectation", jinja_tests, ids=expected_id) +def test_jinja_rendering_string(value, text_expectation, native_expectation): + foo_value = yaml.safe_load(value)["foo"] + ctx = {"a_str": "100", "a_int": 100, "b_str": "hello"} + with text_expectation as text_result: + assert text_result == get_rendered(foo_value, ctx, native=False) + + with native_expectation as native_result: + assert native_result == get_rendered(foo_value, ctx, native=True) + + +def test_do(): + s = "{% set my_dict = {} %}\n{% do my_dict.update(a=1) %}" + + template = get_template(s, {}) + mod = template.make_module() + assert mod.my_dict == {"a": 1} + + +def test_regular_render(): + s = '{{ "some_value" | as_native }}' + value = get_rendered(s, {}, native=False) + assert value == "some_value" + s = "{{ 1991 | as_native }}" + value = get_rendered(s, {}, native=False) + assert value == "1991" + + s = '{{ "some_value" | as_text }}' + value = get_rendered(s, {}, native=False) + assert value == "some_value" + s = "{{ 1991 | as_text }}" + value = get_rendered(s, {}, native=False) + assert value == "1991" + + +def test_native_render(): + s = '{{ "some_value" | as_native }}' + value = get_rendered(s, {}, native=True) + assert value == "some_value" + s = "{{ 1991 | as_native }}" + value = get_rendered(s, {}, native=True) + assert value == 1991 + + s = '{{ "some_value" | as_text }}' + value = get_rendered(s, {}, native=True) + assert value == "some_value" + s = "{{ 1991 | as_text }}" + value = get_rendered(s, {}, native=True) + assert value == "1991" diff --git a/tests/unit/clients/test_jinja_static.py b/tests/unit/clients/test_jinja_static.py new file mode 100644 index 00000000000..171976a6b50 --- /dev/null +++ b/tests/unit/clients/test_jinja_static.py @@ -0,0 +1,79 @@ +import pytest + +from dbt.artifacts.resources import RefArgs +from dbt.clients.jinja_static import ( + statically_extract_macro_calls, + statically_parse_ref_or_source, +) +from dbt.context.base import generate_base_context +from dbt.exceptions import ParsingError + + +@pytest.mark.parametrize( + "macro_string,expected_possible_macro_calls", + [ + ( + "{% macro parent_macro() %} {% do return(nested_macro()) %} {% endmacro %}", + ["nested_macro"], + ), + ( + "{% macro lr_macro() %} {{ return(load_result('relations').table) }} {% endmacro %}", + ["load_result"], + ), + ( + "{% macro get_snapshot_unique_id() -%} {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} {%- endmacro %}", + ["get_snapshot_unique_id"], + ), + ( + "{% macro get_columns_in_query(select_sql) -%} {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }} {% endmacro %}", + ["get_columns_in_query"], + ), + ( + """{% macro test_mutually_exclusive_ranges(model) %} + with base as ( + select 
{{ get_snapshot_unique_id() }} as dbt_unique_id, + * + from {{ model }} ) + {% endmacro %}""", + ["get_snapshot_unique_id"], + ), + ( + "{% macro test_my_test(model) %} select {{ current_timestamp_backcompat() }} {% endmacro %}", + ["current_timestamp_backcompat"], + ), + ( + "{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind4', 'foo_utils4')) }} {%- endmacro %}", + ["test_some_kind4", "foo_utils4.test_some_kind4"], + ), + ( + "{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind5', macro_namespace = 'foo_utils5')) }} {%- endmacro %}", + ["test_some_kind5", "foo_utils5.test_some_kind5"], + ), + ], +) +def test_extract_macro_calls(macro_string, expected_possible_macro_calls): + cli_vars = {"local_utils_dispatch_list": ["foo_utils4"]} + ctx = generate_base_context(cli_vars) + + possible_macro_calls = statically_extract_macro_calls(macro_string, ctx) + assert possible_macro_calls == expected_possible_macro_calls + + +class TestStaticallyParseRefOrSource: + def test_invalid_expression(self): + with pytest.raises(ParsingError): + statically_parse_ref_or_source("invalid") + + @pytest.mark.parametrize( + "expression,expected_ref_or_source", + [ + ("ref('model')", RefArgs(name="model")), + ("ref('package','model')", RefArgs(name="model", package="package")), + ("ref('model',v=3)", RefArgs(name="model", version=3)), + ("ref('package','model',v=3)", RefArgs(name="model", package="package", version=3)), + ("source('schema', 'table')", ["schema", "table"]), + ], + ) + def test_valid_ref_expression(self, expression, expected_ref_or_source): + ref_or_source = statically_parse_ref_or_source(expression) + assert ref_or_source == expected_ref_or_source diff --git a/tests/unit/test_registry_get_request_exception.py b/tests/unit/clients/test_registry.py similarity index 87% rename from tests/unit/test_registry_get_request_exception.py rename to tests/unit/clients/test_registry.py index 65985db113a..d6afd9c8f66 100644 --- a/tests/unit/test_registry_get_request_exception.py +++ b/tests/unit/clients/test_registry.py @@ -1,7 +1,7 @@ import unittest -from dbt.exceptions import ConnectionError from dbt.clients.registry import _get_with_retries +from dbt_common.exceptions import ConnectionError class testRegistryGetRequestException(unittest.TestCase): diff --git a/tests/unit/config/__init__.py b/tests/unit/config/__init__.py new file mode 100644 index 00000000000..b9f943c78e2 --- /dev/null +++ b/tests/unit/config/__init__.py @@ -0,0 +1,275 @@ +import os +import shutil +import tempfile +import unittest +from argparse import Namespace +from contextlib import contextmanager + +import yaml + +import dbt.config +import dbt.exceptions +from dbt import flags +from dbt.constants import PACKAGES_FILE_NAME +from dbt.flags import set_from_args +from tests.unit.utils import normalize + +INITIAL_ROOT = os.getcwd() + + +@contextmanager +def temp_cd(path): + current_path = os.getcwd() + os.chdir(path) + try: + yield + finally: + os.chdir(current_path) + + +@contextmanager +def raises_nothing(): + yield + + +def empty_profile_renderer(): + return dbt.config.renderer.ProfileRenderer({}) + + +def empty_project_renderer(): + return dbt.config.renderer.DbtProjectYamlRenderer() + + +model_config = { + "my_package_name": { + "enabled": True, + "adwords": { + "adwords_ads": {"materialized": "table", "enabled": True, "schema": "analytics"} + }, + "snowplow": { + "snowplow_sessions": { + "sort": "timestamp", + "materialized": "incremental", + "dist": "user_id", + "unique_key": "id", + }, + "base": 
{ + "snowplow_events": { + "sort": ["timestamp", "userid"], + "materialized": "table", + "sort_type": "interleaved", + "dist": "userid", + } + }, + }, + } +} + +model_fqns = frozenset( + ( + ("my_package_name", "snowplow", "snowplow_sessions"), + ("my_package_name", "snowplow", "base", "snowplow_events"), + ("my_package_name", "adwords", "adwords_ads"), + ) +) + + +class Args: + def __init__( + self, + profiles_dir=None, + threads=None, + profile=None, + cli_vars=None, + version_check=None, + project_dir=None, + target=None, + ): + self.profile = profile + self.threads = threads + self.target = target + if profiles_dir is not None: + self.profiles_dir = profiles_dir + flags.PROFILES_DIR = profiles_dir + if cli_vars is not None: + self.vars = cli_vars + if version_check is not None: + self.version_check = version_check + if project_dir is not None: + self.project_dir = project_dir + + +class BaseConfigTest(unittest.TestCase): + """Subclass this, and before calling the superclass setUp, set + self.profiles_dir and self.project_dir. + """ + + def setUp(self): + # Write project + self.project_dir = normalize(tempfile.mkdtemp()) + self.default_project_data = { + "version": "0.0.1", + "name": "my_test_project", + "profile": "default", + } + self.write_project(self.default_project_data) + + # Write profile + self.profiles_dir = normalize(tempfile.mkdtemp()) + self.default_profile_data = { + "default": { + "outputs": { + "postgres": { + "type": "postgres", + "host": "postgres-db-hostname", + "port": 5555, + "user": "db_user", + "pass": "db_pass", + "dbname": "postgres-db-name", + "schema": "postgres-schema", + "threads": 7, + }, + "with-vars": { + "type": "{{ env_var('env_value_type') }}", + "host": "{{ env_var('env_value_host') }}", + "port": "{{ env_var('env_value_port') | as_number }}", + "user": "{{ env_var('env_value_user') }}", + "pass": "{{ env_var('env_value_pass') }}", + "dbname": "{{ env_var('env_value_dbname') }}", + "schema": "{{ env_var('env_value_schema') }}", + }, + "cli-and-env-vars": { + "type": "{{ env_var('env_value_type') }}", + "host": "{{ var('cli_value_host') }}", + "port": "{{ env_var('env_value_port') | as_number }}", + "user": "{{ env_var('env_value_user') }}", + "pass": "{{ env_var('env_value_pass') }}", + "dbname": "{{ env_var('env_value_dbname') }}", + "schema": "{{ env_var('env_value_schema') }}", + }, + }, + "target": "postgres", + }, + "other": { + "outputs": { + "other-postgres": { + "type": "postgres", + "host": "other-postgres-db-hostname", + "port": 4444, + "user": "other_db_user", + "pass": "other_db_pass", + "dbname": "other-postgres-db-name", + "schema": "other-postgres-schema", + "threads": 2, + } + }, + "target": "other-postgres", + }, + "empty_profile_data": {}, + } + self.write_profile(self.default_profile_data) + + self.args = Namespace( + profiles_dir=self.profiles_dir, + cli_vars={}, + version_check=True, + project_dir=self.project_dir, + target=None, + threads=None, + profile=None, + ) + set_from_args(self.args, None) + self.env_override = { + "env_value_type": "postgres", + "env_value_host": "env-postgres-host", + "env_value_port": "6543", + "env_value_user": "env-postgres-user", + "env_value_pass": "env-postgres-pass", + "env_value_dbname": "env-postgres-dbname", + "env_value_schema": "env-postgres-schema", + "env_value_profile": "default", + } + + def assertRaisesOrReturns(self, exc): + if exc is None: + return raises_nothing() + else: + return self.assertRaises(exc) + + def tearDown(self): + try: + shutil.rmtree(self.project_dir) + except 
EnvironmentError: + pass + try: + shutil.rmtree(self.profiles_dir) + except EnvironmentError: + pass + + def project_path(self, name): + return os.path.join(self.project_dir, name) + + def profile_path(self, name): + return os.path.join(self.profiles_dir, name) + + def write_project(self, project_data=None): + if project_data is None: + project_data = self.project_data + with open(self.project_path("dbt_project.yml"), "w") as fp: + yaml.dump(project_data, fp) + + def write_packages(self, package_data): + with open(self.project_path("packages.yml"), "w") as fp: + yaml.dump(package_data, fp) + + def write_profile(self, profile_data=None): + if profile_data is None: + profile_data = self.profile_data + with open(self.profile_path("profiles.yml"), "w") as fp: + yaml.dump(profile_data, fp) + + def write_empty_profile(self): + with open(self.profile_path("profiles.yml"), "w") as fp: + yaml.dump("", fp) + + +def project_from_config_norender( + cfg, packages=None, project_root="/invalid-root-path", verify_version=False +): + if packages is None: + packages = {} + partial = dbt.config.project.PartialProject.from_dicts( + project_root, + project_dict=cfg, + packages_dict=packages, + selectors_dict={}, + verify_version=verify_version, + ) + # no rendering ... Why? + partial.project_dict["project-root"] = project_root + rendered = dbt.config.project.RenderComponents( + project_dict=partial.project_dict, + packages_dict=partial.packages_dict, + selectors_dict=partial.selectors_dict, + ) + return partial.create_project(rendered) + + +def project_from_config_rendered( + cfg, + packages=None, + project_root="/invalid-root-path", + verify_version=False, + packages_specified_path=PACKAGES_FILE_NAME, +): + if packages is None: + packages = {} + partial = dbt.config.project.PartialProject.from_dicts( + project_root, + project_dict=cfg, + packages_dict=packages, + selectors_dict={}, + verify_version=verify_version, + packages_specified_path=packages_specified_path, + ) + return partial.render(empty_project_renderer()) diff --git a/tests/unit/config/test_profile.py b/tests/unit/config/test_profile.py new file mode 100644 index 00000000000..e79d0935582 --- /dev/null +++ b/tests/unit/config/test_profile.py @@ -0,0 +1,294 @@ +import os +from copy import deepcopy +from unittest import mock + +import dbt.config +import dbt.exceptions +from dbt.adapters.postgres import PostgresCredentials +from dbt.flags import set_from_args +from dbt.tests.util import safe_set_invocation_context +from tests.unit.config import ( + BaseConfigTest, + empty_profile_renderer, + project_from_config_norender, +) + + +class TestProfile(BaseConfigTest): + def from_raw_profiles(self): + renderer = empty_profile_renderer() + return dbt.config.Profile.from_raw_profiles(self.default_profile_data, "default", renderer) + + def test_from_raw_profiles(self): + profile = self.from_raw_profiles() + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "postgres") + self.assertEqual(profile.threads, 7) + self.assertTrue(isinstance(profile.credentials, PostgresCredentials)) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "postgres-db-hostname") + self.assertEqual(profile.credentials.port, 5555) + self.assertEqual(profile.credentials.user, "db_user") + self.assertEqual(profile.credentials.password, "db_pass") + self.assertEqual(profile.credentials.schema, "postgres-schema") + self.assertEqual(profile.credentials.database, "postgres-db-name") + + def 
test_missing_type(self): + del self.default_profile_data["default"]["outputs"]["postgres"]["type"] + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + self.from_raw_profiles() + self.assertIn("type", str(exc.exception)) + self.assertIn("postgres", str(exc.exception)) + self.assertIn("default", str(exc.exception)) + + def test_bad_type(self): + self.default_profile_data["default"]["outputs"]["postgres"]["type"] = "invalid" + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + self.from_raw_profiles() + self.assertIn("Credentials", str(exc.exception)) + self.assertIn("postgres", str(exc.exception)) + self.assertIn("default", str(exc.exception)) + + def test_invalid_credentials(self): + del self.default_profile_data["default"]["outputs"]["postgres"]["host"] + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + self.from_raw_profiles() + self.assertIn("Credentials", str(exc.exception)) + self.assertIn("postgres", str(exc.exception)) + self.assertIn("default", str(exc.exception)) + + def test_missing_target(self): + profile = self.default_profile_data["default"] + del profile["target"] + profile["outputs"]["default"] = profile["outputs"]["postgres"] + profile = self.from_raw_profiles() + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "default") + self.assertEqual(profile.credentials.type, "postgres") + + def test_extra_path(self): + self.default_project_data.update( + { + "model-paths": ["models"], + "source-paths": ["other-models"], + } + ) + with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: + project_from_config_norender(self.default_project_data, project_root=self.project_dir) + + self.assertIn("source-paths and model-paths", str(exc.exception)) + self.assertIn("cannot both be defined.", str(exc.exception)) + + def test_profile_invalid_project(self): + renderer = empty_profile_renderer() + with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: + dbt.config.Profile.from_raw_profiles( + self.default_profile_data, "invalid-profile", renderer + ) + + self.assertEqual(exc.exception.result_type, "invalid_project") + self.assertIn("Could not find", str(exc.exception)) + self.assertIn("invalid-profile", str(exc.exception)) + + def test_profile_invalid_target(self): + renderer = empty_profile_renderer() + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + dbt.config.Profile.from_raw_profiles( + self.default_profile_data, "default", renderer, target_override="nope" + ) + + self.assertIn("nope", str(exc.exception)) + self.assertIn("- postgres", str(exc.exception)) + self.assertIn("- with-vars", str(exc.exception)) + + def test_no_outputs(self): + renderer = empty_profile_renderer() + + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + dbt.config.Profile.from_raw_profiles( + {"some-profile": {"target": "blah"}}, "some-profile", renderer + ) + self.assertIn("outputs not specified", str(exc.exception)) + self.assertIn("some-profile", str(exc.exception)) + + def test_neq(self): + profile = self.from_raw_profiles() + self.assertNotEqual(profile, object()) + + def test_eq(self): + renderer = empty_profile_renderer() + profile = dbt.config.Profile.from_raw_profiles( + deepcopy(self.default_profile_data), "default", renderer + ) + + other = dbt.config.Profile.from_raw_profiles( + deepcopy(self.default_profile_data), "default", renderer + ) + self.assertEqual(profile, other) + + def test_invalid_env_vars(self): + self.env_override["env_value_port"] = "hello" + with 
mock.patch.dict(os.environ, self.env_override): + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + safe_set_invocation_context() + renderer = empty_profile_renderer() + dbt.config.Profile.from_raw_profile_info( + self.default_profile_data["default"], + "default", + renderer, + target_override="with-vars", + ) + self.assertIn("Could not convert value 'hello' into type 'number'", str(exc.exception)) + + +class TestProfileFile(BaseConfigTest): + def from_raw_profile_info(self, raw_profile=None, profile_name="default", **kwargs): + if raw_profile is None: + raw_profile = self.default_profile_data["default"] + renderer = empty_profile_renderer() + kw = { + "raw_profile": raw_profile, + "profile_name": profile_name, + "renderer": renderer, + } + kw.update(kwargs) + return dbt.config.Profile.from_raw_profile_info(**kw) + + def from_args(self, project_profile_name="default", **kwargs): + kw = { + "project_profile_name": project_profile_name, + "renderer": empty_profile_renderer(), + "threads_override": self.args.threads, + "target_override": self.args.target, + "profile_name_override": self.args.profile, + } + kw.update(kwargs) + return dbt.config.Profile.render(**kw) + + def test_profile_simple(self): + profile = self.from_args() + from_raw = self.from_raw_profile_info() + + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "postgres") + self.assertEqual(profile.threads, 7) + self.assertTrue(isinstance(profile.credentials, PostgresCredentials)) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "postgres-db-hostname") + self.assertEqual(profile.credentials.port, 5555) + self.assertEqual(profile.credentials.user, "db_user") + self.assertEqual(profile.credentials.password, "db_pass") + self.assertEqual(profile.credentials.schema, "postgres-schema") + self.assertEqual(profile.credentials.database, "postgres-db-name") + self.assertEqual(profile, from_raw) + + def test_profile_override(self): + self.args.profile = "other" + self.args.threads = 3 + set_from_args(self.args, None) + profile = self.from_args() + from_raw = self.from_raw_profile_info( + self.default_profile_data["other"], + "other", + threads_override=3, + ) + + self.assertEqual(profile.profile_name, "other") + self.assertEqual(profile.target_name, "other-postgres") + self.assertEqual(profile.threads, 3) + self.assertTrue(isinstance(profile.credentials, PostgresCredentials)) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "other-postgres-db-hostname") + self.assertEqual(profile.credentials.port, 4444) + self.assertEqual(profile.credentials.user, "other_db_user") + self.assertEqual(profile.credentials.password, "other_db_pass") + self.assertEqual(profile.credentials.schema, "other-postgres-schema") + self.assertEqual(profile.credentials.database, "other-postgres-db-name") + self.assertEqual(profile, from_raw) + + def test_env_vars(self): + self.args.target = "with-vars" + with mock.patch.dict(os.environ, self.env_override): + safe_set_invocation_context() # reset invocation context with new env + profile = self.from_args() + from_raw = self.from_raw_profile_info(target_override="with-vars") + + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "with-vars") + self.assertEqual(profile.threads, 1) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "env-postgres-host") + 
self.assertEqual(profile.credentials.port, 6543) + self.assertEqual(profile.credentials.user, "env-postgres-user") + self.assertEqual(profile.credentials.password, "env-postgres-pass") + self.assertEqual(profile, from_raw) + + def test_env_vars_env_target(self): + self.default_profile_data["default"]["target"] = "{{ env_var('env_value_target') }}" + self.write_profile(self.default_profile_data) + self.env_override["env_value_target"] = "with-vars" + with mock.patch.dict(os.environ, self.env_override): + safe_set_invocation_context() # reset invocation context with new env + profile = self.from_args() + from_raw = self.from_raw_profile_info(target_override="with-vars") + + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "with-vars") + self.assertEqual(profile.threads, 1) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "env-postgres-host") + self.assertEqual(profile.credentials.port, 6543) + self.assertEqual(profile.credentials.user, "env-postgres-user") + self.assertEqual(profile.credentials.password, "env-postgres-pass") + self.assertEqual(profile, from_raw) + + def test_invalid_env_vars(self): + self.env_override["env_value_port"] = "hello" + self.args.target = "with-vars" + with mock.patch.dict(os.environ, self.env_override): + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + safe_set_invocation_context() # reset invocation context with new env + self.from_args() + + self.assertIn("Could not convert value 'hello' into type 'number'", str(exc.exception)) + + def test_cli_and_env_vars(self): + self.args.target = "cli-and-env-vars" + self.args.vars = {"cli_value_host": "cli-postgres-host"} + renderer = dbt.config.renderer.ProfileRenderer({"cli_value_host": "cli-postgres-host"}) + with mock.patch.dict(os.environ, self.env_override): + safe_set_invocation_context() # reset invocation context with new env + profile = self.from_args(renderer=renderer) + from_raw = self.from_raw_profile_info( + target_override="cli-and-env-vars", + renderer=renderer, + ) + + self.assertEqual(profile.profile_name, "default") + self.assertEqual(profile.target_name, "cli-and-env-vars") + self.assertEqual(profile.threads, 1) + self.assertEqual(profile.credentials.type, "postgres") + self.assertEqual(profile.credentials.host, "cli-postgres-host") + self.assertEqual(profile.credentials.port, 6543) + self.assertEqual(profile.credentials.user, "env-postgres-user") + self.assertEqual(profile.credentials.password, "env-postgres-pass") + self.assertEqual(profile, from_raw) + + def test_no_profile(self): + with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: + self.from_args(project_profile_name=None) + self.assertIn("no profile was specified", str(exc.exception)) + + def test_empty_profile(self): + self.write_empty_profile() + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + self.from_args() + self.assertIn("profiles.yml is empty", str(exc.exception)) + + def test_profile_with_empty_profile_data(self): + renderer = empty_profile_renderer() + with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: + dbt.config.Profile.from_raw_profiles( + self.default_profile_data, "empty_profile_data", renderer + ) + self.assertIn("Profile empty_profile_data in profiles.yml is empty", str(exc.exception)) diff --git a/tests/unit/config/test_project.py b/tests/unit/config/test_project.py new file mode 100644 index 00000000000..ab842c164d7 --- /dev/null +++ b/tests/unit/config/test_project.py @@ -0,0 
+1,587 @@ +import json +import os +import unittest +from copy import deepcopy +from typing import Any, Dict +from unittest import mock + +import pytest + +import dbt.config +import dbt.exceptions +from dbt.adapters.contracts.connection import DEFAULT_QUERY_COMMENT, QueryComment +from dbt.adapters.factory import load_plugin +from dbt.config.project import Project, _get_required_version +from dbt.constants import DEPENDENCIES_FILE_NAME +from dbt.contracts.project import GitPackage, LocalPackage, PackageConfig +from dbt.flags import set_from_args +from dbt.node_types import NodeType +from dbt.tests.util import safe_set_invocation_context +from dbt_common.exceptions import DbtRuntimeError +from dbt_common.semver import VersionSpecifier +from tests.unit.config import ( + BaseConfigTest, + empty_project_renderer, + project_from_config_norender, + project_from_config_rendered, +) + + +class TestProjectMethods: + def test_all_source_paths(self, project: Project): + assert ( + project.all_source_paths.sort() + == ["models", "seeds", "snapshots", "analyses", "macros"].sort() + ) + + def test_generic_test_paths(self, project: Project): + assert project.generic_test_paths == ["tests/generic"] + + def test_fixture_paths(self, project: Project): + assert project.fixture_paths == ["tests/fixtures"] + + def test__str__(self, project: Project): + assert ( + str(project) + == "{'name': 'test_project', 'version': 1.0, 'project-root': 'doesnt/actually/exist', 'profile': 'test_profile', 'model-paths': ['models'], 'macro-paths': ['macros'], 'seed-paths': ['seeds'], 'test-paths': ['tests'], 'analysis-paths': ['analyses'], 'docs-paths': ['docs'], 'asset-paths': ['assets'], 'target-path': 'target', 'snapshot-paths': ['snapshots'], 'clean-targets': ['target'], 'log-path': 'path/to/project/logs', 'quoting': {}, 'models': {}, 'on-run-start': [], 'on-run-end': [], 'dispatch': [{'macro_namespace': 'dbt_utils', 'search_order': ['test_project', 'dbt_utils']}], 'seeds': {}, 'snapshots': {}, 'sources': {}, 'data_tests': {}, 'unit_tests': {}, 'metrics': {}, 'semantic-models': {}, 'saved-queries': {}, 'exposures': {}, 'vars': {}, 'require-dbt-version': ['=0.0.0'], 'restrict-access': False, 'dbt-cloud': {}, 'flags': {}, 'query-comment': {'comment': \"\\n{%- set comment_dict = {} -%}\\n{%- do comment_dict.update(\\n app='dbt',\\n dbt_version=dbt_version,\\n profile_name=target.get('profile_name'),\\n target_name=target.get('target_name'),\\n) -%}\\n{%- if node is not none -%}\\n {%- do comment_dict.update(\\n node_id=node.unique_id,\\n ) -%}\\n{% else %}\\n {# in the node context, the connection name is the node_id #}\\n {%- do comment_dict.update(connection_name=connection_name) -%}\\n{%- endif -%}\\n{{ return(tojson(comment_dict)) }}\\n\", 'append': False, 'job-label': False}, 'packages': []}" + ) + + def test_get_selector(self, project: Project): + selector = project.get_selector("my_selector") + assert selector.raw == "give me cats" + + with pytest.raises(DbtRuntimeError): + project.get_selector("doesnt_exist") + + def test_get_default_selector_name(self, project: Project): + default_selector_name = project.get_default_selector_name() + assert default_selector_name == "my_selector" + + project.selectors["my_selector"]["default"] = False + default_selector_name = project.get_default_selector_name() + assert default_selector_name is None + + def test_get_macro_search_order(self, project: Project): + search_order = project.get_macro_search_order("dbt_utils") + assert search_order == ["test_project", "dbt_utils"] + + 
search_order = project.get_macro_search_order("doesnt_exist") + assert search_order is None + + def test_project_target_path(self, project: Project): + assert project.project_target_path == "doesnt/actually/exist/target" + + def test_eq(self, project: Project): + other = deepcopy(project) + assert project == other + + def test_neq(self, project: Project): + other = deepcopy(project) + other.project_name = "other project" + assert project != other + + def test_hashed_name(self, project: Project): + assert project.hashed_name() == "6e72a69d5c5cca8f0400338441c022e4" + + +class TestProjectInitialization(BaseConfigTest): + def test_defaults(self): + project = project_from_config_norender( + self.default_project_data, project_root=self.project_dir + ) + self.assertEqual(project.project_name, "my_test_project") + self.assertEqual(project.version, "0.0.1") + self.assertEqual(project.profile_name, "default") + self.assertEqual(project.project_root, self.project_dir) + self.assertEqual(project.model_paths, ["models"]) + self.assertEqual(project.macro_paths, ["macros"]) + self.assertEqual(project.seed_paths, ["seeds"]) + self.assertEqual(project.test_paths, ["tests"]) + self.assertEqual(project.analysis_paths, ["analyses"]) + self.assertEqual( + set(project.docs_paths), set(["models", "seeds", "snapshots", "analyses", "macros"]) + ) + self.assertEqual(project.asset_paths, []) + self.assertEqual(project.target_path, "target") + self.assertEqual(project.clean_targets, ["target"]) + self.assertEqual(project.log_path, "logs") + self.assertEqual(project.packages_install_path, "dbt_packages") + self.assertEqual(project.quoting, {}) + self.assertEqual(project.models, {}) + self.assertEqual(project.on_run_start, []) + self.assertEqual(project.on_run_end, []) + self.assertEqual(project.seeds, {}) + self.assertEqual(project.dbt_version, [VersionSpecifier.from_version_string(">=0.0.0")]) + self.assertEqual(project.packages, PackageConfig(packages=[])) + # just make sure str() doesn't crash anything, that's always + # embarrassing + str(project) + + def test_implicit_overrides(self): + self.default_project_data.update( + { + "model-paths": ["other-models"], + } + ) + project = project_from_config_norender( + self.default_project_data, project_root=self.project_dir + ) + self.assertEqual( + set(project.docs_paths), + set(["other-models", "seeds", "snapshots", "analyses", "macros"]), + ) + + def test_all_overrides(self): + # log-path is not tested because it is set exclusively from flags, not cfg + self.default_project_data.update( + { + "model-paths": ["other-models"], + "macro-paths": ["other-macros"], + "seed-paths": ["other-seeds"], + "test-paths": ["other-tests"], + "analysis-paths": ["other-analyses"], + "docs-paths": ["docs"], + "asset-paths": ["other-assets"], + "clean-targets": ["another-target"], + "packages-install-path": "other-dbt_packages", + "quoting": {"identifier": False}, + "models": { + "pre-hook": ["{{ logging.log_model_start_event() }}"], + "post-hook": ["{{ logging.log_model_end_event() }}"], + "my_test_project": { + "first": { + "enabled": False, + "sub": { + "enabled": True, + }, + }, + "second": { + "materialized": "table", + }, + }, + "third_party": { + "third": { + "materialized": "view", + }, + }, + }, + "on-run-start": [ + "{{ logging.log_run_start_event() }}", + ], + "on-run-end": [ + "{{ logging.log_run_end_event() }}", + ], + "seeds": { + "my_test_project": { + "enabled": True, + "schema": "seed_data", + "post-hook": "grant select on {{ this }} to bi_user", + }, + }, + "data_tests": 
{"my_test_project": {"fail_calc": "sum(failures)"}}, + "require-dbt-version": ">=0.1.0", + } + ) + packages = { + "packages": [ + { + "local": "foo", + }, + {"git": "git@example.com:dbt-labs/dbt-utils.git", "revision": "test-rev"}, + ], + } + project = project_from_config_norender( + self.default_project_data, project_root=self.project_dir, packages=packages + ) + self.assertEqual(project.project_name, "my_test_project") + self.assertEqual(project.version, "0.0.1") + self.assertEqual(project.profile_name, "default") + self.assertEqual(project.model_paths, ["other-models"]) + self.assertEqual(project.macro_paths, ["other-macros"]) + self.assertEqual(project.seed_paths, ["other-seeds"]) + self.assertEqual(project.test_paths, ["other-tests"]) + self.assertEqual(project.analysis_paths, ["other-analyses"]) + self.assertEqual(project.docs_paths, ["docs"]) + self.assertEqual(project.asset_paths, ["other-assets"]) + self.assertEqual(project.clean_targets, ["another-target"]) + self.assertEqual(project.packages_install_path, "other-dbt_packages") + self.assertEqual(project.quoting, {"identifier": False}) + self.assertEqual( + project.models, + { + "pre-hook": ["{{ logging.log_model_start_event() }}"], + "post-hook": ["{{ logging.log_model_end_event() }}"], + "my_test_project": { + "first": { + "enabled": False, + "sub": { + "enabled": True, + }, + }, + "second": { + "materialized": "table", + }, + }, + "third_party": { + "third": { + "materialized": "view", + }, + }, + }, + ) + self.assertEqual(project.on_run_start, ["{{ logging.log_run_start_event() }}"]) + self.assertEqual(project.on_run_end, ["{{ logging.log_run_end_event() }}"]) + self.assertEqual( + project.seeds, + { + "my_test_project": { + "enabled": True, + "schema": "seed_data", + "post-hook": "grant select on {{ this }} to bi_user", + }, + }, + ) + self.assertEqual( + project.data_tests, + { + "my_test_project": {"fail_calc": "sum(failures)"}, + }, + ) + self.assertEqual(project.dbt_version, [VersionSpecifier.from_version_string(">=0.1.0")]) + self.assertEqual( + project.packages, + PackageConfig( + packages=[ + LocalPackage(local="foo", unrendered={"local": "foo"}), + GitPackage( + git="git@example.com:dbt-labs/dbt-utils.git", + revision="test-rev", + unrendered={ + "git": "git@example.com:dbt-labs/dbt-utils.git", + "revision": "test-rev", + }, + ), + ] + ), + ) + str(project) # this does the equivalent of project.to_project_config(with_packages=True) + json.dumps(project.to_project_config()) + + def test_string_run_hooks(self): + self.default_project_data.update( + { + "on-run-start": "{{ logging.log_run_start_event() }}", + "on-run-end": "{{ logging.log_run_end_event() }}", + } + ) + project = project_from_config_rendered(self.default_project_data) + self.assertEqual(project.on_run_start, ["{{ logging.log_run_start_event() }}"]) + self.assertEqual(project.on_run_end, ["{{ logging.log_run_end_event() }}"]) + + def test_invalid_project_name(self): + self.default_project_data["name"] = "invalid-project-name" + with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: + project_from_config_norender(self.default_project_data, project_root=self.project_dir) + + self.assertIn("invalid-project-name", str(exc.exception)) + + def test_no_project(self): + os.remove(os.path.join(self.project_dir, "dbt_project.yml")) + renderer = empty_project_renderer() + with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: + dbt.config.Project.from_project_root(self.project_dir, renderer) + + self.assertIn("No dbt_project.yml", str(exc.exception)) 
+ + def test_invalid_version(self): + self.default_project_data["require-dbt-version"] = "hello!" + with self.assertRaises(dbt.exceptions.DbtProjectError): + project_from_config_norender(self.default_project_data, project_root=self.project_dir) + + def test_unsupported_version(self): + self.default_project_data["require-dbt-version"] = ">99999.0.0" + # allowed, because the RuntimeConfig checks, not the Project itself + project_from_config_norender(self.default_project_data, project_root=self.project_dir) + + def test_none_values(self): + self.default_project_data.update( + { + "models": None, + "seeds": None, + "on-run-end": None, + "on-run-start": None, + } + ) + project = project_from_config_rendered(self.default_project_data) + self.assertEqual(project.models, {}) + self.assertEqual(project.on_run_start, []) + self.assertEqual(project.on_run_end, []) + self.assertEqual(project.seeds, {}) + + def test_nested_none_values(self): + self.default_project_data.update( + { + "models": {"vars": None, "pre-hook": None, "post-hook": None}, + "seeds": {"vars": None, "pre-hook": None, "post-hook": None, "column_types": None}, + } + ) + project = project_from_config_rendered(self.default_project_data) + self.assertEqual(project.models, {"vars": {}, "pre-hook": [], "post-hook": []}) + self.assertEqual( + project.seeds, {"vars": {}, "pre-hook": [], "post-hook": [], "column_types": {}} + ) + + @pytest.mark.skipif(os.name == "nt", reason="crashes CI for Windows") + def test_cycle(self): + models = {} + models["models"] = models + self.default_project_data.update( + { + "models": models, + } + ) + with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: + project_from_config_rendered(self.default_project_data) + + assert "Cycle detected" in str(exc.exception) + + def test_query_comment_disabled(self): + self.default_project_data.update( + { + "query-comment": None, + } + ) + project = project_from_config_norender( + self.default_project_data, project_root=self.project_dir + ) + self.assertEqual(project.query_comment.comment, "") + self.assertEqual(project.query_comment.append, False) + + self.default_project_data.update( + { + "query-comment": "", + } + ) + project = project_from_config_norender( + self.default_project_data, project_root=self.project_dir + ) + self.assertEqual(project.query_comment.comment, "") + self.assertEqual(project.query_comment.append, False) + + def test_default_query_comment(self): + project = project_from_config_norender( + self.default_project_data, project_root=self.project_dir + ) + self.assertEqual(project.query_comment, QueryComment()) + + def test_default_query_comment_append(self): + self.default_project_data.update( + { + "query-comment": {"append": True}, + } + ) + project = project_from_config_norender( + self.default_project_data, project_root=self.project_dir + ) + self.assertEqual(project.query_comment.comment, DEFAULT_QUERY_COMMENT) + self.assertEqual(project.query_comment.append, True) + + def test_custom_query_comment_append(self): + self.default_project_data.update( + { + "query-comment": {"comment": "run by user test", "append": True}, + } + ) + project = project_from_config_norender( + self.default_project_data, project_root=self.project_dir + ) + self.assertEqual(project.query_comment.comment, "run by user test") + self.assertEqual(project.query_comment.append, True) + + def test_packages_from_dependencies(self): + packages = { + "packages": [ + { + "git": "{{ env_var('some_package') }}", + "warn-unpinned": True, + } + ], + } + + project = 
project_from_config_rendered( + self.default_project_data, packages, packages_specified_path=DEPENDENCIES_FILE_NAME + ) + git_package = project.packages.packages[0] + # packages did not render because packages_specified_path=DEPENDENCIES_FILE_NAME + assert git_package.git == "{{ env_var('some_package') }}" + + +class TestProjectFile(BaseConfigTest): + def test_from_project_root(self): + renderer = empty_project_renderer() + project = dbt.config.Project.from_project_root(self.project_dir, renderer) + from_config = project_from_config_norender( + self.default_project_data, project_root=self.project_dir + ) + self.assertEqual(project, from_config) + self.assertEqual(project.version, "0.0.1") + self.assertEqual(project.project_name, "my_test_project") + + def test_with_invalid_package(self): + renderer = empty_project_renderer() + self.write_packages({"invalid": ["not a package of any kind"]}) + with self.assertRaises(dbt.exceptions.DbtProjectError): + dbt.config.Project.from_project_root(self.project_dir, renderer) + + +class TestVariableProjectFile(BaseConfigTest): + def setUp(self): + super().setUp() + self.default_project_data["version"] = "{{ var('cli_version') }}" + self.default_project_data["name"] = "blah" + self.default_project_data["profile"] = "{{ env_var('env_value_profile') }}" + self.write_project(self.default_project_data) + + def test_cli_and_env_vars(self): + renderer = dbt.config.renderer.DbtProjectYamlRenderer(None, {"cli_version": "0.1.2"}) + with mock.patch.dict(os.environ, self.env_override): + safe_set_invocation_context() # reset invocation context with new env + project = dbt.config.Project.from_project_root( + self.project_dir, + renderer, + ) + + self.assertEqual(renderer.ctx_obj.env_vars, {"env_value_profile": "default"}) + self.assertEqual(project.version, "0.1.2") + self.assertEqual(project.project_name, "blah") + self.assertEqual(project.profile_name, "default") + + +class TestVarLookups(unittest.TestCase): + def setUp(self): + self.initial_src_vars = { + # globals + "foo": 123, + "bar": "hello", + # project-scoped + "my_project": { + "bar": "goodbye", + "baz": True, + }, + "other_project": { + "foo": 456, + }, + } + self.src_vars = deepcopy(self.initial_src_vars) + self.dst = {"vars": deepcopy(self.initial_src_vars)} + + self.projects = ["my_project", "other_project", "third_project"] + load_plugin("postgres") + self.local_var_search = mock.MagicMock( + fqn=["my_project", "my_model"], resource_type=NodeType.Model, package_name="my_project" + ) + self.other_var_search = mock.MagicMock( + fqn=["other_project", "model"], + resource_type=NodeType.Model, + package_name="other_project", + ) + self.third_var_search = mock.MagicMock( + fqn=["third_project", "third_model"], + resource_type=NodeType.Model, + package_name="third_project", + ) + + def test_lookups(self): + vars_provider = dbt.config.project.VarProvider(self.initial_src_vars) + + expected = [ + (self.local_var_search, "foo", 123), + (self.other_var_search, "foo", 456), + (self.third_var_search, "foo", 123), + (self.local_var_search, "bar", "goodbye"), + (self.other_var_search, "bar", "hello"), + (self.third_var_search, "bar", "hello"), + (self.local_var_search, "baz", True), + (self.other_var_search, "baz", None), + (self.third_var_search, "baz", None), + ] + for node, key, expected_value in expected: + value = vars_provider.vars_for(node, "postgres").get(key) + assert value == expected_value + + +class TestMultipleProjectFlags(BaseConfigTest): + def setUp(self): + super().setUp() + + 
self.default_project_data.update( + { + "flags": { + "send_anonymous_usage_data": False, + } + } + ) + self.write_project(self.default_project_data) + + self.default_profile_data.update( + { + "config": { + "send_anonymous_usage_data": False, + } + } + ) + self.write_profile(self.default_profile_data) + + def test_setting_multiple_flags(self): + with pytest.raises(dbt.exceptions.DbtProjectError): + set_from_args(self.args, None) + + +class TestGetRequiredVersion: + @pytest.fixture + def project_dict(self) -> Dict[str, Any]: + return { + "name": "test_project", + "require-dbt-version": ">0.0.0", + } + + def test_supported_version(self, project_dict: Dict[str, Any]) -> None: + specifiers = _get_required_version(project_dict=project_dict, verify_version=True) + assert set(x.to_version_string() for x in specifiers) == {">0.0.0"} + + def test_unsupported_version(self, project_dict: Dict[str, Any]) -> None: + project_dict["require-dbt-version"] = ">99999.0.0" + with pytest.raises( + dbt.exceptions.DbtProjectError, match="This version of dbt is not supported" + ): + _get_required_version(project_dict=project_dict, verify_version=True) + + def test_unsupported_version_no_check(self, project_dict: Dict[str, Any]) -> None: + project_dict["require-dbt-version"] = ">99999.0.0" + specifiers = _get_required_version(project_dict=project_dict, verify_version=False) + assert set(x.to_version_string() for x in specifiers) == {">99999.0.0"} + + def test_supported_version_range(self, project_dict: Dict[str, Any]) -> None: + project_dict["require-dbt-version"] = [">0.0.0", "<=99999.0.0"] + specifiers = _get_required_version(project_dict=project_dict, verify_version=True) + assert set(x.to_version_string() for x in specifiers) == {">0.0.0", "<=99999.0.0"} + + def test_unsupported_version_range(self, project_dict: Dict[str, Any]) -> None: + project_dict["require-dbt-version"] = [">0.0.0", "<=0.0.1"] + with pytest.raises( + dbt.exceptions.DbtProjectError, match="This version of dbt is not supported" + ): + _get_required_version(project_dict=project_dict, verify_version=True) + + def test_unsupported_version_range_no_check(self, project_dict: Dict[str, Any]) -> None: + project_dict["require-dbt-version"] = [">0.0.0", "<=0.0.1"] + specifiers = _get_required_version(project_dict=project_dict, verify_version=False) + assert set(x.to_version_string() for x in specifiers) == {">0.0.0", "<=0.0.1"} + + def test_impossible_version_range(self, project_dict: Dict[str, Any]) -> None: + project_dict["require-dbt-version"] = [">99999.0.0", "<=0.0.1"] + with pytest.raises( + dbt.exceptions.DbtProjectError, + match="The package version requirement can never be satisfied", + ): + _get_required_version(project_dict=project_dict, verify_version=True) diff --git a/tests/unit/config/test_runtime.py b/tests/unit/config/test_runtime.py new file mode 100644 index 00000000000..816ec8f98c3 --- /dev/null +++ b/tests/unit/config/test_runtime.py @@ -0,0 +1,200 @@ +import os +import tempfile +from argparse import Namespace +from typing import Any, Dict +from unittest import mock + +import pytest +from pytest_mock import MockerFixture + +import dbt.config +import dbt.exceptions +from dbt import tracking +from dbt.config.profile import Profile +from dbt.config.project import Project +from dbt.config.runtime import RuntimeConfig +from dbt.contracts.project import PackageConfig +from dbt.events.types import UnusedResourceConfigPath +from dbt.flags import set_from_args +from dbt.tests.util import safe_set_invocation_context +from 
dbt_common.events.event_manager_client import add_callback_to_manager
+from tests.unit.config import BaseConfigTest, temp_cd
+from tests.utils import EventCatcher
+
+
+class TestRuntimeConfig:
+ @pytest.fixture
+ def args(self) -> Namespace:
+ return Namespace(
+ profiles_dir=tempfile.mkdtemp(),
+ cli_vars={},
+ version_check=True,
+ project_dir=tempfile.mkdtemp(),
+ target=None,
+ threads=None,
+ profile=None,
+ )
+
+ def test_str(self, profile: Profile, project: Project) -> None:
+ config = dbt.config.RuntimeConfig.from_parts(project, profile, {})
+
+ # to make sure nothing terrible happens
+ str(config)
+
+ def test_from_parts(self, args: Namespace, profile: Profile, project: Project):
+ config = dbt.config.RuntimeConfig.from_parts(project, profile, args)
+
+ assert config.cli_vars == {}
+ assert config.to_profile_info() == profile.to_profile_info()
+ # we should have the default quoting set in the full config, but not in
+ # the project
+ # TODO(jeb): Adapters must assert that quoting is populated?
+ expected_project = project.to_project_config()
+ assert expected_project["quoting"] == {}
+
+ expected_project["quoting"] = {
+ "database": True,
+ "identifier": True,
+ "schema": True,
+ }
+ assert config.to_project_config() == expected_project
+
+ def test_get_metadata(self, mocker: MockerFixture, runtime_config: RuntimeConfig) -> None:
+ mock_user = mocker.patch.object(tracking, "active_user")
+ mock_user.id = "cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf"
+ set_from_args(Namespace(SEND_ANONYMOUS_USAGE_STATS=False), None)
+
+ metadata = runtime_config.get_metadata()
+ # ensure user_id and send_anonymous_usage_stats are set correctly
+ assert metadata.user_id == mock_user.id
+ assert not metadata.send_anonymous_usage_stats
+
+ @pytest.fixture
+ def used_fqns(self) -> Dict[str, Any]:
+ return {"models": frozenset((("my_test_project", "foo", "bar"),))}
+
+ def test_warn_for_unused_resource_config_paths(
+ self,
+ runtime_config: RuntimeConfig,
+ used_fqns: Dict[str, Any],
+ ):
+ catcher = EventCatcher(event_to_catch=UnusedResourceConfigPath)
+ add_callback_to_manager(catcher.catch)
+
+ runtime_config.models = {
+ "my_test_project": {
+ "foo": {
+ "materialized": "view",
+ "bar": {
+ "materialized": "table",
+ },
+ "baz": {
+ "materialized": "table",
+ },
+ }
+ }
+ }
+
+ runtime_config.warn_for_unused_resource_config_paths(used_fqns, [])
+ assert len(catcher.caught_events) == 1
+ expected_msg = "models.my_test_project.foo.baz"
+ assert expected_msg in str(catcher.caught_events[0].data)
+
+ def test_warn_for_unused_resource_config_paths_empty_models(
+ self,
+ runtime_config: RuntimeConfig,
+ used_fqns: Dict[str, Any],
+ ) -> None:
+ catcher = EventCatcher(event_to_catch=UnusedResourceConfigPath)
+ add_callback_to_manager(catcher.catch)
+
+ # models should already be empty, but let's ensure it
+ runtime_config.models = {}
+
+ runtime_config.warn_for_unused_resource_config_paths(used_fqns, ())
+ assert len(catcher.caught_events) == 0
+
+
+class TestRuntimeConfigFiles(BaseConfigTest):
+ def test_from_args(self):
+ with temp_cd(self.project_dir):
+ config = dbt.config.RuntimeConfig.from_args(self.args)
+ self.assertEqual(config.version, "0.0.1")
+ self.assertEqual(config.profile_name, "default")
+ # on osx, for example, these are not necessarily equal due to /private
+ self.assertTrue(os.path.samefile(config.project_root, self.project_dir))
+ self.assertEqual(config.model_paths, ["models"])
+ self.assertEqual(config.macro_paths, ["macros"])
+ self.assertEqual(config.seed_paths, ["seeds"])
+
self.assertEqual(config.test_paths, ["tests"]) + self.assertEqual(config.analysis_paths, ["analyses"]) + self.assertEqual( + set(config.docs_paths), set(["models", "seeds", "snapshots", "analyses", "macros"]) + ) + self.assertEqual(config.asset_paths, []) + self.assertEqual(config.target_path, "target") + self.assertEqual(config.clean_targets, ["target"]) + self.assertEqual(config.log_path, "logs") + self.assertEqual(config.packages_install_path, "dbt_packages") + self.assertEqual(config.quoting, {"database": True, "identifier": True, "schema": True}) + self.assertEqual(config.models, {}) + self.assertEqual(config.on_run_start, []) + self.assertEqual(config.on_run_end, []) + self.assertEqual(config.seeds, {}) + self.assertEqual(config.packages, PackageConfig(packages=[])) + self.assertEqual(config.project_name, "my_test_project") + + +class TestVariableRuntimeConfigFiles(BaseConfigTest): + def setUp(self): + super().setUp() + self.default_project_data.update( + { + "version": "{{ var('cli_version') }}", + "name": "blah", + "profile": "{{ env_var('env_value_profile') }}", + "on-run-end": [ + "{{ env_var('env_value_profile') }}", + ], + "models": { + "foo": { + "post-hook": "{{ env_var('env_value_profile') }}", + }, + "bar": { + # just gibberish, make sure it gets interpreted + "materialized": "{{ env_var('env_value_profile') }}", + }, + }, + "seeds": { + "foo": { + "post-hook": "{{ env_var('env_value_profile') }}", + }, + "bar": { + # just gibberish, make sure it gets interpreted + "materialized": "{{ env_var('env_value_profile') }}", + }, + }, + } + ) + self.write_project(self.default_project_data) + + def test_cli_and_env_vars(self): + self.args.target = "cli-and-env-vars" + self.args.vars = {"cli_value_host": "cli-postgres-host", "cli_version": "0.1.2"} + self.args.project_dir = self.project_dir + set_from_args(self.args, None) + with mock.patch.dict(os.environ, self.env_override): + safe_set_invocation_context() # reset invocation context with new env + config = dbt.config.RuntimeConfig.from_args(self.args) + + self.assertEqual(config.version, "0.1.2") + self.assertEqual(config.project_name, "blah") + self.assertEqual(config.profile_name, "default") + self.assertEqual(config.credentials.host, "cli-postgres-host") + self.assertEqual(config.credentials.user, "env-postgres-user") + # make sure hooks are not interpreted + self.assertEqual(config.on_run_end, ["{{ env_var('env_value_profile') }}"]) + self.assertEqual(config.models["foo"]["post-hook"], "{{ env_var('env_value_profile') }}") + self.assertEqual(config.models["bar"]["materialized"], "default") # rendered! + self.assertEqual(config.seeds["foo"]["post-hook"], "{{ env_var('env_value_profile') }}") + self.assertEqual(config.seeds["bar"]["materialized"], "default") # rendered! 
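The profile fixtures and the variable-config tests above lean on native rendering to coerce env_var strings into real types: "{{ env_var('env_value_port') | as_number }}" must become an integer port, and the tests expect "Could not convert value 'hello' into type 'number'" when that coercion is impossible. The following is a minimal sketch of that text-versus-native rendering split using plain jinja2 only; it is not dbt's own renderer, and the "port" variable name is purely illustrative.

# A minimal sketch, assuming only plain jinja2: text rendering keeps the value a string,
# while native rendering coerces the single-node output "6543" into an int, which is the
# behaviour the as_number profile fields above rely on.
from jinja2 import Environment
from jinja2.nativetypes import NativeEnvironment

source = "{{ port }}"          # stand-in for an env_var-driven profile field
context = {"port": "6543"}     # environment variables always arrive as strings

text_value = Environment().from_string(source).render(**context)
native_value = NativeEnvironment().from_string(source).render(**context)

assert isinstance(text_value, str) and text_value == "6543"
assert isinstance(native_value, int) and native_value == 6543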
diff --git a/tests/unit/test_selector_errors.py b/tests/unit/config/test_selectors.py similarity index 50% rename from tests/unit/test_selector_errors.py rename to tests/unit/config/test_selectors.py index 017c7dee22b..d306fb55282 100644 --- a/tests/unit/test_selector_errors.py +++ b/tests/unit/config/test_selectors.py @@ -1,10 +1,11 @@ -import dbt.exceptions import textwrap -import yaml import unittest -from dbt.config.selectors import selector_config_from_data -from dbt.config.selectors import SelectorConfig +import yaml + +import dbt.exceptions +from dbt.config.selectors import SelectorConfig, SelectorDict, selector_config_from_data +from dbt.exceptions import DbtSelectorsError def get_selector_dict(txt: str) -> dict: @@ -201,3 +202,188 @@ def test_multiple_default_true(self): dbt.exceptions.DbtSelectorsError, "Found multiple selectors with `default: true`:" ): selector_config_from_data(dct) + + def test_compare_cli_non_cli(self): + dct = get_selector_dict( + """\ + selectors: + - name: nightly_diet_snowplow + description: "This uses more CLI-style syntax" + definition: + union: + - intersection: + - '@source:snowplow' + - 'tag:nightly' + - 'models/export' + - exclude: + - intersection: + - 'package:snowplow' + - 'config.materialized:incremental' + - export_performance_timing + - name: nightly_diet_snowplow_full + description: "This is a fuller YAML specification" + definition: + union: + - intersection: + - method: source + value: snowplow + childrens_parents: true + - method: tag + value: nightly + - method: path + value: models/export + - exclude: + - intersection: + - method: package + value: snowplow + - method: config.materialized + value: incremental + - method: fqn + value: export_performance_timing + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + with_strings = sel_dict["nightly_diet_snowplow"]["definition"] + no_strings = sel_dict["nightly_diet_snowplow_full"]["definition"] + self.assertEqual(with_strings, no_strings) + + def test_single_string_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: nightly_selector + definition: + 'tag:nightly' + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + expected = {"method": "tag", "value": "nightly"} + definition = sel_dict["nightly_selector"]["definition"] + self.assertEqual(expected, definition) + + def test_single_key_value_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: nightly_selector + definition: + tag: nightly + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + expected = {"method": "tag", "value": "nightly"} + definition = sel_dict["nightly_selector"]["definition"] + self.assertEqual(expected, definition) + + def test_parent_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: kpi_nightly_selector + definition: + '+exposure:kpi_nightly' + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + expected = {"method": "exposure", "value": "kpi_nightly", "parents": True} + definition = sel_dict["kpi_nightly_selector"]["definition"] + self.assertEqual(expected, definition) + + def test_plus_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: my_model_children_selector + definition: + 'my_model+2' + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + expected = {"method": "fqn", "value": "my_model", 
"children": True, "children_depth": "2"} + definition = sel_dict["my_model_children_selector"]["definition"] + self.assertEqual(expected, definition) + + def test_selector_definition(self): + dct = get_selector_dict( + """\ + selectors: + - name: default + definition: + union: + - intersection: + - tag: foo + - tag: bar + - name: inherited + definition: + method: selector + value: default + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) + assert sel_dict + definition = sel_dict["default"]["definition"] + expected = sel_dict["inherited"]["definition"] + self.assertEqual(expected, definition) + + def test_selector_definition_with_exclusion(self): + dct = get_selector_dict( + """\ + selectors: + - name: default + definition: + union: + - intersection: + - tag: foo + - tag: bar + - name: inherited + definition: + union: + - method: selector + value: default + - exclude: + - tag: bar + - name: comparison + definition: + union: + - union: + - intersection: + - tag: foo + - tag: bar + - exclude: + - tag: bar + """ + ) + + sel_dict = SelectorDict.parse_from_selectors_list((dct["selectors"])) + assert sel_dict + definition = sel_dict["inherited"]["definition"] + expected = sel_dict["comparison"]["definition"] + self.assertEqual(expected, definition) + + def test_missing_selector(self): + dct = get_selector_dict( + """\ + selectors: + - name: inherited + definition: + method: selector + value: default + """ + ) + with self.assertRaises(DbtSelectorsError) as err: + SelectorDict.parse_from_selectors_list((dct["selectors"])) + + self.assertEqual( + "Existing selector definition for default not found.", str(err.exception.msg) + ) diff --git a/tests/unit/config/test_utils.py b/tests/unit/config/test_utils.py new file mode 100644 index 00000000000..88f73ad2fdb --- /dev/null +++ b/tests/unit/config/test_utils.py @@ -0,0 +1,74 @@ +import pytest + +from dbt.config.utils import ( + exclusive_primary_alt_value_setting, + normalize_warn_error_options, +) +from dbt.exceptions import DbtExclusivePropertyUseError + + +class TestExclusivePrimaryAltValueSetting: + @pytest.fixture(scope="class") + def primary_key(self) -> str: + return "key_a" + + @pytest.fixture(scope="class") + def alt_key(self) -> str: + return "key_b" + + @pytest.fixture(scope="class") + def value(self) -> str: + return "I LIKE CATS" + + def test_primary_set(self, primary_key: str, alt_key: str, value: str): + test_dict = {primary_key: value} + exclusive_primary_alt_value_setting(test_dict, primary_key, alt_key) + assert test_dict.get(primary_key) == value + assert test_dict.get(alt_key) is None + + def test_alt_set(self, primary_key: str, alt_key: str, value: str): + test_dict = {alt_key: value} + exclusive_primary_alt_value_setting(test_dict, primary_key, alt_key) + assert test_dict.get(primary_key) == value + + def test_primary_and_alt_set(self, primary_key: str, alt_key: str, value: str): + test_dict = {primary_key: value, alt_key: value} + with pytest.raises(DbtExclusivePropertyUseError): + exclusive_primary_alt_value_setting(test_dict, primary_key, alt_key) + + def test_neither_primary_nor_alt_set(self, primary_key: str, alt_key: str): + test_dict = {} + exclusive_primary_alt_value_setting(test_dict, primary_key, alt_key) + assert test_dict.get(primary_key) is None + assert test_dict.get(alt_key) is None + + +class TestNormalizeWarnErrorOptions: + def test_primary_set(self): + test_dict = { + "error": ["SomeWarning"], + } + normalize_warn_error_options(test_dict) + assert len(test_dict) == 1 + assert 
test_dict["include"] == ["SomeWarning"] + + def test_convert(self): + test_dict = {"warn": None, "silence": None, "include": ["SomeWarning"]} + normalize_warn_error_options(test_dict) + assert test_dict["exclude"] == [] + assert test_dict["include"] == ["SomeWarning"] + assert test_dict["silence"] == [] + + def test_both_keys_set(self): + test_dict = { + "warn": ["SomeWarning"], + "exclude": ["SomeWarning"], + } + with pytest.raises(DbtExclusivePropertyUseError): + normalize_warn_error_options(test_dict) + + def test_empty_dict(self): + test_dict = {} + normalize_warn_error_options(test_dict) + assert test_dict.get("include") is None + assert test_dict.get("exclude") is None diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 00000000000..7c14e8dee5b --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1,37 @@ +import pytest + +from dbt.artifacts.resources import Quoting, SourceConfig +from dbt.artifacts.resources.types import NodeType +from dbt.contracts.graph.nodes import SourceDefinition + +# All manifest related fixtures. +from tests.unit.utils.adapter import * # noqa +from tests.unit.utils.config import * # noqa +from tests.unit.utils.event_manager import * # noqa +from tests.unit.utils.flags import * # noqa +from tests.unit.utils.manifest import * # noqa +from tests.unit.utils.project import * # noqa + + +@pytest.fixture +def basic_parsed_source_definition_object(): + return SourceDefinition( + columns={}, + database="some_db", + description="", + fqn=["test", "source", "my_source", "my_source_table"], + identifier="my_source_table", + loader="stitch", + name="my_source_table", + original_file_path="/root/models/sources.yml", + package_name="test", + path="/root/models/sources.yml", + quoting=Quoting(), + resource_type=NodeType.Source, + schema="some_schema", + source_description="my source description", + source_name="my_source", + unique_id="test.source.my_source.my_source_table", + tags=[], + config=SourceConfig(), + ) diff --git a/tests/unit/context/__init__.py b/tests/unit/context/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/context/test_base.py b/tests/unit/context/test_base.py new file mode 100644 index 00000000000..5a9e8538ea2 --- /dev/null +++ b/tests/unit/context/test_base.py @@ -0,0 +1,54 @@ +import os + +from jinja2.runtime import Undefined + +from dbt.context.base import BaseContext + + +class TestBaseContext: + def test_log_jinja_undefined(self): + # regression test for CT-2259 + try: + os.environ["DBT_ENV_SECRET_LOG_TEST"] = "cats_are_cool" + BaseContext.log(msg=Undefined(), info=True) + except Exception as e: + assert False, f"Logging an jinja2.Undefined object raises an exception: {e}" + + def test_log_with_dbt_env_secret(self): + # regression test for CT-1783 + try: + os.environ["DBT_ENV_SECRET_LOG_TEST"] = "cats_are_cool" + BaseContext.log({"fact1": "I like cats"}, info=True) + except Exception as e: + assert False, f"Logging while a `DBT_ENV_SECRET` was set raised an exception: {e}" + + def test_flags(self): + expected_context_flags = { + "use_experimental_parser", + "static_parser", + "warn_error", + "warn_error_options", + "write_json", + "partial_parse", + "use_colors", + "profiles_dir", + "debug", + "log_format", + "version_check", + "fail_fast", + "send_anonymous_usage_stats", + "printer_width", + "indirect_selection", + "log_cache_events", + "quiet", + "no_print", + "cache_selected_only", + "introspect", + "target_path", + "log_path", + "invocation_command", + "empty", + } + flags 
= BaseContext(cli_vars={}).flags + for expected_flag in expected_context_flags: + assert hasattr(flags, expected_flag.upper()) diff --git a/tests/unit/test_context.py b/tests/unit/context/test_context.py similarity index 59% rename from tests/unit/test_context.py rename to tests/unit/context/test_context.py index b51e8e76de5..10e591093ee 100644 --- a/tests/unit/test_context.py +++ b/tests/unit/context/test_context.py @@ -1,41 +1,33 @@ -import itertools -import unittest import os -from typing import Set, Dict, Any +from typing import Any, Dict, Set from unittest import mock import pytest -from dbt.adapters import postgres -from dbt.adapters import factory +import dbt_common.exceptions +from dbt.adapters import factory, postgres from dbt.clients.jinja import MacroStack +from dbt.config.project import VarProvider +from dbt.context import base, docs, macros, providers, query_header +from dbt.contracts.files import FileHash from dbt.contracts.graph.nodes import ( - ModelNode, - NodeConfig, DependsOn, Macro, + ModelNode, + NodeConfig, + UnitTestNode, + UnitTestOverrides, ) -from dbt.config.project import VarProvider -from dbt.context import base, providers, docs, manifest, macros -from dbt.contracts.files import FileHash -from dbt.events.functions import reset_metadata_vars from dbt.node_types import NodeType -import dbt.exceptions -from .utils import ( - config_from_parts_or_dicts, - inject_adapter, - clear_plugin, -) -from .mock_adapter import adapter_factory -from dbt.flags import set_from_args -from argparse import Namespace - -set_from_args(Namespace(WARN_ERROR=False), None) +from dbt_common.events.functions import reset_metadata_vars +from tests.unit.mock_adapter import adapter_factory +from tests.unit.utils import clear_plugin, config_from_parts_or_dicts, inject_adapter -class TestVar(unittest.TestCase): - def setUp(self): - self.model = ModelNode( +class TestVar: + @pytest.fixture + def model(self): + return ModelNode( alias="model_one", name="model_one", database="dbt", @@ -69,89 +61,114 @@ def setUp(self): columns={}, checksum=FileHash.from_contents(""), ) - self.context = mock.MagicMock() - self.provider = VarProvider({}) - self.config = mock.MagicMock( - config_version=2, vars=self.provider, cli_vars={}, project_name="root" - ) - def test_var_default_something(self): - self.config.cli_vars = {"foo": "baz"} - var = providers.RuntimeVar(self.context, self.config, self.model) - self.assertEqual(var("foo"), "baz") - self.assertEqual(var("foo", "bar"), "baz") + @pytest.fixture + def context(self): + return mock.MagicMock() + + @pytest.fixture + def provider(self): + return VarProvider({}) + + @pytest.fixture + def config(self, provider): + return mock.MagicMock(config_version=2, vars=provider, cli_vars={}, project_name="root") + + def test_var_default_something(self, model, config, context): + config.cli_vars = {"foo": "baz"} + var = providers.RuntimeVar(context, config, model) + + assert var("foo") == "baz" + assert var("foo", "bar") == "baz" - def test_var_default_none(self): - self.config.cli_vars = {"foo": None} - var = providers.RuntimeVar(self.context, self.config, self.model) - self.assertEqual(var("foo"), None) - self.assertEqual(var("foo", "bar"), None) + def test_var_default_none(self, model, config, context): + config.cli_vars = {"foo": None} + var = providers.RuntimeVar(context, config, model) - def test_var_not_defined(self): - var = providers.RuntimeVar(self.context, self.config, self.model) + assert var("foo") is None + assert var("foo", "bar") is None - 
self.assertEqual(var("foo", "bar"), "bar") - with self.assertRaises(dbt.exceptions.CompilationError): + def test_var_not_defined(self, model, config, context): + var = providers.RuntimeVar(self.context, config, model) + + assert var("foo", "bar") == "bar" + with pytest.raises(dbt_common.exceptions.CompilationError): var("foo") - def test_parser_var_default_something(self): - self.config.cli_vars = {"foo": "baz"} - var = providers.ParseVar(self.context, self.config, self.model) - self.assertEqual(var("foo"), "baz") - self.assertEqual(var("foo", "bar"), "baz") + def test_parser_var_default_something(self, model, config, context): + config.cli_vars = {"foo": "baz"} + var = providers.ParseVar(context, config, model) + assert var("foo") == "baz" + assert var("foo", "bar") == "baz" - def test_parser_var_default_none(self): - self.config.cli_vars = {"foo": None} - var = providers.ParseVar(self.context, self.config, self.model) - self.assertEqual(var("foo"), None) - self.assertEqual(var("foo", "bar"), None) + def test_parser_var_default_none(self, model, config, context): + config.cli_vars = {"foo": None} + var = providers.ParseVar(context, config, model) + assert var("foo") is None + assert var("foo", "bar") is None - def test_parser_var_not_defined(self): + def test_parser_var_not_defined(self, model, config, context): # at parse-time, we should not raise if we encounter a missing var # that way disabled models don't get parse errors - var = providers.ParseVar(self.context, self.config, self.model) + var = providers.ParseVar(context, config, model) - self.assertEqual(var("foo", "bar"), "bar") - self.assertEqual(var("foo"), None) + assert var("foo", "bar") == "bar" + assert var("foo") is None -class TestParseWrapper(unittest.TestCase): - def setUp(self): - self.mock_config = mock.MagicMock() +class TestParseWrapper: + @pytest.fixture + def mock_adapter(self): + mock_config = mock.MagicMock() + mock_mp_context = mock.MagicMock() adapter_class = adapter_factory() - self.mock_adapter = adapter_class(self.mock_config) - self.namespace = mock.MagicMock() - self.wrapper = providers.ParseDatabaseWrapper(self.mock_adapter, self.namespace) - self.responder = self.mock_adapter.responder + return adapter_class(mock_config, mock_mp_context) + + @pytest.fixture + def wrapper(self, mock_adapter): + namespace = mock.MagicMock() + return providers.ParseDatabaseWrapper(mock_adapter, namespace) - def test_unwrapped_method(self): - self.assertEqual(self.wrapper.quote("test_value"), '"test_value"') - self.responder.quote.assert_called_once_with("test_value") + @pytest.fixture + def responder(self, mock_adapter): + return mock_adapter.responder - def test_wrapped_method(self): - found = self.wrapper.get_relation("database", "schema", "identifier") - self.assertEqual(found, None) - self.responder.get_relation.assert_not_called() + def test_unwrapped_method(self, wrapper, responder): + assert wrapper.quote("test_value") == '"test_value"' + responder.quote.assert_called_once_with("test_value") + def test_wrapped_method(self, wrapper, responder): + found = wrapper.get_relation("database", "schema", "identifier") + assert found is None + responder.get_relation.assert_not_called() -class TestRuntimeWrapper(unittest.TestCase): - def setUp(self): - self.mock_config = mock.MagicMock() - self.mock_config.quoting = { + +class TestRuntimeWrapper: + @pytest.fixture + def mock_adapter(self): + mock_config = mock.MagicMock() + mock_config.quoting = { "database": True, "schema": True, "identifier": True, } + mock_mp_context = 
mock.MagicMock() adapter_class = adapter_factory() - self.mock_adapter = adapter_class(self.mock_config) - self.namespace = mock.MagicMock() - self.wrapper = providers.RuntimeDatabaseWrapper(self.mock_adapter, self.namespace) - self.responder = self.mock_adapter.responder + return adapter_class(mock_config, mock_mp_context) + + @pytest.fixture + def wrapper(self, mock_adapter): + namespace = mock.MagicMock() + return providers.RuntimeDatabaseWrapper(mock_adapter, namespace) - def test_unwrapped_method(self): + @pytest.fixture + def responder(self, mock_adapter): + return mock_adapter.responder + + def test_unwrapped_method(self, wrapper, responder): # the 'quote' method isn't wrapped, we should get our expected inputs - self.assertEqual(self.wrapper.quote("test_value"), '"test_value"') - self.responder.quote.assert_called_once_with("test_value") + assert wrapper.quote("test_value") == '"test_value"' + responder.quote.assert_called_once_with("test_value") def assert_has_keys(required_keys: Set[str], maybe_keys: Set[str], ctx: Dict[str, Any]): @@ -313,12 +330,27 @@ def mock_macro(name, package_name): return macro -def mock_manifest(config): +def mock_manifest(config, additional_macros=None): + default_macro_names = ["macro_a", "macro_b"] + default_macros = [mock_macro(name, config.project_name) for name in default_macro_names] + additional_macros = additional_macros or [] + all_macros = default_macros + additional_macros + manifest_macros = {} - for name in ["macro_a", "macro_b"]: - macro = mock_macro(name, config.project_name) + macros_by_package = {} + for macro in all_macros: manifest_macros[macro.unique_id] = macro - return mock.MagicMock(macros=manifest_macros) + if macro.package_name not in macros_by_package: + macros_by_package[macro.package_name] = {} + macro_package = macros_by_package[macro.package_name] + macro_package[macro.name] = macro + + def gmbp(): + return macros_by_package + + m = mock.MagicMock(macros=manifest_macros) + m.get_macros_by_package = gmbp + return m def mock_model(): @@ -358,6 +390,14 @@ def mock_model(): ) +def mock_unit_test_node(): + return mock.MagicMock( + __class__=UnitTestNode, + resource_type=NodeType.Unit, + tested_node_unique_id="model.root.model_one", + ) + + @pytest.fixture def get_adapter(): with mock.patch.object(providers, "get_adapter") as patch: @@ -391,7 +431,7 @@ def postgres_adapter(config_postgres, get_adapter): def test_query_header_context(config_postgres, manifest_fx): - ctx = manifest.generate_query_header_context( + ctx = query_header.generate_query_header_context( config=config_postgres, manifest=manifest_fx, ) @@ -457,7 +497,7 @@ def test_macro_namespace_duplicates(config_postgres, manifest_fx): mn.add_macros(manifest_fx.macros.values(), {}) # same pkg, same name: error - with pytest.raises(dbt.exceptions.CompilationError): + with pytest.raises(dbt_common.exceptions.CompilationError): mn.add_macro(mock_macro("macro_a", "root"), {}) # different pkg, same name: no error @@ -467,15 +507,19 @@ def test_macro_namespace_duplicates(config_postgres, manifest_fx): def test_macro_namespace(config_postgres, manifest_fx): mn = macros.MacroNamespaceBuilder("root", "search", MacroStack(), ["dbt_postgres", "dbt"]) + mbp = manifest_fx.get_macros_by_package() dbt_macro = mock_macro("some_macro", "dbt") + mbp["dbt"] = {"some_macro": dbt_macro} + # same namespace, same name, different pkg! 
pg_macro = mock_macro("some_macro", "dbt_postgres") + mbp["dbt_postgres"] = {"some_macro": pg_macro} + # same name, different package package_macro = mock_macro("some_macro", "root") + mbp["root"]["some_macro"] = package_macro - all_macros = itertools.chain(manifest_fx.macros.values(), [dbt_macro, pg_macro, package_macro]) - - namespace = mn.build_namespace(all_macros, {}) + namespace = mn.build_namespace(mbp, {}) dct = dict(namespace) for result in [dct, namespace]: assert "dbt" in result @@ -518,3 +562,84 @@ def test_dbt_metadata_envs( # cleanup reset_metadata_vars() + + +def test_unit_test_runtime_context(config_postgres, manifest_fx, get_adapter, get_include_paths): + ctx = providers.generate_runtime_unit_test_context( + unit_test=mock_unit_test_node(), + config=config_postgres, + manifest=manifest_fx, + ) + assert_has_keys(REQUIRED_MODEL_KEYS, MAYBE_KEYS, ctx) + + +def test_unit_test_runtime_context_macro_overrides_global( + config_postgres, manifest_fx, get_adapter, get_include_paths +): + unit_test = mock_unit_test_node() + unit_test.overrides = UnitTestOverrides(macros={"macro_a": "override"}) + ctx = providers.generate_runtime_unit_test_context( + unit_test=unit_test, + config=config_postgres, + manifest=manifest_fx, + ) + assert ctx["macro_a"]() == "override" + + +def test_unit_test_runtime_context_macro_overrides_package( + config_postgres, manifest_fx, get_adapter, get_include_paths +): + unit_test = mock_unit_test_node() + unit_test.overrides = UnitTestOverrides(macros={"some_package.some_macro": "override"}) + + dbt_macro = mock_macro("some_macro", "some_package") + manifest_with_dbt_macro = mock_manifest(config_postgres, additional_macros=[dbt_macro]) + + ctx = providers.generate_runtime_unit_test_context( + unit_test=unit_test, + config=config_postgres, + manifest=manifest_with_dbt_macro, + ) + assert ctx["some_package"]["some_macro"]() == "override" + + +@pytest.mark.parametrize( + "overrides,expected_override_value", + [ + # override dbt macro at global level + ({"some_macro": "override"}, "override"), + # # override dbt macro at dbt-namespaced level level + ({"dbt.some_macro": "override"}, "override"), + # override dbt macro at both levels - global override should win + ( + {"some_macro": "dbt_global_override", "dbt.some_macro": "dbt_namespaced_override"}, + "dbt_global_override", + ), + # override dbt macro at both levels - global override should win, regardless of order + ( + {"dbt.some_macro": "dbt_namespaced_override", "some_macro": "dbt_global_override"}, + "dbt_global_override", + ), + ], +) +def test_unit_test_runtime_context_macro_overrides_dbt_macro( + overrides, + expected_override_value, + config_postgres, + manifest_fx, + get_adapter, + get_include_paths, +): + unit_test = mock_unit_test_node() + unit_test.overrides = UnitTestOverrides(macros=overrides) + + dbt_macro = mock_macro("some_macro", "dbt") + manifest_with_dbt_macro = mock_manifest(config_postgres, additional_macros=[dbt_macro]) + + ctx = providers.generate_runtime_unit_test_context( + unit_test=unit_test, + config=config_postgres, + manifest=manifest_with_dbt_macro, + ) + assert ctx["some_macro"]() == expected_override_value + assert ctx["dbt"]["some_macro"]() == expected_override_value diff --git a/tests/unit/test_macro_resolver.py b/tests/unit/context/test_macro_resolver.py similarity index 100% rename from tests/unit/test_macro_resolver.py rename to tests/unit/context/test_macro_resolver.py index 57e8e9e47db..4611d4dc949 100644 --- a/tests/unit/test_macro_resolver.py +++ 
b/tests/unit/context/test_macro_resolver.py @@ -1,8 +1,8 @@ import unittest from unittest import mock -from dbt.contracts.graph.nodes import Macro from dbt.context.macro_resolver import MacroResolver +from dbt.contracts.graph.nodes import Macro def mock_macro(name, package_name): diff --git a/tests/unit/context/test_providers.py b/tests/unit/context/test_providers.py new file mode 100644 index 00000000000..46c29254a9a --- /dev/null +++ b/tests/unit/context/test_providers.py @@ -0,0 +1,161 @@ +import os +from unittest import mock + +import pytest +from pytest_mock import MockerFixture + +from dbt.adapters.base import BaseRelation +from dbt.artifacts.resources import NodeConfig, Quoting +from dbt.artifacts.resources.types import BatchSize +from dbt.context.providers import ( + BaseResolver, + EventTimeFilter, + RuntimeRefResolver, + RuntimeSourceResolver, +) + + +class TestBaseResolver: + class ResolverSubclass(BaseResolver): + def __call__(self, *args: str): + pass + + @pytest.fixture + def resolver(self): + return self.ResolverSubclass( + db_wrapper=mock.Mock(), + model=mock.Mock(), + config=mock.Mock(), + manifest=mock.Mock(), + ) + + @pytest.mark.parametrize( + "empty,expected_resolve_limit", + [(False, None), (True, 0)], + ) + def test_resolve_limit(self, resolver, empty, expected_resolve_limit): + resolver.config.args.EMPTY = empty + + assert resolver.resolve_limit == expected_resolve_limit + + @pytest.mark.parametrize( + "dbt_experimental_microbatch,materialized,incremental_strategy,expect_filter", + [ + (True, "incremental", "microbatch", True), + (False, "incremental", "microbatch", False), + (True, "table", "microbatch", False), + (True, "incremental", "merge", False), + ], + ) + def test_resolve_event_time_filter( + self, + mocker: MockerFixture, + resolver: ResolverSubclass, + dbt_experimental_microbatch: bool, + materialized: str, + incremental_strategy: str, + expect_filter: bool, + ) -> None: + if dbt_experimental_microbatch: + mocker.patch.dict(os.environ, {"DBT_EXPERIMENTAL_MICROBATCH": "True"}) + + # Target mocking + target = mock.Mock() + target.config = mock.MagicMock(NodeConfig) + target.config.event_time = "created_at" + + # Resolver mocking + resolver.config.args.EVENT_TIME_END = None + resolver.config.args.EVENT_TIME_START = None + resolver.model.config = mock.MagicMock(NodeConfig) + resolver.model.config.materialized = materialized + resolver.model.config.incremental_strategy = incremental_strategy + resolver.model.config.batch_size = BatchSize.day + resolver.model.config.lookback = 0 + + # Try to get an EventTimeFilter + event_time_filter = resolver.resolve_event_time_filter(target=target) + + if expect_filter: + assert isinstance(event_time_filter, EventTimeFilter) + else: + assert event_time_filter is None + + +class TestRuntimeRefResolver: + @pytest.fixture + def resolver(self): + mock_db_wrapper = mock.Mock() + mock_db_wrapper.Relation = BaseRelation + + return RuntimeRefResolver( + db_wrapper=mock_db_wrapper, + model=mock.Mock(), + config=mock.Mock(), + manifest=mock.Mock(), + ) + + @pytest.mark.parametrize( + "empty,is_ephemeral_model,expected_limit", + [ + (False, False, None), + (True, False, 0), + (False, True, None), + (True, True, 0), + ], + ) + def test_create_relation_with_empty(self, resolver, empty, is_ephemeral_model, expected_limit): + # setup resolver and input node + resolver.config.args.EMPTY = empty + resolver.config.quoting = {} + mock_node = mock.Mock() + mock_node.database = "test" + mock_node.schema = "test" + mock_node.identifier = "test" + 
mock_node.quoting_dict = {} + mock_node.alias = "test" + mock_node.is_ephemeral_model = is_ephemeral_model + mock_node.defer_relation = None + + # create limited relation + with mock.patch("dbt.contracts.graph.nodes.ParsedNode", new=mock.Mock): + relation = resolver.create_relation(mock_node) + assert relation.limit == expected_limit + + +class TestRuntimeSourceResolver: + @pytest.fixture + def resolver(self): + mock_db_wrapper = mock.Mock() + mock_db_wrapper.Relation = BaseRelation + + return RuntimeSourceResolver( + db_wrapper=mock_db_wrapper, + model=mock.Mock(), + config=mock.Mock(), + manifest=mock.Mock(), + ) + + @pytest.mark.parametrize( + "empty,expected_limit", + [ + (False, None), + (True, 0), + ], + ) + def test_create_relation_with_empty(self, resolver, empty, expected_limit): + # setup resolver and input source + resolver.config.args.EMPTY = empty + resolver.config.quoting = {} + + mock_source = mock.Mock() + mock_source.database = "test" + mock_source.schema = "test" + mock_source.identifier = "test" + mock_source.quoting = Quoting() + mock_source.quoting_dict = {} + resolver.manifest.resolve_source.return_value = mock_source + + # create limited relation + relation = resolver.resolve("test", "test") + assert relation.limit == expected_limit diff --git a/tests/unit/context/test_query_header.py b/tests/unit/context/test_query_header.py new file mode 100644 index 00000000000..f14d28d40c4 --- /dev/null +++ b/tests/unit/context/test_query_header.py @@ -0,0 +1,64 @@ +import re +from unittest import mock + +import pytest + +from dbt.adapters.base.query_headers import MacroQueryStringSetter +from dbt.context.query_header import generate_query_header_context +from tests.unit.utils import config_from_parts_or_dicts + + +class TestQueryHeaderContext: + @pytest.fixture + def profile_cfg(self): + return { + "outputs": { + "test": { + "type": "postgres", + "dbname": "postgres", + "user": "test", + "host": "test", + "pass": "test", + "port": 5432, + "schema": "test", + }, + }, + "target": "test", + } + + @pytest.fixture + def project_cfg(self): + return { + "name": "query_headers", + "version": "0.1", + "profile": "test", + "config-version": 2, + } + + @pytest.fixture + def query(self): + return "SELECT 1;" + + def test_comment_should_prepend_query_by_default(self, profile_cfg, project_cfg, query): + config = config_from_parts_or_dicts(project_cfg, profile_cfg) + + query_header_context = generate_query_header_context(config, mock.MagicMock(macros={})) + query_header = MacroQueryStringSetter(config, query_header_context) + sql = query_header.add(query) + assert re.match(f"^\/\*.*\*\/\n{query}$", sql) # noqa: [W605] + + def test_append_comment(self, profile_cfg, project_cfg, query): + project_cfg.update({"query-comment": {"comment": "executed by dbt", "append": True}}) + config = config_from_parts_or_dicts(project_cfg, profile_cfg) + + query_header_context = generate_query_header_context(config, mock.MagicMock(macros={})) + query_header = MacroQueryStringSetter(config, query_header_context) + sql = query_header.add(query) + + assert sql == f"{query[:-1]}\n/* executed by dbt */;" + + def test_disable_query_comment(self, profile_cfg, project_cfg, query): + project_cfg.update({"query-comment": ""}) + config = config_from_parts_or_dicts(project_cfg, profile_cfg) + query_header = MacroQueryStringSetter(config, mock.MagicMock(macros={})) + assert query_header.add(query) == query diff --git a/tests/unit/contracts/__init__.py b/tests/unit/contracts/__init__.py new file mode 100644 index 
00000000000..e69de29bb2d diff --git a/tests/unit/contracts/files/test_schema_source_file.py b/tests/unit/contracts/files/test_schema_source_file.py new file mode 100644 index 00000000000..6886c262bf6 --- /dev/null +++ b/tests/unit/contracts/files/test_schema_source_file.py @@ -0,0 +1,116 @@ +from dbt.contracts.files import SchemaSourceFile + + +def test_fix_metrics_from_measure(): + # This is a test for converting "generated_metrics" to "metrics_from_measures" + schema_source_file = { + "path": { + "searched_path": "models", + "relative_path": "schema.yml", + "modification_time": 1721228094.7544806, + "project_root": "/Users/a_user/sample_project", + }, + "checksum": { + "name": "sha256", + "checksum": "63130d480a44a481aa0adc0a8469dccbb72ea36cc09f06683a584a31339f362e", + }, + "project_name": "test", + "parse_file_type": "schema", + "dfy": { + "models": [{"name": "fct_revenue", "description": "This is the model fct_revenue."}], + "semantic_models": [ + { + "name": "revenue", + "description": "This is the FIRST semantic model.", + "model": "ref('fct_revenue')", + "defaults": {"agg_time_dimension": "ds"}, + "measures": [ + { + "name": "txn_revenue", + "expr": "revenue", + "agg": "sum", + "agg_time_dimension": "ds", + "create_metric": True, + }, + { + "name": "sum_of_things", + "expr": 2, + "agg": "sum", + "agg_time_dimension": "ds", + }, + ], + "dimensions": [ + { + "name": "ds", + "type": "time", + "expr": "created_at", + "type_params": {"time_granularity": "day"}, + } + ], + "entities": [ + {"name": "user", "type": "foreign", "expr": "user_id"}, + {"name": "id", "type": "primary"}, + ], + }, + { + "name": "alt_revenue", + "description": "This is the second revenue semantic model.", + "model": "ref('fct_revenue')", + "defaults": {"agg_time_dimension": "ads"}, + "measures": [ + { + "name": "alt_txn_revenue", + "expr": "revenue", + "agg": "sum", + "agg_time_dimension": "ads", + "create_metric": True, + }, + { + "name": "alt_sum_of_things", + "expr": 2, + "agg": "sum", + "agg_time_dimension": "ads", + }, + ], + "dimensions": [ + { + "name": "ads", + "type": "time", + "expr": "created_at", + "type_params": {"time_granularity": "day"}, + } + ], + "entities": [ + {"name": "user", "type": "foreign", "expr": "user_id"}, + {"name": "id", "type": "primary"}, + ], + }, + ], + "metrics": [ + { + "name": "simple_metric", + "label": "Simple Metric", + "type": "simple", + "type_params": {"measure": "sum_of_things"}, + } + ], + }, + "data_tests": {}, + "metrics": ["metric.test.simple_metric"], + "generated_metrics": ["metric.test.txn_revenue", "metric.test.alt_txn_revenue"], + "metrics_from_measures": {}, + "ndp": ["model.test.fct_revenue"], + "semantic_models": ["semantic_model.test.revenue", "semantic_model.test.alt_revenue"], + "mcp": {}, + "env_vars": {}, + } + + expected_metrics_from_measures = { + "revenue": ["metric.test.txn_revenue"], + "alt_revenue": ["metric.test.alt_txn_revenue"], + } + ssf = SchemaSourceFile.from_dict(schema_source_file) + assert ssf + ssf.fix_metrics_from_measures() + assert ssf.generated_metrics == [] + assert ssf.metrics_from_measures == expected_metrics_from_measures diff --git a/tests/unit/contracts/graph/__init__.py b/tests/unit/contracts/graph/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/test_manifest.py b/tests/unit/contracts/graph/test_manifest.py similarity index 78% rename from tests/unit/test_manifest.py rename to tests/unit/contracts/graph/test_manifest.py index 843d2c9bf90..d8d1df0d900 100644 --- 
a/tests/unit/test_manifest.py +++ b/tests/unit/contracts/graph/test_manifest.py @@ -12,43 +12,45 @@ import dbt.flags import dbt.version +import dbt_common.invocation from dbt import tracking from dbt.adapters.base.plugin import AdapterPlugin +from dbt.artifacts.resources import ( + ExposureType, + MaturityType, + MetricInputMeasure, + MetricTypeParams, + Owner, + RefArgs, + WhereFilter, + WhereFilterIntersection, +) from dbt.contracts.files import FileHash -from dbt.contracts.graph.manifest import Manifest, ManifestMetadata +from dbt.contracts.graph.manifest import DisabledLookup, Manifest, ManifestMetadata from dbt.contracts.graph.nodes import ( - ModelNode, DependsOn, - NodeConfig, - SeedNode, - SourceDefinition, Exposure, - Metric, - MetricInputMeasure, - MetricTypeParams, - WhereFilter, Group, - RefArgs, -) -from dbt.contracts.graph.unparsed import ( - ExposureType, - Owner, - MaturityType, + Metric, + ModelConfig, + ModelNode, + SeedNode, + SourceDefinition, ) -from dbt.events.functions import reset_metadata_vars -from dbt.exceptions import AmbiguousResourceNameRefError +from dbt.exceptions import AmbiguousResourceNameRefError, ParsingError from dbt.flags import set_from_args from dbt.node_types import NodeType +from dbt_common.events.functions import reset_metadata_vars from dbt_semantic_interfaces.type_enums import MetricType - -from .utils import ( - MockMacro, +from tests.unit.utils import ( MockDocumentation, - MockSource, - MockNode, - MockMaterialization, MockGenerateMacro, + MockMacro, + MockMaterialization, + MockNode, + MockSource, inject_plugin, + make_manifest, ) REQUIRED_PARSED_NODE_KEYS = frozenset( @@ -73,13 +75,13 @@ "raw_code", "language", "description", + "primary_key", "columns", "fqn", "build_path", "compiled_path", "patch_path", "docs", - "deferred", "checksum", "unrendered_config", "created_at", @@ -92,6 +94,7 @@ "constraints", "deprecation_date", "defer_relation", + "time_spine", } ) @@ -112,7 +115,7 @@ def setUp(self): self.maxDiff = None - self.model_config = NodeConfig.from_dict( + self.model_config = ModelConfig.from_dict( { "enabled": True, "materialized": "view", @@ -156,7 +159,10 @@ def setUp(self): type=MetricType.SIMPLE, type_params=MetricTypeParams( measure=MetricInputMeasure( - name="customers", filter=WhereFilter(where_sql_template="is_new = True") + name="customers", + filter=WhereFilterIntersection( + [WhereFilter(where_sql_template="is_new = True")] + ), ) ), resource_type=NodeType.Metric, @@ -337,6 +343,7 @@ def setUp(self): } self.semantic_models = {} + self.saved_queries = {} for exposure in self.exposures.values(): exposure.validate(exposure.to_dict(omit_none=True)) @@ -353,8 +360,9 @@ def tearDown(self): del os.environ["DBT_ENV_CUSTOM_ENV_key"] reset_metadata_vars() + @mock.patch.object(tracking, "active_user") @freezegun.freeze_time("2018-02-14T09:15:13Z") - def test_no_nodes(self): + def test_no_nodes(self, mock_user): manifest = Manifest( nodes={}, sources={}, @@ -367,9 +375,12 @@ def test_no_nodes(self): selectors={}, metadata=ManifestMetadata(generated_at=datetime.utcnow()), semantic_models={}, + saved_queries={}, ) - invocation_id = dbt.events.functions.EVENT_MANAGER.invocation_id + invocation_id = dbt_common.invocation._INVOCATION_ID + mock_user.id = "cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf" + set_from_args(Namespace(SEND_ANONYMOUS_USAGE_STATS=False), None) self.assertEqual( manifest.writable_manifest().to_dict(omit_none=True), { @@ -385,19 +396,26 @@ def test_no_nodes(self): "group_map": {}, "metadata": { "generated_at": 
"2018-02-14T09:15:13Z", - "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v10.json", + "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": dbt.version.__version__, "env": {ENV_KEY_NAME: "value"}, "invocation_id": invocation_id, + "send_anonymous_usage_stats": False, + "user_id": "cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf", }, "docs": {}, "disabled": {}, "semantic_models": {}, + "unit_tests": {}, + "saved_queries": {}, }, ) @freezegun.freeze_time("2018-02-14T09:15:13Z") - def test_nested_nodes(self): + @mock.patch.object(tracking, "active_user") + def test_nested_nodes(self, mock_user): + set_from_args(Namespace(SEND_ANONYMOUS_USAGE_STATS=False), None) + mock_user.id = "cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf" nodes = deepcopy(self.nested_nodes) manifest = Manifest( nodes=nodes, @@ -413,6 +431,8 @@ def test_nested_nodes(self): ) serialized = manifest.writable_manifest().to_dict(omit_none=True) self.assertEqual(serialized["metadata"]["generated_at"], "2018-02-14T09:15:13Z") + self.assertEqual(serialized["metadata"]["user_id"], mock_user.id) + self.assertFalse(serialized["metadata"]["send_anonymous_usage_stats"]) self.assertEqual(serialized["docs"], {}) self.assertEqual(serialized["disabled"], {}) parent_map = serialized["parent_map"] @@ -477,6 +497,7 @@ def test_build_flat_graph(self): flat_nodes = flat_graph["nodes"] flat_sources = flat_graph["sources"] flat_semantic_models = flat_graph["semantic_models"] + flat_saved_queries = flat_graph["saved_queries"] self.assertEqual( set(flat_graph), set( @@ -487,6 +508,7 @@ def test_build_flat_graph(self): "sources", "metrics", "semantic_models", + "saved_queries", ] ), ) @@ -496,41 +518,22 @@ def test_build_flat_graph(self): self.assertEqual(set(flat_nodes), set(self.nested_nodes)) self.assertEqual(set(flat_sources), set(self.sources)) self.assertEqual(set(flat_semantic_models), set(self.semantic_models)) + self.assertEqual(set(flat_saved_queries), set(self.saved_queries)) for node in flat_nodes.values(): self.assertEqual(frozenset(node), REQUIRED_PARSED_NODE_KEYS) - @mock.patch.object(tracking, "active_user") - def test_metadata(self, mock_user): - mock_user.id = "cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf" - dbt.events.functions.EVENT_MANAGER.invocation_id = "01234567-0123-0123-0123-0123456789ab" - set_from_args(Namespace(SEND_ANONYMOUS_USAGE_STATS=False), None) - now = datetime.utcnow() - self.assertEqual( - ManifestMetadata( - project_id="098f6bcd4621d373cade4e832627b4f6", - adapter_type="postgres", - generated_at=now, - ), - ManifestMetadata( - project_id="098f6bcd4621d373cade4e832627b4f6", - user_id="cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf", - send_anonymous_usage_stats=False, - adapter_type="postgres", - generated_at=now, - invocation_id="01234567-0123-0123-0123-0123456789ab", - ), - ) - @mock.patch.object(tracking, "active_user") @freezegun.freeze_time("2018-02-14T09:15:13Z") def test_no_nodes_with_metadata(self, mock_user): mock_user.id = "cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf" - dbt.events.functions.EVENT_MANAGER.invocation_id = "01234567-0123-0123-0123-0123456789ab" + dbt_common.invocation._INVOCATION_ID = "01234567-0123-0123-0123-0123456789ab" set_from_args(Namespace(SEND_ANONYMOUS_USAGE_STATS=False), None) metadata = ManifestMetadata( project_id="098f6bcd4621d373cade4e832627b4f6", adapter_type="postgres", generated_at=datetime.utcnow(), + user_id="cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf", + send_anonymous_usage_stats=False, ) manifest = Manifest( nodes={}, @@ -543,6 +546,7 @@ def 
test_no_nodes_with_metadata(self, mock_user): files={}, exposures={}, semantic_models={}, + saved_queries={}, ) self.assertEqual( @@ -561,7 +565,7 @@ def test_no_nodes_with_metadata(self, mock_user): "docs": {}, "metadata": { "generated_at": "2018-02-14T09:15:13Z", - "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v10.json", + "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": dbt.version.__version__, "project_id": "098f6bcd4621d373cade4e832627b4f6", "user_id": "cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf", @@ -572,6 +576,8 @@ def test_no_nodes_with_metadata(self, mock_user): }, "disabled": {}, "semantic_models": {}, + "unit_tests": {}, + "saved_queries": {}, }, ) @@ -669,7 +675,7 @@ class MixedManifestTest(unittest.TestCase): def setUp(self): self.maxDiff = None - self.model_config = NodeConfig.from_dict( + self.model_config = ModelConfig.from_dict( { "enabled": True, "materialized": "view", @@ -869,8 +875,11 @@ def setUp(self): def tearDown(self): del os.environ["DBT_ENV_CUSTOM_ENV_key"] + @mock.patch.object(tracking, "active_user") @freezegun.freeze_time("2018-02-14T09:15:13Z") - def test_no_nodes(self): + def test_no_nodes(self, mock_user): + mock_user.id = "cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf" + set_from_args(Namespace(SEND_ANONYMOUS_USAGE_STATS=False), None) metadata = ManifestMetadata( generated_at=datetime.utcnow(), invocation_id="01234567-0123-0123-0123-0123456789ab" ) @@ -885,6 +894,7 @@ def test_no_nodes(self): files={}, exposures={}, semantic_models={}, + saved_queries={}, ) self.assertEqual( manifest.writable_manifest().to_dict(omit_none=True), @@ -901,14 +911,18 @@ def test_no_nodes(self): "group_map": {}, "metadata": { "generated_at": "2018-02-14T09:15:13Z", - "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v10.json", + "dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v12.json", "dbt_version": dbt.version.__version__, "invocation_id": "01234567-0123-0123-0123-0123456789ab", "env": {ENV_KEY_NAME: "value"}, + "send_anonymous_usage_stats": False, + "user_id": "cfc9500f-dc7f-4c83-9ea7-2c581c1b38cf", }, "docs": {}, "disabled": {}, "semantic_models": {}, + "unit_tests": {}, + "saved_queries": {}, }, ) @@ -977,6 +991,7 @@ def test_build_flat_graph(self): files={}, exposures={}, semantic_models={}, + saved_queries={}, ) manifest.build_flat_graph() flat_graph = manifest.flat_graph @@ -991,6 +1006,7 @@ def test_build_flat_graph(self): "nodes", "sources", "semantic_models", + "saved_queries", ] ), ) @@ -1004,7 +1020,7 @@ def test_build_flat_graph(self): self.assertEqual(frozenset(node), REQUIRED_PARSED_NODE_KEYS) self.assertEqual(compiled_count, 2) - def test_add_from_artifact(self): + def test_merge_from_artifact(self): original_nodes = deepcopy(self.nested_nodes) other_nodes = deepcopy(self.nested_nodes) @@ -1026,15 +1042,15 @@ def test_add_from_artifact(self): original_manifest = Manifest(nodes=original_nodes) other_manifest = Manifest(nodes=other_nodes) - original_manifest.add_from_artifact(other_manifest.writable_manifest()) + original_manifest.merge_from_artifact(other_manifest) # new node added should not be in original manifest assert "model.root.nested2" not in original_manifest.nodes - # old node removed should not have state relation in original manifest + # old node removed should not have defer_relation in original manifest assert original_manifest.nodes["model.root.nested"].defer_relation is None - # for all other nodes, check that state relation is updated + # for all other nodes, check that 
defer_relation is updated for k, v in original_manifest.nodes.items(): if v.defer_relation: self.assertEqual("other_" + v.database, v.defer_relation.database) @@ -1077,20 +1093,6 @@ def setUp(self): ) -def make_manifest(nodes=[], sources=[], macros=[], docs=[]): - return Manifest( - nodes={n.unique_id: n for n in nodes}, - macros={m.unique_id: m for m in macros}, - sources={s.unique_id: s for s in sources}, - docs={d.unique_id: d for d in docs}, - disabled={}, - files={}, - exposures={}, - metrics={}, - selectors={}, - ) - - FindMacroSpec = namedtuple("FindMacroSpec", "macros,expected") macro_parameter_sets = [ @@ -1224,27 +1226,26 @@ def test_find_generate_macros_by_name(macros, expectations): FindMaterializationSpec = namedtuple("FindMaterializationSpec", "macros,adapter_type,expected") -def _materialization_parameter_sets(): +def _materialization_parameter_sets_legacy(): # inject the plugins used for materialization parameter tests - with mock.patch("dbt.adapters.base.plugin.project_name_from_path") as get_name: - get_name.return_value = "foo" - FooPlugin = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/root/plugin", - ) - FooPlugin.adapter.type.return_value = "foo" - inject_plugin(FooPlugin) - - get_name.return_value = "bar" - BarPlugin = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/root/plugin", - dependencies=["foo"], - ) - BarPlugin.adapter.type.return_value = "bar" - inject_plugin(BarPlugin) + FooPlugin = AdapterPlugin( + adapter=mock.MagicMock(), + credentials=mock.MagicMock(), + include_path="/path/to/root/plugin", + project_name="foo", + ) + FooPlugin.adapter.type.return_value = "foo" + inject_plugin(FooPlugin) + + BarPlugin = AdapterPlugin( + adapter=mock.MagicMock(), + credentials=mock.MagicMock(), + include_path="/path/to/root/plugin", + dependencies=["foo"], + project_name="bar", + ) + BarPlugin.adapter.type.return_value = "bar" + inject_plugin(BarPlugin) sets = [ FindMaterializationSpec(macros=[], adapter_type="foo", expected=None), @@ -1371,12 +1372,187 @@ def id_mat(arg): return "_".join(arg) +@pytest.mark.parametrize( + "macros,adapter_type,expected", + _materialization_parameter_sets_legacy(), + ids=id_mat, +) +def test_find_materialization_by_name_legacy(macros, adapter_type, expected): + set_from_args( + Namespace( + SEND_ANONYMOUS_USAGE_STATS=False, + REQUIRE_EXPLICIT_PACKAGE_OVERRIDES_FOR_BUILTIN_MATERIALIZATIONS=False, + ), + None, + ) + + manifest = make_manifest(macros=macros) + result = manifest.find_materialization_macro_by_name( + project_name="root", + materialization_name="my_materialization", + adapter_type=adapter_type, + ) + if expected is None: + assert result is expected + else: + expected_package, expected_adapter_type = expected + assert result.adapter_type == expected_adapter_type + assert result.package_name == expected_package + + +def _materialization_parameter_sets(): + # inject the plugins used for materialization parameter tests + FooPlugin = AdapterPlugin( + adapter=mock.MagicMock(), + credentials=mock.MagicMock(), + include_path="/path/to/root/plugin", + project_name="foo", + ) + FooPlugin.adapter.type.return_value = "foo" + inject_plugin(FooPlugin) + + BarPlugin = AdapterPlugin( + adapter=mock.MagicMock(), + credentials=mock.MagicMock(), + include_path="/path/to/root/plugin", + dependencies=["foo"], + project_name="bar", + ) + BarPlugin.adapter.type.return_value = "bar" + inject_plugin(BarPlugin) + + sets = [ + 
FindMaterializationSpec(macros=[], adapter_type="foo", expected=None), + ] + + # default only, each project + sets.extend( + FindMaterializationSpec( + macros=[MockMaterialization(project, adapter_type=None)], + adapter_type="foo", + expected=(project, "default"), + ) + for project in ["root", "dep", "dbt"] + ) + + # other type only, each project + sets.extend( + FindMaterializationSpec( + macros=[MockMaterialization(project, adapter_type="bar")], + adapter_type="foo", + expected=None, + ) + for project in ["root", "dep", "dbt"] + ) + + # matching type only, each project + sets.extend( + FindMaterializationSpec( + macros=[MockMaterialization(project, adapter_type="foo")], + adapter_type="foo", + expected=(project, "foo"), + ) + for project in ["root", "dep", "dbt"] + ) + + sets.extend( + [ + # matching type and default everywhere + FindMaterializationSpec( + macros=[ + MockMaterialization(project, adapter_type=atype) + for (project, atype) in product(["root", "dep", "dbt"], ["foo", None]) + ], + adapter_type="foo", + expected=("root", "foo"), + ), + # default in core, override is in dep, and root has unrelated override + # should find the dbt default because default materializations cannot be overwritten by packages. + FindMaterializationSpec( + macros=[ + MockMaterialization("root", adapter_type="bar"), + MockMaterialization("dep", adapter_type="foo"), + MockMaterialization("dbt", adapter_type=None), + ], + adapter_type="foo", + expected=("dbt", "default"), + ), + # default in core, unrelated override is in dep, and root has an override + # should find the root override. + FindMaterializationSpec( + macros=[ + MockMaterialization("root", adapter_type="foo"), + MockMaterialization("dep", adapter_type="bar"), + MockMaterialization("dbt", adapter_type=None), + ], + adapter_type="foo", + expected=("root", "foo"), + ), + # default in core, override is in dep, and root has an override too. + # should find the root override. 
+ FindMaterializationSpec( + macros=[ + MockMaterialization("root", adapter_type="foo"), + MockMaterialization("dep", adapter_type="foo"), + MockMaterialization("dbt", adapter_type=None), + ], + adapter_type="foo", + expected=("root", "foo"), + ), + # core has default + adapter, dep has adapter, root has default + # should find the default adapter implementation, because it's the most specific + # and default materializations cannot be overwritten by packages + FindMaterializationSpec( + macros=[ + MockMaterialization("root", adapter_type=None), + MockMaterialization("dep", adapter_type="foo"), + MockMaterialization("dbt", adapter_type=None), + MockMaterialization("dbt", adapter_type="foo"), + ], + adapter_type="foo", + expected=("dbt", "foo"), + ), + ] + ) + + # inherit from parent adapter + sets.extend( + FindMaterializationSpec( + macros=[MockMaterialization(project, adapter_type="foo")], + adapter_type="bar", + expected=(project, "foo"), + ) + for project in ["root", "dep", "dbt"] + ) + sets.extend( + FindMaterializationSpec( + macros=[ + MockMaterialization(project, adapter_type="foo"), + MockMaterialization(project, adapter_type="bar"), + ], + adapter_type="bar", + expected=(project, "bar"), + ) + for project in ["root", "dep", "dbt"] + ) + + return sets + + @pytest.mark.parametrize( "macros,adapter_type,expected", _materialization_parameter_sets(), ids=id_mat, ) def test_find_materialization_by_name(macros, adapter_type, expected): + set_from_args( + Namespace( + SEND_ANONYMOUS_USAGE_STATS=False, + REQUIRE_EXPLICIT_PACKAGE_OVERRIDES_FOR_BUILTIN_MATERIALIZATIONS=True, + ), + None, + ) + manifest = make_manifest(macros=macros) result = manifest.find_materialization_macro_by_name( project_name="root", @@ -1787,3 +1963,176 @@ def test_resolve_doc(docs, package, expected): expected_package, expected_name = expected assert result.name == expected_name assert result.package_name == expected_package + + +class TestManifestFindNodeFromRefOrSource: + @pytest.fixture + def mock_node(self): + return MockNode("my_package", "my_model") + + @pytest.fixture + def mock_disabled_node(self): + return MockNode("my_package", "disabled_node", config={"enabled": False}) + + @pytest.fixture + def mock_source(self): + return MockSource("root", "my_source", "source_table") + + @pytest.fixture + def mock_disabled_source(self): + return MockSource("root", "my_source", "disabled_source_table", config={"enabled": False}) + + @pytest.fixture + def mock_manifest(self, mock_node, mock_source, mock_disabled_node, mock_disabled_source): + return make_manifest( + nodes=[mock_node, mock_disabled_node], sources=[mock_source, mock_disabled_source] + ) + + @pytest.mark.parametrize( + "expression,expected_node", + [ + ("ref('my_package', 'my_model')", "mock_node"), + ("ref('my_package', 'doesnt_exist')", None), + ("ref('my_package', 'disabled_node')", "mock_disabled_node"), + ("source('my_source', 'source_table')", "mock_source"), + ("source('my_source', 'doesnt_exist')", None), + ("source('my_source', 'disabled_source_table')", "mock_disabled_source"), + ], + ) + def test_find_node_from_ref_or_source(self, expression, expected_node, mock_manifest, request): + node = mock_manifest.find_node_from_ref_or_source(expression) + + if expected_node is None: + assert node is None + else: + assert node == request.getfixturevalue(expected_node) + + @pytest.mark.parametrize("invalid_expression", ["invalid", "ref(')"]) + def test_find_node_from_ref_or_source_invalid_expression( + self, invalid_expression, mock_manifest + ): + with 
pytest.raises(ParsingError): + mock_manifest.find_node_from_ref_or_source(invalid_expression) + + +class TestDisabledLookup: + @pytest.fixture(scope="class") + def manifest(self): + return Manifest( + nodes={}, + sources={}, + macros={}, + docs={}, + disabled={}, + files={}, + exposures={}, + selectors={}, + ) + + @pytest.fixture(scope="class") + def mock_model(self): + return MockNode("package", "name", NodeType.Model) + + @pytest.fixture(scope="class") + def mock_model_with_version(self): + return MockNode("package", "name", NodeType.Model, version=3) + + @pytest.fixture(scope="class") + def mock_seed(self): + return MockNode("package", "name", NodeType.Seed) + + def test_find(self, manifest, mock_model): + manifest.disabled = {"model.package.name": [mock_model]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package") == [mock_model] + + def test_find_wrong_name(self, manifest, mock_model): + manifest.disabled = {"model.package.name": [mock_model]} + lookup = DisabledLookup(manifest) + + assert lookup.find("missing_name", "package") is None + + def test_find_wrong_package(self, manifest, mock_model): + manifest.disabled = {"model.package.name": [mock_model]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "missing_package") is None + + def test_find_wrong_version(self, manifest, mock_model): + manifest.disabled = {"model.package.name": [mock_model]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package", version=3) is None + + def test_find_wrong_resource_types(self, manifest, mock_model): + manifest.disabled = {"model.package.name": [mock_model]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package", resource_types=[NodeType.Analysis]) is None + + def test_find_no_package(self, manifest, mock_model): + manifest.disabled = {"model.package.name": [mock_model]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", None) == [mock_model] + + def test_find_versioned_node(self, manifest, mock_model_with_version): + manifest.disabled = {"model.package.name": [mock_model_with_version]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package", version=3) == [mock_model_with_version] + + def test_find_versioned_node_no_package(self, manifest, mock_model_with_version): + manifest.disabled = {"model.package.name": [mock_model_with_version]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", None, version=3) == [mock_model_with_version] + + def test_find_versioned_node_no_version(self, manifest, mock_model_with_version): + manifest.disabled = {"model.package.name": [mock_model_with_version]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package") is None + + def test_find_versioned_node_wrong_version(self, manifest, mock_model_with_version): + manifest.disabled = {"model.package.name": [mock_model_with_version]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package", version=2) is None + + def test_find_versioned_node_wrong_name(self, manifest, mock_model_with_version): + manifest.disabled = {"model.package.name": [mock_model_with_version]} + lookup = DisabledLookup(manifest) + + assert lookup.find("wrong_name", "package", version=3) is None + + def test_find_versioned_node_wrong_package(self, manifest, mock_model_with_version): + manifest.disabled = {"model.package.name": [mock_model_with_version]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "wrong_package", version=3) is None + + def 
test_find_multiple_nodes(self, manifest, mock_model, mock_seed): + manifest.disabled = {"model.package.name": [mock_model, mock_seed]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package") == [mock_model, mock_seed] + + def test_find_multiple_nodes_with_resource_types(self, manifest, mock_model, mock_seed): + manifest.disabled = {"model.package.name": [mock_model, mock_seed]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package", resource_types=[NodeType.Model]) == [mock_model] + + def test_find_multiple_nodes_with_wrong_resource_types(self, manifest, mock_model, mock_seed): + manifest.disabled = {"model.package.name": [mock_model, mock_seed]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package", resource_types=[NodeType.Analysis]) is None + + def test_find_multiple_nodes_with_resource_types_empty(self, manifest, mock_model, mock_seed): + manifest.disabled = {"model.package.name": [mock_model, mock_seed]} + lookup = DisabledLookup(manifest) + + assert lookup.find("name", "package", resource_types=[]) is None diff --git a/tests/unit/contracts/graph/test_node_args.py b/tests/unit/contracts/graph/test_node_args.py new file mode 100644 index 00000000000..958dfa11d72 --- /dev/null +++ b/tests/unit/contracts/graph/test_node_args.py @@ -0,0 +1,48 @@ +from dbt.contracts.graph.node_args import ModelNodeArgs + + +class TestModelNodeArgs: + def test_model_node_args_unique_id(self) -> None: + model_node_args = ModelNodeArgs( + name="name", package_name="package", identifier="identifier", schema="schema" + ) + assert model_node_args.unique_id == "model.package.name" + + def test_model_node_args_unique_id_with_version(self) -> None: + model_node_args = ModelNodeArgs( + name="name", + package_name="package", + identifier="identifier", + schema="schema", + version=1, + ) + assert model_node_args.unique_id == "model.package.name.v1" + + def test_model_node_args_fqn(self) -> None: + model_node_args = ModelNodeArgs( + name="name", + package_name="package", + identifier="identifier", + schema="schema", + ) + assert model_node_args.fqn == ["package", "name"] + + def test_model_node_args_fqn_with_version(self) -> None: + model_node_args = ModelNodeArgs( + name="name", + package_name="package", + identifier="identifier", + schema="schema", + version=1, + ) + assert model_node_args.fqn == ["package", "name", "v1"] + + def test_model_node_args_fqn_with_version_zero(self) -> None: + model_node_args = ModelNodeArgs( + name="name", + package_name="package", + identifier="identifier", + schema="schema", + version=0, + ) + assert model_node_args.fqn == ["package", "name", "v0"] diff --git a/tests/unit/test_contracts_graph_compiled.py b/tests/unit/contracts/graph/test_nodes.py similarity index 63% rename from tests/unit/test_contracts_graph_compiled.py rename to tests/unit/contracts/graph/test_nodes.py index 0dc26d3a38b..a67ca1f5efc 100644 --- a/tests/unit/test_contracts_graph_compiled.py +++ b/tests/unit/contracts/graph/test_nodes.py @@ -1,28 +1,35 @@ import pickle +import re +from dataclasses import replace + import pytest +from dbt.artifacts.resources import ColumnInfo, TestConfig, TestMetadata +from dbt.compilation import inject_ctes_into_sql from dbt.contracts.files import FileHash from dbt.contracts.graph.nodes import ( - ColumnInfo, DependsOn, GenericTestNode, InjectedCTE, + ModelConfig, ModelNode, - NodeConfig, - TestConfig, - TestMetadata, - Contract, ) from dbt.node_types import NodeType - -from .utils import ( - assert_symmetric, - 
assert_from_dict, +from tests.unit.fixtures import generic_test_node, model_node +from tests.unit.utils import ( assert_fails_validation, + assert_from_dict, + assert_symmetric, replace_config, ) +def norm_whitespace(string): + _RE_COMBINE_WHITESPACE = re.compile(r"\s+") + string = _RE_COMBINE_WHITESPACE.sub(" ", string).strip() + return string + + @pytest.fixture def basic_uncompiled_model(): return ModelNode( @@ -39,13 +46,12 @@ def basic_uncompiled_model(): sources=[], metrics=[], depends_on=DependsOn(), - deferred=False, description="", database="test_db", schema="test_schema", alias="bar", tags=[], - config=NodeConfig(), + config=ModelConfig(), meta={}, compiled=False, extra_ctes=[], @@ -57,36 +63,7 @@ def basic_uncompiled_model(): @pytest.fixture def basic_compiled_model(): - return ModelNode( - package_name="test", - path="/root/models/foo.sql", - original_file_path="models/foo.sql", - language="sql", - raw_code='select * from {{ ref("other") }}', - name="foo", - resource_type=NodeType.Model, - unique_id="model.test.foo", - fqn=["test", "models", "foo"], - refs=[], - sources=[], - metrics=[], - depends_on=DependsOn(), - deferred=True, - description="", - database="test_db", - schema="test_schema", - alias="bar", - tags=[], - config=NodeConfig(), - contract=Contract(), - meta={}, - compiled=True, - extra_ctes=[InjectedCTE("whatever", "select * from other")], - extra_ctes_injected=True, - compiled_code="with whatever as (select * from other) select * from whatever", - checksum=FileHash.from_contents(""), - unrendered_config={}, - ) + return model_node() @pytest.fixture @@ -132,7 +109,6 @@ def basic_uncompiled_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": []}, "database": "test_db", - "deferred": False, "description": "", "schema": "test_schema", "alias": "bar", @@ -185,8 +161,8 @@ def basic_compiled_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": []}, "database": "test_db", - "deferred": True, "description": "", + "primary_key": [], "schema": "test_schema", "alias": "bar", "tags": [], @@ -204,12 +180,14 @@ def basic_compiled_dict(): "meta": {}, "grants": {}, "packages": [], - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "docs": {"show": True}, + "access": "protected", + "lookback": 0, }, "docs": {"show": True}, "columns": {}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "meta": {}, "compiled": True, "extra_ctes": [{"id": "whatever", "sql": "select * from other"}], @@ -263,44 +241,46 @@ def test_invalid_bad_type_model(minimal_uncompiled_dict): unchanged_compiled_models = [ - lambda u: (u, u.replace(description="a description")), - lambda u: (u, u.replace(tags=["mytag"])), - lambda u: (u, u.replace(meta={"cool_key": "cool value"})), + lambda u: (u, replace(u, description="a description")), + lambda u: (u, replace(u, tags=["mytag"])), + lambda u: (u, replace(u, meta={"cool_key": "cool value"})), # changing the final alias/schema/datbase isn't a change - could just be target changing! 
- lambda u: (u, u.replace(database="nope")), - lambda u: (u, u.replace(schema="nope")), - lambda u: (u, u.replace(alias="nope")), + lambda u: (u, replace(u, database="nope")), + lambda u: (u, replace(u, schema="nope")), + lambda u: (u, replace(u, alias="nope")), # None -> False is a config change even though it's pretty much the same lambda u: ( - u.replace(config=u.config.replace(persist_docs={"relation": False})), - u.replace(config=u.config.replace(persist_docs={"relation": False})), + replace(u, config=replace(u.config, persist_docs={"relation": False})), + replace(u, config=replace(u.config, persist_docs={"relation": False})), ), lambda u: ( - u.replace(config=u.config.replace(persist_docs={"columns": False})), - u.replace(config=u.config.replace(persist_docs={"columns": False})), + replace(u, config=replace(u.config, persist_docs={"columns": False})), + replace(u, config=replace(u.config, persist_docs={"columns": False})), ), # True -> True lambda u: ( - u.replace(config=u.config.replace(persist_docs={"relation": True})), - u.replace(config=u.config.replace(persist_docs={"relation": True})), + replace(u, config=replace(u.config, persist_docs={"relation": True})), + replace(u, config=replace(u.config, persist_docs={"relation": True})), ), lambda u: ( - u.replace(config=u.config.replace(persist_docs={"columns": True})), - u.replace(config=u.config.replace(persist_docs={"columns": True})), + replace(u, config=replace(u.config, persist_docs={"columns": True})), + replace(u, config=replace(u.config, persist_docs={"columns": True})), ), # only columns docs enabled, but description changed lambda u: ( - u.replace(config=u.config.replace(persist_docs={"columns": True})), - u.replace( - config=u.config.replace(persist_docs={"columns": True}), + replace(u, config=replace(u.config, persist_docs={"columns": True})), + replace( + u, + config=replace(u.config, persist_docs={"columns": True}), description="a model description", ), ), # only relation docs eanbled, but columns changed lambda u: ( - u.replace(config=u.config.replace(persist_docs={"relation": True})), - u.replace( - config=u.config.replace(persist_docs={"relation": True}), + replace(u, config=replace(u.config, persist_docs={"relation": True})), + replace( + u, + config=replace(u.config, persist_docs={"relation": True}), columns={"a": ColumnInfo(name="a", description="a column description")}, ), ), @@ -309,10 +289,11 @@ def test_invalid_bad_type_model(minimal_uncompiled_dict): changed_compiled_models = [ lambda u: (u, None), - lambda u: (u, u.replace(raw_code="select * from wherever")), + lambda u: (u, replace(u, raw_code="select * from wherever")), lambda u: ( u, - u.replace( + replace( + u, fqn=["test", "models", "subdir", "foo"], original_file_path="models/subdir/foo.sql", path="/root/models/subdir/foo.sql", @@ -408,7 +389,6 @@ def basic_uncompiled_schema_test_node(): refs=[], sources=[], metrics=[], - deferred=False, depends_on=DependsOn(), description="", database="test_db", @@ -428,40 +408,7 @@ def basic_uncompiled_schema_test_node(): @pytest.fixture def basic_compiled_schema_test_node(): - return GenericTestNode( - package_name="test", - path="/root/x/path.sql", - original_file_path="/root/path.sql", - language="sql", - raw_code='select * from {{ ref("other") }}', - name="foo", - resource_type=NodeType.Test, - unique_id="model.test.foo", - fqn=["test", "models", "foo"], - refs=[], - sources=[], - metrics=[], - depends_on=DependsOn(), - deferred=False, - description="", - database="test_db", - schema="dbt_test__audit", - 
alias="bar", - tags=[], - config=TestConfig(severity="warn"), - contract=Contract(), - meta={}, - compiled=True, - extra_ctes=[InjectedCTE("whatever", "select * from other")], - extra_ctes_injected=True, - compiled_code="with whatever as (select * from other) select * from whatever", - column_name="id", - test_metadata=TestMetadata(namespace=None, name="foo", kwargs={}), - checksum=FileHash.from_contents(""), - unrendered_config={ - "severity": "warn", - }, - ) + return generic_test_node() @pytest.fixture @@ -497,7 +444,6 @@ def basic_uncompiled_schema_test_dict(): "fail_calc": "count(*)", "meta": {}, }, - "deferred": False, "docs": {"show": True}, "columns": {}, "meta": {}, @@ -534,7 +480,6 @@ def basic_compiled_schema_test_dict(): "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "database": "test_db", "description": "", "schema": "dbt_test__audit", @@ -553,7 +498,7 @@ def basic_compiled_schema_test_dict(): }, "docs": {"show": True}, "columns": {}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "meta": {}, "compiled": True, "extra_ctes": [{"id": "whatever", "sql": "select * from other"}], @@ -616,17 +561,17 @@ def test_invalid_resource_type_schema_test(minimal_schema_test_dict): unchanged_schema_tests = [ # for tests, raw_code isn't a change (because it's always the same for a given test macro) - lambda u: u.replace(raw_code="select * from wherever"), - lambda u: u.replace(description="a description"), - lambda u: u.replace(tags=["mytag"]), - lambda u: u.replace(meta={"cool_key": "cool value"}), + lambda u: replace(u, raw_code="select * from wherever"), + lambda u: replace(u, description="a description"), + lambda u: replace(u, tags=["mytag"]), + lambda u: replace(u, meta={"cool_key": "cool value"}), # these values don't even mean anything on schema tests! lambda u: replace_config(u, alias="nope"), lambda u: replace_config(u, database="nope"), lambda u: replace_config(u, schema="nope"), - lambda u: u.replace(database="other_db"), - lambda u: u.replace(schema="other_schema"), - lambda u: u.replace(alias="foo"), + lambda u: replace(u, database="other_db"), + lambda u: replace(u, schema="other_schema"), + lambda u: replace(u, alias="foo"), lambda u: replace_config(u, full_refresh=True), lambda u: replace_config(u, post_hook=["select 1 as id"]), lambda u: replace_config(u, pre_hook=["select 1 as id"]), @@ -636,16 +581,17 @@ def test_invalid_resource_type_schema_test(minimal_schema_test_dict): changed_schema_tests = [ lambda u: None, - lambda u: u.replace( + lambda u: replace( + u, fqn=["test", "models", "subdir", "foo"], original_file_path="models/subdir/foo.sql", path="/root/models/subdir/foo.sql", ), lambda u: replace_config(u, severity="warn"), # If we checked test metadata, these would caount. But we don't, because these changes would all change the unique ID, so it's irrelevant. 
- # lambda u: u.replace(test_metadata=u.test_metadata.replace(namespace='something')), - # lambda u: u.replace(test_metadata=u.test_metadata.replace(name='bar')), - # lambda u: u.replace(test_metadata=u.test_metadata.replace(kwargs={'arg': 'value'})), + # lambda u: replace(u, test_metadata=replace(u.test_metadata, namespace='something')), + # lambda u: replace(u, test_metadata=replace(u.test_metadata, name='bar')), + # lambda u: replace(u, test_metadata=replace(u.test_metadata, kwargs={'arg': 'value'})), ] @@ -666,8 +612,196 @@ def test_compare_to_compiled(basic_uncompiled_schema_test_node, basic_compiled_s uncompiled = basic_uncompiled_schema_test_node compiled = basic_compiled_schema_test_node assert not uncompiled.same_contents(compiled, "postgres") - fixed_config = compiled.config.replace(severity=uncompiled.config.severity) - fixed_compiled = compiled.replace( - config=fixed_config, unrendered_config=uncompiled.unrendered_config + fixed_config = replace(compiled.config, severity=uncompiled.config.severity) + fixed_compiled = replace( + compiled, config=fixed_config, unrendered_config=uncompiled.unrendered_config ) assert uncompiled.same_contents(fixed_compiled, "postgres") + + +def test_inject_ctes_simple1(): + starting_sql = "select * from __dbt__cte__base" + ctes = [ + InjectedCTE( + id="model.test.base", + sql=" __dbt__cte__base as (\n\n\nselect * from test16873767336887004702_test_ephemeral.seed\n)", + ) + ] + expected_sql = """with __dbt__cte__base as ( + select * from test16873767336887004702_test_ephemeral.seed + ) select * from __dbt__cte__base""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_simple2(): + starting_sql = "select * from __dbt__cte__ephemeral_level_two" + ctes = [ + InjectedCTE( + id="model.test.ephemeral_level_two", + sql=' __dbt__cte__ephemeral_level_two as (\n\nselect * from "dbt"."test16873757769710148165_test_ephemeral"."source_table"\n)', + ) + ] + expected_sql = """with __dbt__cte__ephemeral_level_two as ( + select * from "dbt"."test16873757769710148165_test_ephemeral"."source_table" + ) select * from __dbt__cte__ephemeral_level_two""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_multiple_ctes(): + + starting_sql = "select * from __dbt__cte__ephemeral" + ctes = [ + InjectedCTE( + id="model.test.ephemeral_level_two", + sql=' __dbt__cte__ephemeral_level_two as (\n\nselect * from "dbt"."test16873735573223965828_test_ephemeral"."source_table"\n)', + ), + InjectedCTE( + id="model.test.ephemeral", + sql=" __dbt__cte__ephemeral as (\n\nselect * from __dbt__cte__ephemeral_level_two\n)", + ), + ] + expected_sql = """with __dbt__cte__ephemeral_level_two as ( + select * from "dbt"."test16873735573223965828_test_ephemeral"."source_table" + ), __dbt__cte__ephemeral as ( + select * from __dbt__cte__ephemeral_level_two + ) select * from __dbt__cte__ephemeral""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_multiple_ctes_more_complex(): + starting_sql = """select * from __dbt__cte__female_only + union all + select * from "dbt"."test16873757723266827902_test_ephemeral"."double_dependent" where gender = 'Male'""" + ctes = [ + InjectedCTE( + id="model.test.base", + sql=" __dbt__cte__base as (\n\n\nselect * from 
test16873757723266827902_test_ephemeral.seed\n)", + ), + InjectedCTE( + id="model.test.base_copy", + sql=" __dbt__cte__base_copy as (\n\n\nselect * from __dbt__cte__base\n)", + ), + InjectedCTE( + id="model.test.female_only", + sql=" __dbt__cte__female_only as (\n\n\nselect * from __dbt__cte__base_copy where gender = 'Female'\n)", + ), + ] + expected_sql = """with __dbt__cte__base as ( + select * from test16873757723266827902_test_ephemeral.seed + ), __dbt__cte__base_copy as ( + select * from __dbt__cte__base + ), __dbt__cte__female_only as ( + select * from __dbt__cte__base_copy where gender = 'Female' + ) select * from __dbt__cte__female_only + union all + select * from "dbt"."test16873757723266827902_test_ephemeral"."double_dependent" where gender = 'Male'""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_starting_with1(): + starting_sql = """ + with internal_cte as (select * from sessions) + select * from internal_cte + """ + ctes = [ + InjectedCTE( + id="cte_id_1", + sql="__dbt__cte__ephemeral as (select * from table)", + ), + InjectedCTE( + id="cte_id_2", + sql="__dbt__cte__events as (select id, type from events)", + ), + ] + expected_sql = """with __dbt__cte__ephemeral as (select * from table), + __dbt__cte__events as (select id, type from events), + internal_cte as (select * from sessions) + select * from internal_cte""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_starting_with2(): + starting_sql = """with my_other_cool_cte as ( + select id, name from __dbt__cte__ephemeral + where id > 1000 + ) + select name, id from my_other_cool_cte""" + ctes = [ + InjectedCTE( + id="model.singular_tests_ephemeral.ephemeral", + sql=' __dbt__cte__ephemeral as (\n\n\nwith my_cool_cte as (\n select name, id from "dbt"."test16873917221900185954_test_singular_tests_ephemeral"."base"\n)\nselect id, name from my_cool_cte where id is not null\n)', + ) + ] + expected_sql = """with __dbt__cte__ephemeral as ( + with my_cool_cte as ( + select name, id from "dbt"."test16873917221900185954_test_singular_tests_ephemeral"."base" + ) + select id, name from my_cool_cte where id is not null + ), my_other_cool_cte as ( + select id, name from __dbt__cte__ephemeral + where id > 1000 + ) + select name, id from my_other_cool_cte""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_comment_with(): + # Test injection with a comment containing "with" + starting_sql = """ + --- This is sql with a comment + select * from __dbt__cte__base + """ + ctes = [ + InjectedCTE( + id="model.test.base", + sql=" __dbt__cte__base as (\n\n\nselect * from test16873767336887004702_test_ephemeral.seed\n)", + ) + ] + expected_sql = """with __dbt__cte__base as ( + select * from test16873767336887004702_test_ephemeral.seed + ) --- This is sql with a comment + select * from __dbt__cte__base""" + + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) + + +def test_inject_ctes_with_recursive(): + # Test injection with "recursive" keyword + starting_sql = """ + with recursive t(n) as ( + select * from __dbt__cte__first_ephemeral_model + union all + select n+1 from t where n < 100 + ) + select sum(n) from t + """ + ctes = [ + InjectedCTE( + 
id="model.test.first_ephemeral_model", + sql=" __dbt__cte__first_ephemeral_model as (\n\nselect 1 as fun\n)", + ) + ] + expected_sql = """with recursive __dbt__cte__first_ephemeral_model as ( + select 1 as fun + ), t(n) as ( + select * from __dbt__cte__first_ephemeral_model + union all + select n+1 from t where n < 100 + ) + select sum(n) from t + """ + generated_sql = inject_ctes_into_sql(starting_sql, ctes) + assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) diff --git a/tests/unit/test_contracts_graph_parsed.py b/tests/unit/contracts/graph/test_nodes_parsed.py similarity index 86% rename from tests/unit/test_contracts_graph_parsed.py rename to tests/unit/contracts/graph/test_nodes_parsed.py index 7a05c1363f7..7acd4c1f02a 100644 --- a/tests/unit/test_contracts_graph_parsed.py +++ b/tests/unit/contracts/graph/test_nodes_parsed.py @@ -1,68 +1,77 @@ import pickle +from argparse import Namespace +from dataclasses import replace + import pytest +from hypothesis import given +from hypothesis.strategies import builds, lists -from dbt.node_types import NodeType, AccessType +from dbt.artifacts.resources import ( + ColumnInfo, + Dimension, + Entity, + ExposureConfig, + ExposureType, + FreshnessThreshold, + Hook, + MacroDependsOn, + MaturityType, + Measure, + MetricInputMeasure, + MetricTypeParams, + Owner, + Quoting, + RefArgs, + SourceConfig, +) +from dbt.artifacts.resources import SourceDefinition as SourceDefinitionResource +from dbt.artifacts.resources import TestMetadata, Time +from dbt.artifacts.resources.types import TimePeriod from dbt.contracts.files import FileHash from dbt.contracts.graph.model_config import ( + ModelConfig, NodeConfig, SeedConfig, - TestConfig, SnapshotConfig, - SourceConfig, - ExposureConfig, - EmptySnapshotConfig, - Hook, + TestConfig, ) from dbt.contracts.graph.nodes import ( - ModelNode, DependsOn, - ColumnInfo, + Docs, + Documentation, + Exposure, GenericTestNode, - SnapshotNode, - IntermediateSnapshotNode, + HookNode, Macro, - Exposure, Metric, - MetricTypeParams, - MetricInputMeasure, + ModelNode, SeedNode, - Docs, - MacroDependsOn, + SemanticModel, + SnapshotNode, SourceDefinition, - Documentation, - HookNode, - Owner, - TestMetadata, -) -from dbt.contracts.graph.unparsed import ( - ExposureType, - FreshnessThreshold, - MaturityType, - Quoting, - Time, - TimePeriod, ) -from dbt import flags -from argparse import Namespace - -from dbt.dataclass_schema import ValidationError +from dbt.node_types import AccessType, NodeType +from dbt_common.dataclass_schema import ValidationError from dbt_semantic_interfaces.type_enums import MetricType -from .utils import ( +from tests.unit.utils import ( ContractTestCase, - assert_symmetric, + assert_fails_validation, assert_from_dict, + assert_symmetric, compare_dicts, - assert_fails_validation, dict_replace, replace_config, ) -flags.set_from_args(Namespace(SEND_ANONYMOUS_USAGE_STATS=False), None) + +@pytest.fixture +def flags_for_args() -> Namespace: + return Namespace(SEND_ANONYMOUS_USAGE_STATS=False) @pytest.fixture def populated_node_config_object(): - result = NodeConfig( + result = ModelConfig( column_types={"a": "text"}, materialized="table", post_hook=[Hook(sql='insert into blah(a, b) select "1", 1')], @@ -89,12 +98,14 @@ def populated_node_config_dict(): "grants": {}, "packages": [], "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "access": "protected", + "lookback": 0, } def test_config_populated(populated_node_config_object, 
populated_node_config_dict): - assert_symmetric(populated_node_config_object, populated_node_config_dict, NodeConfig) + assert_symmetric(populated_node_config_object, populated_node_config_dict, ModelConfig) pickle.loads(pickle.dumps(populated_node_config_object)) @@ -127,14 +138,14 @@ def unrendered_node_config_dict(): @pytest.mark.parametrize("func", different_node_configs) def test_config_different(unrendered_node_config_dict, func): value = func(unrendered_node_config_dict) - assert not NodeConfig.same_contents(unrendered_node_config_dict, value) + assert not ModelConfig.same_contents(unrendered_node_config_dict, value) @pytest.mark.parametrize("func", same_node_configs) def test_config_same(unrendered_node_config_dict, func): value = func(unrendered_node_config_dict) assert unrendered_node_config_dict != value - assert NodeConfig.same_contents(unrendered_node_config_dict, value) + assert ModelConfig.same_contents(unrendered_node_config_dict, value) @pytest.fixture @@ -156,6 +167,7 @@ def base_parsed_model_dict(): "depends_on": {"macros": [], "nodes": []}, "database": "test_db", "description": "", + "primary_key": [], "schema": "test_schema", "alias": "bar", "tags": [], @@ -173,12 +185,13 @@ def base_parsed_model_dict(): "meta": {}, "grants": {}, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "packages": [], + "access": "protected", + "lookback": 0, }, - "deferred": False, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "columns": {}, "meta": {}, "checksum": { @@ -209,11 +222,12 @@ def basic_parsed_model_object(): metrics=[], depends_on=DependsOn(), description="", + primary_key=[], database="test_db", schema="test_schema", alias="bar", tags=[], - config=NodeConfig(), + config=ModelConfig(), meta={}, checksum=FileHash.from_contents(""), created_at=1.0, @@ -262,8 +276,8 @@ def complex_parsed_model_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.bar"]}, "database": "test_db", - "deferred": True, "description": "My parsed node", + "primary_key": [], "schema": "test_schema", "alias": "bar", "tags": ["tag"], @@ -282,11 +296,13 @@ def complex_parsed_model_dict(): "meta": {}, "grants": {}, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "packages": [], + "access": "protected", + "lookback": 0, }, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "columns": { "a": { "name": "a", @@ -327,14 +343,13 @@ def complex_parsed_model_object(): sources=[], metrics=[], depends_on=DependsOn(nodes=["model.test.bar"]), - deferred=True, description="My parsed node", database="test_db", schema="test_schema", alias="bar", tags=["tag"], meta={}, - config=NodeConfig( + config=ModelConfig( column_types={"a": "text"}, materialized="ephemeral", post_hook=[Hook(sql='insert into blah(a, b) select "1", 1')], @@ -349,42 +364,6 @@ def complex_parsed_model_object(): ) -{ - "enabled": True, - "tags": [], - "meta": {}, - "materialized": "ephemeral", - "persist_docs": {}, - "quoting": {}, - "column_types": {"a": "text"}, - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "grants": {}, - "packages": [], - "docs": {"show": True}, - "contract": {"enforced": False}, - "post-hook": [{"sql": 'insert into blah(a, b) select "1", 1', "transaction": True}], - "pre-hook": [], -} - -{ - "column_types": {"a": "text"}, - 
"enabled": True, - "materialized": "ephemeral", - "persist_docs": {}, - "post-hook": [{"sql": 'insert into blah(a, b) select "1", 1', "transaction": True}], - "pre-hook": [], - "quoting": {}, - "tags": [], - "on_schema_change": "ignore", - "on_configuration_change": "apply", - "meta": {}, - "grants": {}, - "docs": {"show": True}, - "packages": [], -} - - def test_model_basic(basic_parsed_model_object, base_parsed_model_dict, minimal_parsed_model_dict): node = basic_parsed_model_object node_dict = base_parsed_model_dict @@ -423,8 +402,8 @@ def test_invalid_bad_materialized(base_parsed_model_dict): unchanged_nodes = [ - lambda u: (u, u.replace(tags=["mytag"])), - lambda u: (u, u.replace(meta={"something": 1000})), + lambda u: (u, replace(u, tags=["mytag"])), + lambda u: (u, replace(u, meta={"something": 1000})), # True -> True lambda u: ( replace_config(u, persist_docs={"relation": True}), @@ -437,28 +416,32 @@ def test_invalid_bad_materialized(base_parsed_model_dict): # only columns docs enabled, but description changed lambda u: ( replace_config(u, persist_docs={"columns": True}), - replace_config(u, persist_docs={"columns": True}).replace( - description="a model description" + replace( + replace_config(u, persist_docs={"columns": True}), description="a model description" ), ), # only relation docs eanbled, but columns changed lambda u: ( replace_config(u, persist_docs={"relation": True}), - replace_config(u, persist_docs={"relation": True}).replace( - columns={"a": ColumnInfo(name="a", description="a column description")} + replace( + replace_config(u, persist_docs={"relation": True}), + columns={"a": ColumnInfo(name="a", description="a column description")}, ), ), # not tracked, we track config.alias/config.schema/config.database - lambda u: (u, u.replace(alias="other")), - lambda u: (u, u.replace(schema="other")), - lambda u: (u, u.replace(database="other")), + lambda u: (u, replace(u, alias="other")), + lambda u: (u, replace(u, schema="other")), + lambda u: (u, replace(u, database="other")), + # unchanged ref representations - protected is default + lambda u: (u, replace(u, access=AccessType.Protected)), ] changed_nodes = [ lambda u: ( u, - u.replace( + replace( + u, fqn=["test", "models", "subdir", "foo"], original_file_path="models/subdir/foo.sql", path="/root/models/subdir/foo.sql", @@ -470,21 +453,26 @@ def test_invalid_bad_materialized(base_parsed_model_dict): # persist docs was true for the relation and we changed the model description lambda u: ( replace_config(u, persist_docs={"relation": True}), - replace_config(u, persist_docs={"relation": True}).replace( - description="a model description" + replace( + replace_config(u, persist_docs={"relation": True}), description="a model description" ), ), # persist docs was true for columns and we changed the model description lambda u: ( replace_config(u, persist_docs={"columns": True}), - replace_config(u, persist_docs={"columns": True}).replace( - columns={"a": ColumnInfo(name="a", description="a column description")} + replace( + replace_config(u, persist_docs={"columns": True}), + columns={"a": ColumnInfo(name="a", description="a column description")}, ), ), # not tracked, we track config.alias/config.schema/config.database lambda u: (u, replace_config(u, alias="other")), lambda u: (u, replace_config(u, schema="other")), lambda u: (u, replace_config(u, database="other")), + # changed ref representations + lambda u: (u, replace_config(u, access=AccessType.Public)), + lambda u: (u, replace_config(u, latest_version=2)), + lambda u: (u, 
replace_config(u, version=2)), ] @@ -520,6 +508,7 @@ def basic_parsed_seed_dict(): "alias": "foo", "config": { "column_types": {}, + "delimiter": ",", "enabled": True, "materialized": "seed", "persist_docs": {}, @@ -532,10 +521,10 @@ def basic_parsed_seed_dict(): "meta": {}, "grants": {}, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "packages": [], + "lookback": 0, }, - "deferred": False, "docs": {"show": True}, "columns": {}, "meta": {}, @@ -563,7 +552,6 @@ def basic_parsed_seed_object(): alias="foo", config=SeedConfig(), # config=SeedConfig(quote_columns=True), - deferred=False, docs=Docs(show=True), columns={}, meta={}, @@ -611,6 +599,7 @@ def complex_parsed_seed_dict(): "alias": "foo", "config": { "column_types": {}, + "delimiter": ",", "enabled": True, "materialized": "seed", "persist_docs": {"relation": True, "columns": True}, @@ -624,10 +613,10 @@ def complex_parsed_seed_dict(): "meta": {}, "grants": {}, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "packages": [], + "lookback": 0, }, - "deferred": False, "docs": {"show": True}, "columns": { "a": { @@ -669,9 +658,9 @@ def complex_parsed_seed_object(): alias="foo", config=SeedConfig( quote_columns=True, + delimiter=",", persist_docs={"relation": True, "columns": True}, ), - deferred=False, docs=Docs(show=True), columns={"a": ColumnInfo(name="a", description="a column description")}, meta={"foo": 1000}, @@ -696,8 +685,8 @@ def test_seed_complex(complex_parsed_seed_dict, complex_parsed_seed_object): unchanged_seeds = [ - lambda u: (u, u.replace(tags=["mytag"])), - lambda u: (u, u.replace(meta={"something": 1000})), + lambda u: (u, replace(u, tags=["mytag"])), + lambda u: (u, replace(u, meta={"something": 1000})), # True -> True lambda u: ( replace_config(u, persist_docs={"relation": True}), @@ -710,27 +699,29 @@ def test_seed_complex(complex_parsed_seed_dict, complex_parsed_seed_object): # only columns docs enabled, but description changed lambda u: ( replace_config(u, persist_docs={"columns": True}), - replace_config(u, persist_docs={"columns": True}).replace( - description="a model description" + replace( + replace_config(u, persist_docs={"columns": True}), description="a model description" ), ), # only relation docs eanbled, but columns changed lambda u: ( replace_config(u, persist_docs={"relation": True}), - replace_config(u, persist_docs={"relation": True}).replace( - columns={"a": ColumnInfo(name="a", description="a column description")} + replace( + replace_config(u, persist_docs={"relation": True}), + columns={"a": ColumnInfo(name="a", description="a column description")}, ), ), - lambda u: (u, u.replace(alias="other")), - lambda u: (u, u.replace(schema="other")), - lambda u: (u, u.replace(database="other")), + lambda u: (u, replace(u, alias="other")), + lambda u: (u, replace(u, schema="other")), + lambda u: (u, replace(u, database="other")), ] changed_seeds = [ lambda u: ( u, - u.replace( + replace( + u, fqn=["test", "models", "subdir", "foo"], original_file_path="models/subdir/foo.sql", path="/root/models/subdir/foo.sql", @@ -742,15 +733,16 @@ def test_seed_complex(complex_parsed_seed_dict, complex_parsed_seed_object): # persist docs was true for the relation and we changed the model description lambda u: ( replace_config(u, persist_docs={"relation": True}), - replace_config(u, persist_docs={"relation": True}).replace( - description="a model description" + replace( + replace_config(u, 
persist_docs={"relation": True}), description="a model description" ), ), # persist docs was true for columns and we changed the model description lambda u: ( replace_config(u, persist_docs={"columns": True}), - replace_config(u, persist_docs={"columns": True}).replace( - columns={"a": ColumnInfo(name="a", description="a column description")} + replace( + replace_config(u, persist_docs={"columns": True}), + columns={"a": ColumnInfo(name="a", description="a column description")}, ), ), lambda u: (u, replace_config(u, alias="other")), @@ -811,7 +803,6 @@ def base_parsed_hook_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": []}, "database": "test_db", - "deferred": False, "description": "", "schema": "test_schema", "alias": "bar", @@ -830,11 +821,12 @@ def base_parsed_hook_dict(): "meta": {}, "grants": {}, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "packages": [], + "lookback": 0, }, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "columns": {}, "meta": {}, "checksum": { @@ -863,7 +855,6 @@ def base_parsed_hook_object(): metrics=[], depends_on=DependsOn(), description="", - deferred=False, database="test_db", schema="test_schema", alias="bar", @@ -892,7 +883,6 @@ def complex_parsed_hook_dict(): "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.bar"]}, - "deferred": False, "database": "test_db", "description": "My parsed node", "schema": "test_schema", @@ -913,11 +903,12 @@ def complex_parsed_hook_dict(): "meta": {}, "grants": {}, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "packages": [], + "lookback": 0, }, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "columns": { "a": { "name": "a", @@ -957,7 +948,6 @@ def complex_parsed_hook_object(): metrics=[], depends_on=DependsOn(nodes=["model.test.bar"]), description="My parsed node", - deferred=False, database="test_db", schema="test_schema", alias="bar", @@ -1051,7 +1041,6 @@ def basic_parsed_schema_test_dict(): "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "database": "test_db", "description": "", "schema": "test_schema", @@ -1070,7 +1059,7 @@ def basic_parsed_schema_test_dict(): "schema": "dbt_test__audit", }, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "columns": {}, "test_metadata": { "name": "foo", @@ -1131,7 +1120,6 @@ def complex_parsed_schema_test_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": ["model.test.bar"]}, "database": "test_db", - "deferred": False, "description": "My parsed node", "schema": "test_schema", "alias": "bar", @@ -1150,7 +1138,7 @@ def complex_parsed_schema_test_dict(): "schema": "dbt_test__audit", }, "docs": {"show": False}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "columns": { "a": { "name": "a", @@ -1261,6 +1249,7 @@ def basic_timestamp_snapshot_config_dict(): "quoting": {}, "tags": [], "unique_key": "id", + "snapshot_meta_column_names": {}, "strategy": "timestamp", "updated_at": "last_update", "target_database": "some_snapshot_db", @@ -1271,7 +1260,8 @@ def basic_timestamp_snapshot_config_dict(): "grants": {}, "packages": [], "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, 
"alias_types": True}, + "lookback": 0, } @@ -1296,6 +1286,7 @@ def complex_timestamp_snapshot_config_dict(): "post-hook": [{"sql": 'insert into blah(a, b) select "1", 1', "transaction": True}], "pre-hook": [], "quoting": {}, + "snapshot_meta_column_names": {}, "tags": [], "target_database": "some_snapshot_db", "target_schema": "some_snapshot_schema", @@ -1309,7 +1300,8 @@ def complex_timestamp_snapshot_config_dict(): "grants": {}, "packages": [], "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "lookback": 0, } @@ -1350,7 +1342,7 @@ def test_invalid_missing_updated_at(basic_timestamp_snapshot_config_dict): bad_fields = basic_timestamp_snapshot_config_dict del bad_fields["updated_at"] bad_fields["check_cols"] = "all" - assert_fails_validation(bad_fields, SnapshotConfig) + assert_snapshot_config_fails_validation(bad_fields) @pytest.fixture @@ -1363,6 +1355,7 @@ def basic_check_snapshot_config_dict(): "post-hook": [], "pre-hook": [], "quoting": {}, + "snapshot_meta_column_names": {}, "tags": [], "target_database": "some_snapshot_db", "target_schema": "some_snapshot_schema", @@ -1375,7 +1368,8 @@ def basic_check_snapshot_config_dict(): "grants": {}, "packages": [], "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "lookback": 0, } @@ -1400,6 +1394,7 @@ def complex_set_snapshot_config_dict(): "post-hook": [{"sql": 'insert into blah(a, b) select "1", 1', "transaction": True}], "pre-hook": [], "quoting": {}, + "snapshot_meta_column_names": {}, "tags": [], "target_database": "some_snapshot_db", "target_schema": "some_snapshot_schema", @@ -1413,7 +1408,8 @@ def complex_set_snapshot_config_dict(): "grants": {}, "packages": [], "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, + "lookback": 0, } @@ -1454,7 +1450,7 @@ def test_complex_snapshot_config( def test_invalid_check_wrong_strategy(basic_check_snapshot_config_dict): wrong_strategy = basic_check_snapshot_config_dict wrong_strategy["strategy"] = "timestamp" - assert_fails_validation(wrong_strategy, SnapshotConfig) + assert_snapshot_config_fails_validation(wrong_strategy) def test_invalid_missing_check_cols(basic_check_snapshot_config_dict): @@ -1462,6 +1458,8 @@ def test_invalid_missing_check_cols(basic_check_snapshot_config_dict): del wrong_fields["check_cols"] with pytest.raises(ValidationError, match=r"A snapshot configured with the check strategy"): SnapshotConfig.validate(wrong_fields) + cfg = SnapshotConfig.from_dict(wrong_fields) + cfg.final_validate() def test_missing_snapshot_configs(basic_check_snapshot_config_dict): @@ -1469,22 +1467,28 @@ def test_missing_snapshot_configs(basic_check_snapshot_config_dict): del wrong_fields["strategy"] with pytest.raises(ValidationError, match=r"Snapshots must be configured with a 'strategy'"): SnapshotConfig.validate(wrong_fields) + cfg = SnapshotConfig.from_dict(wrong_fields) + cfg.final_validate() wrong_fields["strategy"] = "timestamp" del wrong_fields["unique_key"] with pytest.raises(ValidationError, match=r"Snapshots must be configured with a 'strategy'"): SnapshotConfig.validate(wrong_fields) + cfg = SnapshotConfig.from_dict(wrong_fields) + cfg.final_validate() - wrong_fields["unique_key"] = "id" - del wrong_fields["target_schema"] - with pytest.raises(ValidationError, match=r"Snapshots must be configured with a 'strategy'"): - SnapshotConfig.validate(wrong_fields) + +def 
assert_snapshot_config_fails_validation(dct): + with pytest.raises(ValidationError): + SnapshotConfig.validate(dct) + obj = SnapshotConfig.from_dict(dct) + obj.final_validate() def test_invalid_check_value(basic_check_snapshot_config_dict): invalid_check_type = basic_check_snapshot_config_dict invalid_check_type["check_cols"] = "some" - assert_fails_validation(invalid_check_type, SnapshotConfig) + assert_snapshot_config_fails_validation(invalid_check_type) @pytest.fixture @@ -1504,7 +1508,6 @@ def basic_timestamp_snapshot_dict(): "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, - "deferred": False, "database": "test_db", "description": "", "schema": "test_schema", @@ -1518,6 +1521,7 @@ def basic_timestamp_snapshot_dict(): "post-hook": [], "pre-hook": [], "quoting": {}, + "snapshot_meta_column_names": {}, "tags": [], "target_database": "some_snapshot_db", "target_schema": "some_snapshot_schema", @@ -1529,11 +1533,12 @@ def basic_timestamp_snapshot_dict(): "meta": {}, "grants": {}, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "packages": [], + "lookback": 0, }, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "columns": {}, "meta": {}, "checksum": { @@ -1590,51 +1595,6 @@ def basic_timestamp_snapshot_object(): ) -@pytest.fixture -def basic_intermediate_timestamp_snapshot_object(): - cfg = EmptySnapshotConfig() - cfg._extra.update( - { - "strategy": "timestamp", - "unique_key": "id", - "updated_at": "last_update", - "target_database": "some_snapshot_db", - "target_schema": "some_snapshot_schema", - } - ) - - return IntermediateSnapshotNode( - package_name="test", - path="/root/x/path.sql", - original_file_path="/root/path.sql", - language="sql", - raw_code="select * from wherever", - name="foo", - resource_type=NodeType.Snapshot, - unique_id="model.test.foo", - fqn=["test", "models", "foo"], - refs=[], - sources=[], - metrics=[], - depends_on=DependsOn(), - description="", - database="test_db", - schema="test_schema", - alias="bar", - tags=[], - config=cfg, - checksum=FileHash.from_contents(""), - created_at=1, - unrendered_config={ - "strategy": "timestamp", - "unique_key": "id", - "updated_at": "last_update", - "target_database": "some_snapshot_db", - "target_schema": "some_snapshot_schema", - }, - ) - - @pytest.fixture def basic_check_snapshot_dict(): return { @@ -1653,7 +1613,6 @@ def basic_check_snapshot_dict(): "metrics": [], "depends_on": {"macros": [], "nodes": []}, "database": "test_db", - "deferred": False, "description": "", "schema": "test_schema", "alias": "bar", @@ -1666,6 +1625,7 @@ def basic_check_snapshot_dict(): "post-hook": [], "pre-hook": [], "quoting": {}, + "snapshot_meta_column_names": {}, "tags": [], "target_database": "some_snapshot_db", "target_schema": "some_snapshot_schema", @@ -1677,11 +1637,12 @@ def basic_check_snapshot_dict(): "meta": {}, "grants": {}, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "packages": [], + "lookback": 0, }, "docs": {"show": True}, - "contract": {"enforced": False}, + "contract": {"enforced": False, "alias_types": True}, "columns": {}, "meta": {}, "checksum": { @@ -1738,64 +1699,14 @@ def basic_check_snapshot_object(): ) -@pytest.fixture -def basic_intermediate_check_snapshot_object(): - cfg = EmptySnapshotConfig() - cfg._extra.update( - { - "unique_key": "id", - "strategy": "check", - "check_cols": "all", - 
"target_database": "some_snapshot_db", - "target_schema": "some_snapshot_schema", - } - ) - - return IntermediateSnapshotNode( - package_name="test", - path="/root/x/path.sql", - original_file_path="/root/path.sql", - language="sql", - raw_code="select * from wherever", - name="foo", - resource_type=NodeType.Snapshot, - unique_id="model.test.foo", - fqn=["test", "models", "foo"], - refs=[], - sources=[], - metrics=[], - depends_on=DependsOn(), - description="", - database="test_db", - schema="test_schema", - alias="bar", - tags=[], - config=cfg, - checksum=FileHash.from_contents(""), - created_at=1.0, - unrendered_config={ - "target_database": "some_snapshot_db", - "target_schema": "some_snapshot_schema", - "unique_key": "id", - "strategy": "check", - "check_cols": "all", - }, - ) - - def test_timestamp_snapshot_ok( basic_timestamp_snapshot_dict, basic_timestamp_snapshot_object, - basic_intermediate_timestamp_snapshot_object, ): node_dict = basic_timestamp_snapshot_dict node = basic_timestamp_snapshot_object - inter = basic_intermediate_timestamp_snapshot_object assert_symmetric(node, node_dict, SnapshotNode) - # node_from_dict = SnapshotNode.from_dict(inter.to_dict(omit_none=True)) - # node_from_dict.created_at = 1 - assert SnapshotNode.from_dict(inter.to_dict(omit_none=True)) == node assert node.is_refable is True assert node.is_ephemeral is False pickle.loads(pickle.dumps(node)) @@ -1804,14 +1715,11 @@ def test_timestamp_snapshot_ok( def test_check_snapshot_ok( basic_check_snapshot_dict, basic_check_snapshot_object, - basic_intermediate_check_snapshot_object, ): node_dict = basic_check_snapshot_dict node = basic_check_snapshot_object - inter = basic_intermediate_check_snapshot_object assert_symmetric(node, node_dict, SnapshotNode) - assert SnapshotNode.from_dict(inter.to_dict(omit_none=True)) == node assert node.is_refable is True assert node.is_ephemeral is False pickle.loads(pickle.dumps(node)) @@ -1961,30 +1869,6 @@ def basic_parsed_source_definition_dict(): } -@pytest.fixture -def basic_parsed_source_definition_object(): - return SourceDefinition( - columns={}, - database="some_db", - description="", - fqn=["test", "source", "my_source", "my_source_table"], - identifier="my_source_table", - loader="stitch", - name="my_source_table", - original_file_path="/root/models/sources.yml", - package_name="test", - path="/root/models/sources.yml", - quoting=Quoting(), - resource_type=NodeType.Source, - schema="some_schema", - source_description="my source description", - source_name="my_source", - unique_id="test.source.my_source.my_source_table", - tags=[], - config=SourceConfig(), - ) - - @pytest.fixture def complex_parsed_source_definition_dict(): return { @@ -2052,13 +1936,13 @@ def test_basic_source_definition( node_dict = basic_parsed_source_definition_dict minimum = minimum_parsed_source_definition_dict - assert_symmetric(node, node_dict, SourceDefinition) + assert_symmetric(node.to_resource(), node_dict, SourceDefinitionResource) assert node.is_ephemeral is False assert node.is_refable is False assert node.has_freshness is False - assert_from_dict(node, minimum, SourceDefinition) + assert_from_dict(node.to_resource(), minimum, SourceDefinitionResource) pickle.loads(pickle.dumps(node)) @@ -2079,7 +1963,7 @@ def test_complex_source_definition( ): node = complex_parsed_source_definition_object node_dict = complex_parsed_source_definition_dict - assert_symmetric(node, node_dict, SourceDefinition) + assert_symmetric(node.to_resource(), node_dict, SourceDefinitionResource) assert 
node.is_ephemeral is False assert node.is_refable is False @@ -2088,14 +1972,6 @@ def test_complex_source_definition( pickle.loads(pickle.dumps(node)) -def test_source_no_loaded_at(complex_parsed_source_definition_object): - node = complex_parsed_source_definition_object - assert node.has_freshness is True - # no loaded_at_field -> does not have freshness - node.loaded_at_field = None - assert node.has_freshness is False - - def test_source_no_freshness(complex_parsed_source_definition_object): node = complex_parsed_source_definition_object assert node.has_freshness is True @@ -2104,27 +1980,30 @@ def test_source_no_freshness(complex_parsed_source_definition_object): unchanged_source_definitions = [ - lambda u: (u, u.replace(tags=["mytag"])), - lambda u: (u, u.replace(meta={"a": 1000})), + lambda u: (u, replace(u, tags=["mytag"])), + lambda u: (u, replace(u, meta={"a": 1000})), ] changed_source_definitions = [ lambda u: ( u, - u.replace( + replace( + u, freshness=FreshnessThreshold(warn_after=Time(period=TimePeriod.hour, count=1)), loaded_at_field="loaded_at", ), ), - lambda u: (u, u.replace(loaded_at_field="loaded_at")), + lambda u: (u, replace(u, loaded_at_field="loaded_at")), lambda u: ( u, - u.replace(freshness=FreshnessThreshold(error_after=Time(period=TimePeriod.hour, count=1))), + replace( + u, freshness=FreshnessThreshold(error_after=Time(period=TimePeriod.hour, count=1)) + ), ), - lambda u: (u, u.replace(quoting=Quoting(identifier=True))), - lambda u: (u, u.replace(database="other_database")), - lambda u: (u, u.replace(schema="other_schema")), - lambda u: (u, u.replace(identifier="identifier")), + lambda u: (u, replace(u, quoting=Quoting(identifier=True))), + lambda u: (u, replace(u, database="other_database")), + lambda u: (u, replace(u, schema="other_schema")), + lambda u: (u, replace(u, identifier="identifier")), ] @@ -2289,13 +2168,13 @@ def test_complex_parsed_exposure(complex_parsed_exposure_dict, complex_parsed_ex changed_parsed_exposures = [ - lambda u: (u, u.replace(fqn=u.fqn[:-1] + ["something", u.fqn[-1]])), - lambda u: (u, u.replace(type=ExposureType.ML)), - lambda u: (u, u.replace(owner=u.owner.replace(name="My Name"))), - lambda u: (u, u.replace(maturity=MaturityType.Medium)), - lambda u: (u, u.replace(url="https://example.com/dashboard/1")), - lambda u: (u, u.replace(description="My description")), - lambda u: (u, u.replace(depends_on=DependsOn(nodes=["model.test.blah"]))), + lambda u: (u, replace(u, fqn=u.fqn[:-1] + ["something", u.fqn[-1]])), + lambda u: (u, replace(u, type=ExposureType.ML)), + lambda u: (u, replace(u, owner=replace(u.owner, name="My Name"))), + lambda u: (u, replace(u, maturity=MaturityType.Medium)), + lambda u: (u, replace(u, url="https://example.com/dashboard/1")), + lambda u: (u, replace(u, description="My description")), + lambda u: (u, replace(u, depends_on=DependsOn(nodes=["model.test.blah"]))), ] @@ -2377,3 +2256,18 @@ def basic_parsed_metric_object(): meta={}, tags=[], ) + + +@given( + builds( + SemanticModel, + depends_on=builds(DependsOn), + dimensions=lists(builds(Dimension)), + entities=lists(builds(Entity)), + measures=lists(builds(Measure)), + refs=lists(builds(RefArgs)), + ) +) +def test_semantic_model_symmetry(semantic_model: SemanticModel): + assert semantic_model == SemanticModel.from_dict(semantic_model.to_dict()) + assert semantic_model == pickle.loads(pickle.dumps(semantic_model)) diff --git a/tests/unit/contracts/graph/test_semantic_manifest.py b/tests/unit/contracts/graph/test_semantic_manifest.py new file mode 100644 index 
00000000000..cf3121dc9b0 --- /dev/null +++ b/tests/unit/contracts/graph/test_semantic_manifest.py @@ -0,0 +1,29 @@ +import pytest + +from dbt.contracts.graph.semantic_manifest import SemanticManifest + + +# Overwrite the default nodes to construct the manifest +@pytest.fixture +def nodes(metricflow_time_spine_model): + return [metricflow_time_spine_model] + + +@pytest.fixture +def semantic_models( + semantic_model, +) -> list: + return [semantic_model] + + +@pytest.fixture +def metrics( + metric, +) -> list: + return [metric] + + +class TestSemanticManifest: + def test_validate(self, manifest): + sm_manifest = SemanticManifest(manifest) + assert sm_manifest.validate() diff --git a/tests/unit/test_contracts_graph_unparsed.py b/tests/unit/contracts/graph/test_unparsed.py similarity index 89% rename from tests/unit/test_contracts_graph_unparsed.py rename to tests/unit/contracts/graph/test_unparsed.py index ad9f6d74922..90fa2fcf8a7 100644 --- a/tests/unit/test_contracts_graph_unparsed.py +++ b/tests/unit/contracts/graph/test_unparsed.py @@ -1,33 +1,40 @@ import pickle from datetime import timedelta -from dbt.contracts.graph.unparsed import ( - UnparsedNode, - UnparsedRunHook, - UnparsedMacro, - Time, - TimePeriod, +import pytest + +from dbt.artifacts.resources import ( + ExposureType, FreshnessThreshold, + MaturityType, + Owner, Quoting, - UnparsedSourceDefinition, - UnparsedSourceTableDefinition, - UnparsedDocumentationFile, - UnparsedColumn, - UnparsedNodeUpdate, - UnparsedModelUpdate, + Time, +) +from dbt.artifacts.resources.types import TimePeriod +from dbt.artifacts.schemas.results import FreshnessStatus +from dbt.contracts.graph.unparsed import ( Docs, + HasColumnTests, + UnparsedColumn, + UnparsedDocumentationFile, UnparsedExposure, - MaturityType, - Owner, - ExposureType, + UnparsedMacro, UnparsedMetric, - UnparsedMetricTypeParams, UnparsedMetricInputMeasure, + UnparsedMetricTypeParams, + UnparsedModelUpdate, + UnparsedNode, + UnparsedNodeUpdate, + UnparsedRunHook, + UnparsedSourceDefinition, + UnparsedSourceTableDefinition, UnparsedVersion, ) -from dbt.contracts.results import FreshnessStatus +from dbt.exceptions import ParsingError from dbt.node_types import NodeType -from .utils import ContractTestCase +from dbt.parser.schemas import ParserRef +from tests.unit.utils import ContractTestCase class TestUnparsedMacro(ContractTestCase): @@ -131,18 +138,6 @@ def test_empty(self): self.assert_fails_validation(node_dict, cls=UnparsedRunHook) self.assert_fails_validation(node_dict, cls=UnparsedMacro) - def test_bad_type(self): - node_dict = { - "name": "foo", - "resource_type": NodeType.Source, # not valid!
- "path": "/root/x/path.sql", - "original_file_path": "/root/path.sql", - "package_name": "test", - "language": "sql", - "raw_code": 'select * from {{ ref("thing") }}', - } - self.assert_fails_validation(node_dict) - class TestUnparsedRunHook(ContractTestCase): ContractType = UnparsedRunHook @@ -329,6 +324,7 @@ def test_table_defaults(self): "description": "", "config": {}, "docs": {"show": True}, + "data_tests": [], "tests": [], "columns": [], "constraints": [], @@ -342,6 +338,7 @@ def test_table_defaults(self): "description": "table 2", "config": {}, "docs": {"show": True}, + "data_tests": [], "tests": [], "columns": [], "constraints": [], @@ -415,6 +412,7 @@ def test_defaults(self): "columns": [], "description": "", "docs": {"show": True}, + "data_tests": [], "tests": [], "meta": {}, "config": {}, @@ -430,7 +428,7 @@ def test_contents(self): original_file_path="/some/fake/path", package_name="test", description="a description", - tests=["table_test"], + data_tests=["table_test"], meta={"key": ["value1", "value2"]}, columns=[ UnparsedColumn( @@ -441,7 +439,7 @@ def test_contents(self): UnparsedColumn( name="y", description="y description", - tests=["unique", {"accepted_values": {"values": ["blue", "green"]}}], + data_tests=["unique", {"accepted_values": {"values": ["blue", "green"]}}], meta={}, tags=["a", "b"], ), @@ -454,7 +452,8 @@ def test_contents(self): "original_file_path": "/some/fake/path", "package_name": "test", "description": "a description", - "tests": ["table_test"], + "data_tests": ["table_test"], + "tests": [], "meta": {"key": ["value1", "value2"]}, "constraints": [], "columns": [ @@ -462,6 +461,7 @@ def test_contents(self): "name": "x", "description": "x description", "docs": {"show": True}, + "data_tests": [], "tests": [], "meta": {"key2": "value3"}, "tags": [], @@ -471,7 +471,8 @@ def test_contents(self): "name": "y", "description": "y description", "docs": {"show": True}, - "tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], "meta": {}, "tags": ["a", "b"], "constraints": [], @@ -490,13 +491,15 @@ def test_bad_test_type(self): "original_file_path": "/some/fake/path", "package_name": "test", "description": "a description", - "tests": ["table_test"], + "data_tests": ["table_test"], + "tests": [], "meta": {"key": ["value1", "value2"]}, "columns": [ { "name": "x", "description": "x description", "docs": {"show": True}, + "data_tests": [], "tests": [], "meta": {"key2": "value3"}, }, @@ -504,7 +507,8 @@ def test_bad_test_type(self): "name": "y", "description": "y description", "docs": {"show": True}, - "tests": [100, {"accepted_values": {"values": ["blue", "green"]}}], + "data_tests": [100, {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], "meta": {}, "yaml_key": "models", "original_file_path": "/some/fake/path", @@ -520,13 +524,15 @@ def test_bad_test_type(self): "original_file_path": "/some/fake/path", "package_name": "test", "description": "a description", - "tests": ["table_test"], + "data_tests": ["table_test"], + "tests": [], "meta": {"key": ["value1", "value2"]}, "columns": [ # column missing a name { "description": "x description", "docs": {"show": True}, + "data_tests": [], "tests": [], "meta": {"key2": "value3"}, }, @@ -534,7 +540,8 @@ def test_bad_test_type(self): "name": "y", "description": "y description", "docs": {"show": True}, - "tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "data_tests": ["unique", 
{"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], "meta": {}, "yaml_key": "models", "original_file_path": "/some/fake/path", @@ -550,13 +557,15 @@ def test_bad_test_type(self): "original_file_path": "/some/fake/path", "package_name": "test", "description": "a description", - "tests": ["table_test"], + "data_tests": ["table_test"], + "tests": [], "meta": {"key": ["value1", "value2"]}, "columns": [ { "name": "x", "description": "x description", "docs": {"show": True}, + "data_tests": [], "tests": [], "meta": {"key2": "value3"}, }, @@ -564,7 +573,8 @@ def test_bad_test_type(self): "name": "y", "description": "y description", "docs": {"show": True}, - "tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], "meta": {}, "yaml_key": "models", "original_file_path": "/some/fake/path", @@ -599,6 +609,7 @@ def test_defaults(self): "columns": [], "description": "", "docs": {"show": True}, + "data_tests": [], "tests": [], "meta": {}, "config": {}, @@ -615,7 +626,7 @@ def test_contents(self): original_file_path="/some/fake/path", package_name="test", description="a description", - tests=["table_test"], + data_tests=["table_test"], meta={"key": ["value1", "value2"]}, columns=[ UnparsedColumn( @@ -626,7 +637,7 @@ def test_contents(self): UnparsedColumn( name="y", description="y description", - tests=["unique", {"accepted_values": {"values": ["blue", "green"]}}], + data_tests=["unique", {"accepted_values": {"values": ["blue", "green"]}}], meta={}, tags=["a", "b"], ), @@ -640,7 +651,8 @@ def test_contents(self): "original_file_path": "/some/fake/path", "package_name": "test", "description": "a description", - "tests": ["table_test"], + "data_tests": ["table_test"], + "tests": [], "meta": {"key": ["value1", "value2"]}, "constraints": [], "versions": [ @@ -658,6 +670,7 @@ def test_contents(self): "name": "x", "description": "x description", "docs": {"show": True}, + "data_tests": [], "tests": [], "meta": {"key2": "value3"}, "tags": [], @@ -667,7 +680,8 @@ def test_contents(self): "name": "y", "description": "y description", "docs": {"show": True}, - "tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], "meta": {}, "tags": ["a", "b"], "constraints": [], @@ -686,13 +700,15 @@ def test_bad_test_type(self): "original_file_path": "/some/fake/path", "package_name": "test", "description": "a description", - "tests": ["table_test"], + "data_tests": ["table_test"], + "tests": [], "meta": {"key": ["value1", "value2"]}, "columns": [ { "name": "x", "description": "x description", "docs": {"show": True}, + "data_tests": [], "tests": [], "meta": {"key2": "value3"}, }, @@ -700,7 +716,8 @@ def test_bad_test_type(self): "name": "y", "description": "y description", "docs": {"show": True}, - "tests": [100, {"accepted_values": {"values": ["blue", "green"]}}], + "data_tests": [100, {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], "meta": {}, "yaml_key": "models", "original_file_path": "/some/fake/path", @@ -716,13 +733,15 @@ def test_bad_test_type(self): "original_file_path": "/some/fake/path", "package_name": "test", "description": "a description", - "tests": ["table_test"], + "data_tests": ["table_test"], + "tests": [], "meta": {"key": ["value1", "value2"]}, "columns": [ # column missing a name { "description": "x description", "docs": {"show": True}, + "data_tests": 
[], "tests": [], "meta": {"key2": "value3"}, }, @@ -730,7 +749,8 @@ def test_bad_test_type(self): "name": "y", "description": "y description", "docs": {"show": True}, - "tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], "meta": {}, "yaml_key": "models", "original_file_path": "/some/fake/path", @@ -746,13 +766,15 @@ def test_bad_test_type(self): "original_file_path": "/some/fake/path", "package_name": "test", "description": "a description", - "tests": ["table_test"], + "data_tests": ["table_test"], + "tests": [], "meta": {"key": ["value1", "value2"]}, "columns": [ { "name": "x", "description": "x description", "docs": {"show": True}, + "data_tests": [], "tests": [], "meta": {"key2": "value3"}, }, @@ -760,7 +782,8 @@ def test_bad_test_type(self): "name": "y", "description": "y description", "docs": {"show": True}, - "tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "data_tests": ["unique", {"accepted_values": {"values": ["blue", "green"]}}], + "tests": [], "meta": {}, "yaml_key": "models", "original_file_path": "/some/fake/path", @@ -869,6 +892,7 @@ def get_ok_dict(self): "measure": { "name": "customers", "filter": "is_new = true", + "join_to_timespine": False, }, }, "config": {}, @@ -917,7 +941,7 @@ def get_ok_dict(self): "config": {}, "constraints": [], "docs": {"show": False}, - "tests": [], + "data_tests": [], "columns": [], } @@ -929,7 +953,7 @@ def test_ok(self): config={}, constraints=[], docs=Docs(show=False), - tests=[], + data_tests=[], columns=[], ) dct = self.get_ok_dict() @@ -940,3 +964,34 @@ def test_bad_version_no_v(self): version = self.get_ok_dict() del version["v"] self.assert_fails_validation(version) + + +@pytest.mark.parametrize( + "left,right,expected_lt", + [ + # same types + (2, 12, True), + (12, 2, False), + ("a", "b", True), + ("b", "a", False), + # mismatched types - numeric + (2, 12.0, True), + (12.0, 2, False), + (2, "12", True), + ("12", 2, False), + # mismatched types + (1, "test", True), + ("test", 1, False), + ], +) +def test_unparsed_version_lt(left, right, expected_lt): + assert (UnparsedVersion(left) < UnparsedVersion(right)) == expected_lt + + +def test_column_parse(): + unparsed_col = HasColumnTests( + columns=[UnparsedColumn(name="TestCol", constraints=[{"type": "!INVALID!"}])] + ) + + with pytest.raises(ParsingError): + ParserRef.from_target(unparsed_col) diff --git a/tests/unit/test_contracts_project.py b/tests/unit/contracts/test_project.py similarity index 89% rename from tests/unit/test_contracts_project.py rename to tests/unit/contracts/test_project.py index 01f07c8d4e4..37e57a33c12 100644 --- a/tests/unit/test_contracts_project.py +++ b/tests/unit/contracts/test_project.py @@ -1,8 +1,6 @@ -from .utils import ContractTestCase - -from dbt.dataclass_schema import ValidationError - from dbt.contracts.project import Project +from dbt_common.dataclass_schema import ValidationError +from tests.unit.utils import ContractTestCase class TestProject(ContractTestCase): diff --git a/tests/unit/deps/__init__.py b/tests/unit/deps/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/test_deps.py b/tests/unit/deps/test_deps.py similarity index 75% rename from tests/unit/test_deps.py rename to tests/unit/deps/test_deps.py index 5728cb703b6..339bbbc5d23 100644 --- a/tests/unit/test_deps.py +++ b/tests/unit/deps/test_deps.py @@ -1,27 +1,31 @@ -from copy import deepcopy - import unittest +from 
argparse import Namespace +from copy import deepcopy from unittest import mock import dbt.deps import dbt.exceptions -from dbt.deps.git import GitUnpinnedPackage -from dbt.deps.local import LocalUnpinnedPackage -from dbt.deps.tarball import TarballUnpinnedPackage -from dbt.deps.registry import RegistryUnpinnedPackage from dbt.clients.registry import is_compatible_version -from dbt.deps.resolver import resolve_packages +from dbt.config.project import PartialProject +from dbt.config.renderer import DbtProjectYamlRenderer from dbt.contracts.project import ( - LocalPackage, - TarballPackage, GitPackage, + LocalPackage, + PackageConfig, RegistryPackage, + TarballPackage, ) -from dbt.contracts.project import PackageConfig -from dbt.semver import VersionSpecifier +from dbt.deps.git import GitUnpinnedPackage +from dbt.deps.local import LocalPinnedPackage, LocalUnpinnedPackage +from dbt.deps.registry import RegistryUnpinnedPackage +from dbt.deps.resolver import resolve_packages +from dbt.deps.tarball import TarballUnpinnedPackage +from dbt.flags import set_from_args from dbt.version import get_installed_version +from dbt_common.dataclass_schema import ValidationError +from dbt_common.semver import VersionSpecifier -from dbt.dataclass_schema import ValidationError +set_from_args(Namespace(WARN_ERROR=False), None) class TestLocalPackage(unittest.TestCase): @@ -36,40 +40,140 @@ def test_init(self): class TestTarballPackage(unittest.TestCase): - def test_TarballPackage(self): - from dbt.contracts.project import RegistryPackageMetadata - from mashumaro.exceptions import MissingField + class MockMetadata: + name = "mock_metadata_name" + + @mock.patch("dbt.config.project.PartialProject.from_project_root") + @mock.patch("os.listdir") + @mock.patch("dbt.deps.tarball.get_downloads_path") + @mock.patch("dbt_common.clients.system.untar_package") + @mock.patch("dbt_common.clients.system.download") + def test_fetch_metadata( + self, + mock_download, + mock_untar_package, + mock_get_downloads_path, + mock_listdir, + mock_from_project_root, + ): + mock_listdir.return_value = ["one_directory/"] + mock_get_downloads_path.return_value = "downloads_path" + mock_from_project_root.return_value = object() + mock_from_project_root.return_value + dict_well_formed_contract = { + "tarball": "http://example.com/invalid_url@/package.tar.gz", + "name": "my_package", + } - dict_well_formed_contract = {"tarball": "http://example.com", "name": "my_cool_package"} + a_contract = TarballPackage.from_dict(dict_well_formed_contract) + a = TarballUnpinnedPackage.from_contract(a_contract) + + a_pinned = a.resolved() + with mock.patch.object(PartialProject, "from_project_root", return_value=PartialProject): + with mock.patch.object( + PartialProject, "render_package_metadata", return_value=self.MockMetadata + ): + metadata = a_pinned.fetch_metadata("", DbtProjectYamlRenderer()) + + assert metadata == self.MockMetadata + mock_download.assert_called_once_with( + "http://example.com/invalid_url@/package.tar.gz", "downloads_path/my_package" + ) + mock_untar_package.assert_called_once_with( + "downloads_path/my_package", "downloads_path/my_package_untarred", "my_package" + ) + + @mock.patch("dbt.config.project.PartialProject.from_project_root") + @mock.patch("os.listdir") + @mock.patch("dbt.deps.tarball.get_downloads_path") + @mock.patch("dbt_common.clients.system.untar_package") + @mock.patch("dbt_common.clients.system.download") + def test_fetch_metadata_fails_on_incorrect_tar_folder_structure( + self, + mock_download, + mock_untar_package, 
+ mock_get_downloads_path, + mock_listdir, + mock_from_project_root, + ): + mock_listdir.return_value = ["one_directory/", "another_directory/"] + + mock_get_downloads_path.return_value = "downloads_path" + mock_from_project_root.return_value = object() + mock_from_project_root.return_value + dict_well_formed_contract = { + "tarball": "http://example.com/invalid_url@/package.tar.gz", + "name": "my_package", + } + a_contract = TarballPackage.from_dict(dict_well_formed_contract) + a = TarballUnpinnedPackage.from_contract(a_contract) + + a_pinned = a.resolved() + with mock.patch.object(PartialProject, "from_project_root", return_value=PartialProject): + with mock.patch.object( + PartialProject, "render_package_metadata", return_value=self.MockMetadata + ): + with self.assertRaises(dbt.exceptions.DependencyError): + a_pinned.fetch_metadata("", DbtProjectYamlRenderer()) + + @mock.patch("dbt.deps.tarball.get_downloads_path") + def test_tarball_package_contract(self, mock_get_downloads_path): + dict_well_formed_contract = { + "tarball": "http://example.com/invalid_url@/package.tar.gz", + "name": "my_cool_package", + } a_contract = TarballPackage.from_dict(dict_well_formed_contract) # check contract and resolver - self.assertEqual(a_contract.tarball, "http://example.com") + self.assertEqual(a_contract.tarball, "http://example.com/invalid_url@/package.tar.gz") self.assertEqual(a_contract.name, "my_cool_package") a = TarballUnpinnedPackage.from_contract(a_contract) - self.assertEqual(a.tarball, "http://example.com") + self.assertEqual(a.tarball, "http://example.com/invalid_url@/package.tar.gz") self.assertEqual(a.package, "my_cool_package") a_pinned = a.resolved() self.assertEqual(a_pinned.source_type(), "tarball") - # check bad contract (no name) fails - dict_missing_name_should_fail_on_contract = {"tarball": "http://example.com"} + a_pinned_dict = a_pinned.to_dict() + self.assertEqual( + a_pinned_dict, + { + "tarball": "http://example.com/invalid_url@/package.tar.gz", + "name": "my_cool_package", + }, + ) - with self.assertRaises(MissingField): - TarballPackage.from_dict(dict_missing_name_should_fail_on_contract) + @mock.patch("dbt.deps.tarball.get_downloads_path") + def test_tarball_pinned_package_contract_with_unrendered(self, mock_get_downloads_path): + contract = TarballPackage( + tarball="http://example.com/invalid_url@/package.tar.gz", + name="my_cool_package", + unrendered={"tarball": "tarball_unrendered"}, + ) + tarball_unpinned_package = TarballUnpinnedPackage.from_contract(contract) - # check RegistryPackageMetadata - it is used in TarballUnpinnedPackage - dct = { - "name": a.package, - "packages": [], # note: required by RegistryPackageMetadata - "downloads": {"tarball": a_pinned.tarball}, - } + self.assertEqual( + tarball_unpinned_package.tarball, "http://example.com/invalid_url@/package.tar.gz" + ) + self.assertEqual(tarball_unpinned_package.package, "my_cool_package") + self.assertEqual(tarball_unpinned_package.tarball_unrendered, "tarball_unrendered") + + tarball_pinned_package = tarball_unpinned_package.resolved() + tarball_unpinned_package_dict = tarball_pinned_package.to_dict() + self.assertEqual( + tarball_unpinned_package_dict, + {"tarball": "tarball_unrendered", "name": "my_cool_package"}, + ) + + def test_tarball_package_contract_fails_on_no_name(self): + from mashumaro.exceptions import MissingField - metastore = RegistryPackageMetadata.from_dict(dct) - self.assertEqual(metastore.downloads.tarball, "http://example.com") + # check bad contract (no name) fails + a_contract = 
{"tarball": "http://example.com"} + with self.assertRaises(MissingField): + TarballPackage.from_dict(a_contract) class TestGitPackage(unittest.TestCase): @@ -92,6 +196,38 @@ def test_init(self): self.assertEqual(a_pinned.source_type(), "git") self.assertIs(a_pinned.warn_unpinned, True) + a_pinned_dict = a_pinned.to_dict() + self.assertEqual(a_pinned_dict, {"git": "http://example.com", "revision": "0.0.1"}) + + def test_init_with_unrendered(self): + contract = GitPackage( + git="http://example.com", revision="0.0.1", unrendered={"git": "git_unrendered"} + ) + + git_unpinned_package = GitUnpinnedPackage.from_contract(contract) + self.assertEqual(git_unpinned_package.git, "http://example.com") + self.assertEqual(git_unpinned_package.revisions, ["0.0.1"]) + self.assertIs(git_unpinned_package.git_unrendered, "git_unrendered") + + git_pinned_package = git_unpinned_package.resolved() + git_pinned_package_dict = git_pinned_package.to_dict() + self.assertEqual(git_pinned_package_dict, {"git": "git_unrendered", "revision": "0.0.1"}) + + @mock.patch("shutil.copytree") + @mock.patch("dbt.deps.local.system.make_symlink") + @mock.patch("dbt.deps.local.LocalPinnedPackage.get_installation_path") + @mock.patch("dbt.deps.local.LocalPinnedPackage.resolve_path") + def test_deps_install( + self, mock_resolve_path, mock_get_installation_path, mock_symlink, mock_shutil + ): + mock_resolve_path.return_value = "/tmp/source" + mock_get_installation_path.return_value = "/tmp/dest" + mock_symlink.side_effect = OSError("Install deps symlink error") + + LocalPinnedPackage("local").install("dummy", "dummy") + self.assertEqual(mock_shutil.call_count, 1) + mock_shutil.assert_called_once_with("/tmp/source", "/tmp/dest") + def test_invalid(self): with self.assertRaises(ValidationError): GitPackage.validate( @@ -105,11 +241,17 @@ def test_resolve_ok(self): b_contract = GitPackage.from_dict( {"git": "http://example.com", "revision": "0.0.1", "warn-unpinned": False}, ) + d_contract = GitPackage.from_dict( + {"git": "http://example.com", "revision": "0.0.1", "subdirectory": "foo-bar"}, + ) a = GitUnpinnedPackage.from_contract(a_contract) b = GitUnpinnedPackage.from_contract(b_contract) + c = a.incorporate(b) + d = GitUnpinnedPackage.from_contract(d_contract) + self.assertTrue(a.warn_unpinned) self.assertFalse(b.warn_unpinned) - c = a.incorporate(b) + self.assertTrue(d.warn_unpinned) c_pinned = c.resolved() self.assertEqual(c_pinned.name, "http://example.com") @@ -117,6 +259,12 @@ def test_resolve_ok(self): self.assertEqual(c_pinned.source_type(), "git") self.assertFalse(c_pinned.warn_unpinned) + d_pinned = d.resolved() + self.assertEqual(d_pinned.name, "http://example.com/foo-bar") + self.assertEqual(d_pinned.get_version(), "0.0.1") + self.assertEqual(d_pinned.source_type(), "git") + self.assertEqual(d_pinned.subdirectory, "foo-bar") + def test_resolve_fail(self): a_contract = GitPackage.from_dict( {"git": "http://example.com", "revision": "0.0.1"}, @@ -643,6 +791,19 @@ def test_dependency_resolution(self): self.assertEqual(resolved[1].name, "dbt-labs-test/b") self.assertEqual(resolved[1].version, "0.2.1") + def test_private_package_raise_error(self): + package_config = PackageConfig.from_dict( + { + "packages": [ + {"private": "dbt-labs-test/a", "subdirectory": "foo-bar"}, + ], + } + ) + with self.assertRaisesRegex( + dbt.exceptions.DependencyError, "Cannot resolve private package" + ): + resolve_packages(package_config.packages, mock.MagicMock(project_name="test"), {}) + def test_dependency_resolution_allow_prerelease(self): 
package_config = PackageConfig.from_dict( { @@ -663,7 +824,6 @@ def test_dependency_resolution_allow_prerelease(self): self.assertEqual(resolved[0].version, "0.1.4a1") def test_validation_error_when_version_is_missing_from_package_config(self): - packages_data = {"packages": [{"package": "dbt-labs-test/b", "version": None}]} with self.assertRaises(ValidationError) as exc: @@ -673,7 +833,6 @@ def test_validation_error_when_version_is_missing_from_package_config(self): assert msg in str(exc.exception) def test_validation_error_when_namespace_is_missing_from_package_config(self): - packages_data = {"packages": [{"package": "dbt-labs", "version": "1.0.0"}]} with self.assertRaises(ValidationError) as exc: diff --git a/tests/unit/events/__init__.py b/tests/unit/events/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/events/test_logging.py b/tests/unit/events/test_logging.py new file mode 100644 index 00000000000..b2077b64793 --- /dev/null +++ b/tests/unit/events/test_logging.py @@ -0,0 +1,38 @@ +from argparse import Namespace + +from pytest_mock import MockerFixture + +from dbt.events.logging import setup_event_logger +from dbt.flags import get_flags, set_from_args +from dbt_common.events.base_types import BaseEvent +from dbt_common.events.event_manager_client import get_event_manager +from dbt_common.events.logger import LoggerConfig +from tests.utils import EventCatcher + + +class TestSetupEventLogger: + def test_clears_preexisting_event_manager_state(self) -> None: + manager = get_event_manager() + manager.add_logger(LoggerConfig(name="test_logger")) + manager.callbacks.append(EventCatcher(BaseEvent).catch) + assert len(manager.loggers) == 1 + assert len(manager.callbacks) == 1 + + args = Namespace(log_level="none", log_level_file="none") + set_from_args(args, {}) + + setup_event_logger(get_flags()) + assert len(manager.loggers) == 0 + assert len(manager.callbacks) == 1 # snowplow tracker for behavior flags + + def test_specify_max_bytes( + self, + mocker: MockerFixture, + ) -> None: + patched_file_handler = mocker.patch("dbt_common.events.logger.RotatingFileHandler") + args = Namespace(log_file_max_bytes=1234567) + set_from_args(args, {}) + setup_event_logger(get_flags()) + patched_file_handler.assert_called_once_with( + filename="logs/dbt.log", encoding="utf8", maxBytes=1234567, backupCount=5 + ) diff --git a/tests/unit/test_proto_events.py b/tests/unit/events/test_types.py similarity index 91% rename from tests/unit/test_proto_events.py rename to tests/unit/events/test_types.py index e8524e11740..51fdf8a2024 100644 --- a/tests/unit/test_proto_events.py +++ b/tests/unit/events/test_types.py @@ -1,22 +1,23 @@ +from google.protobuf.json_format import MessageToDict + +from dbt.adapters.events.types import PluginLoadError, RollbackFailed +from dbt.events import core_types_pb2 from dbt.events.types import ( - MainReportVersion, - MainReportArgs, - RollbackFailed, - MainEncounteredError, - PluginLoadError, LogStartLine, LogTestResult, + MainEncounteredError, + MainReportArgs, + MainReportVersion, ) -from dbt.events.functions import msg_to_dict, msg_to_json, LOG_VERSION, reset_metadata_vars -from dbt.events import types_pb2 -from dbt.events.base_types import msg_from_base_event, EventLevel from dbt.version import installed -from google.protobuf.json_format import MessageToDict -from dbt.flags import set_from_args -from argparse import Namespace - -set_from_args(Namespace(WARN_ERROR=False), None) - +from dbt_common.events import types_pb2 +from 
dbt_common.events.base_types import EventLevel, msg_from_base_event +from dbt_common.events.functions import ( + LOG_VERSION, + msg_to_dict, + msg_to_json, + reset_metadata_vars, +) info_keys = { "name", @@ -52,7 +53,7 @@ def test_events(): generic_msg.ParseFromString(serialized) assert generic_msg.info.code == "A001" # get the message class for the real message from the generic message - message_class = getattr(types_pb2, f"{generic_msg.info.name}Msg") + message_class = getattr(core_types_pb2, f"{generic_msg.info.name}Msg") new_msg = message_class() new_msg.ParseFromString(serialized) assert new_msg.info.code == msg.info.code @@ -169,7 +170,7 @@ def test_extra_dict_on_event(monkeypatch): generic_msg.ParseFromString(serialized) assert generic_msg.info.code == "A001" # get the message class for the real message from the generic message - message_class = getattr(types_pb2, f"{generic_msg.info.name}Msg") + message_class = getattr(core_types_pb2, f"{generic_msg.info.name}Msg") new_msg = message_class() new_msg.ParseFromString(serialized) new_msg_dict = MessageToDict(new_msg) diff --git a/tests/unit/fixtures.py b/tests/unit/fixtures.py new file mode 100644 index 00000000000..cfcc1636b18 --- /dev/null +++ b/tests/unit/fixtures.py @@ -0,0 +1,79 @@ +from dbt.artifacts.resources import Contract, TestConfig, TestMetadata +from dbt.contracts.files import FileHash +from dbt.contracts.graph.nodes import ( + DependsOn, + GenericTestNode, + InjectedCTE, + ModelConfig, + ModelNode, +) +from dbt.node_types import NodeType + + +def model_node(): + return ModelNode( + package_name="test", + path="/root/models/foo.sql", + original_file_path="models/foo.sql", + language="sql", + raw_code='select * from {{ ref("other") }}', + name="foo", + resource_type=NodeType.Model, + unique_id="model.test.foo", + fqn=["test", "models", "foo"], + refs=[], + sources=[], + metrics=[], + depends_on=DependsOn(), + description="", + primary_key=[], + database="test_db", + schema="test_schema", + alias="bar", + tags=[], + config=ModelConfig(), + contract=Contract(), + meta={}, + compiled=True, + extra_ctes=[InjectedCTE("whatever", "select * from other")], + extra_ctes_injected=True, + compiled_code="with whatever as (select * from other) select * from whatever", + checksum=FileHash.from_contents(""), + unrendered_config={}, + ) + + +def generic_test_node(): + return GenericTestNode( + package_name="test", + path="/root/x/path.sql", + original_file_path="/root/path.sql", + language="sql", + raw_code='select * from {{ ref("other") }}', + name="foo", + resource_type=NodeType.Test, + unique_id="model.test.foo", + fqn=["test", "models", "foo"], + refs=[], + sources=[], + metrics=[], + depends_on=DependsOn(), + description="", + database="test_db", + schema="dbt_test__audit", + alias="bar", + tags=[], + config=TestConfig(severity="warn"), + contract=Contract(), + meta={}, + compiled=True, + extra_ctes=[InjectedCTE("whatever", "select * from other")], + extra_ctes_injected=True, + compiled_code="with whatever as (select * from other) select * from whatever", + column_name="id", + test_metadata=TestMetadata(namespace=None, name="foo", kwargs={}), + checksum=FileHash.from_contents(""), + unrendered_config={ + "severity": "warn", + }, + ) diff --git a/tests/unit/graph/__init__.py b/tests/unit/graph/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/test_graph_selector_parsing.py b/tests/unit/graph/test_cli.py similarity index 99% rename from tests/unit/test_graph_selector_parsing.py rename to 
tests/unit/graph/test_cli.py index f330412ffc8..62cabab14f6 100644 --- a/tests/unit/test_graph_selector_parsing.py +++ b/tests/unit/graph/test_cli.py @@ -1,15 +1,16 @@ -from dbt.graph import ( - cli, - SelectionUnion, - SelectionIntersection, - SelectionDifference, - SelectionCriteria, -) -from dbt.graph.selector_methods import MethodName import textwrap + import yaml from dbt.contracts.selection import SelectorFile +from dbt.graph import ( + SelectionCriteria, + SelectionDifference, + SelectionIntersection, + SelectionUnion, + cli, +) +from dbt.graph.selector_methods import MethodName def parse_file(txt: str) -> SelectorFile: diff --git a/tests/unit/graph/test_graph.py b/tests/unit/graph/test_graph.py new file mode 100644 index 00000000000..252ff0473d0 --- /dev/null +++ b/tests/unit/graph/test_graph.py @@ -0,0 +1,164 @@ +import pytest + +from dbt.compilation import Linker +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import ModelNode +from dbt.graph.graph import Graph +from tests.unit.utils.manifest import make_model + + +class TestGraph: + @pytest.fixture + def extra_parent_model(self) -> ModelNode: + return make_model(pkg="pkg", name="extra_parent_model", code="SELECT 'cats' as interests") + + @pytest.fixture + def non_shared_child_of_extra(self, extra_parent_model: ModelNode) -> ModelNode: + return make_model( + pkg="pkg", + name="non_shared_child_of_extra", + code='SELECT * FROM {{ ref("extra_parent_model") }}', + refs=[extra_parent_model], + ) + + @pytest.fixture + def model_with_two_direct_parents( + self, extra_parent_model: ModelNode, ephemeral_model: ModelNode + ) -> ModelNode: + return make_model( + pkg="pkg", + name="model_with_two_direct_parents", + code='SELECT * FROM {{ ref("ephemeral_model") }} UNION ALL SELECT * FROM {{ ref("extra_parent_model") }}', + refs=[extra_parent_model, ephemeral_model], + ) + + @pytest.fixture(autouse=True) + def local_manifest_extensions( + self, + manifest: Manifest, + model_with_two_direct_parents: ModelNode, + non_shared_child_of_extra: ModelNode, + extra_parent_model: ModelNode, + ) -> Manifest: + manifest.add_node_nofile(extra_parent_model) + manifest.add_node_nofile(non_shared_child_of_extra) + manifest.add_node_nofile(model_with_two_direct_parents) + + @pytest.fixture + def graph(self, manifest: Manifest, local_manifest_extensions) -> Graph: + # We include the `local_manifest_extensions` in the arguments to ensure + # that fixture adds our extra node sbefore creating the graph + linker = Linker() + linker.link_graph(manifest=manifest) + return Graph(graph=linker.graph) + + def test_nodes(self, graph: Graph, manifest: Manifest): + graph_nodes = graph.nodes() + all_manifest_nodes = [] + for resources in manifest.get_resource_fqns().values(): + all_manifest_nodes.extend(list(resources)) + + # Assert that it is a set, thus no duplicates + assert isinstance(graph_nodes, set) + assert len(graph_nodes) == len(all_manifest_nodes) + + def test_descendantcs(self, graph: Graph, manifest: Manifest) -> None: + model: ModelNode = manifest.nodes["model.pkg.ephemeral_model"] + + # check result when not limiting the depth + descendants = graph.descendants(node=model.unique_id) + assert descendants == { + "model.pkg.model_with_two_direct_parents", + "test.pkg.view_test_nothing", + "model.pkg.view_model", + "model.pkg.table_model", + } + + # check that result excludes nodes that are out of depth + descendants = graph.descendants(node=model.unique_id, max_depth=1) + assert descendants == { + 
"model.pkg.model_with_two_direct_parents", + "model.pkg.table_model", + "model.pkg.view_model", + } + + def test_ancestors(self, graph: Graph, manifest: Manifest) -> None: + model: ModelNode = manifest.nodes["model.pkg.table_model"] + + # check result when not limiting the depth + ancestors = graph.ancestors(node=model.unique_id) + + assert ancestors == { + "model.pkg.ephemeral_model", + "source.pkg.raw.seed", + } + + # check that result excludes nodes that are out of depth + ancestors = graph.ancestors(node=model.unique_id, max_depth=1) + assert ancestors == {"model.pkg.ephemeral_model"} + + @pytest.mark.skip(reason="I haven't figured out how to add edge types to nodes") + def test_exclude_edge_type(self) -> None: + # I though something like the following would produce + # linker = Linker() + # linker.link_graph(manifest=manifest) + # linker.add_test_edges(manifest=manifest) + # graph = Graph(graph=linker.graph) + pass + + def test_select_childrens_parents( + self, + graph: Graph, + model_with_two_direct_parents: ModelNode, + extra_parent_model: ModelNode, + ephemeral_model: ModelNode, + ) -> None: + # `select_childrens_parents` should return + # * all children of the selected node (without depth limit) + # * all parents of the children of the selected node (without depth limit) + childrens_parents = graph.select_childrens_parents( + selected={ + extra_parent_model.unique_id, + } + ) + + assert model_with_two_direct_parents.unique_id in childrens_parents + assert extra_parent_model.unique_id in childrens_parents + assert ephemeral_model.unique_id in childrens_parents + assert len(childrens_parents) == 5 + + def test_select_children( + self, + graph: Graph, + ephemeral_model: ModelNode, + extra_parent_model: ModelNode, + ) -> None: + ephemerals_children = graph.select_children(selected={ephemeral_model.unique_id}) + extras_children = graph.select_children(selected={extra_parent_model.unique_id}) + joint_children = graph.select_children( + selected={extra_parent_model.unique_id, ephemeral_model.unique_id} + ) + + assert joint_children == ephemerals_children.union(extras_children) + # These additional assertions are because we intentionally setup the test such that + # neither nodes children set is a subset of the other + assert not ephemerals_children.issubset(extras_children) + assert not extras_children.issubset(ephemerals_children) + + def test_select_parents( + self, + graph: Graph, + non_shared_child_of_extra: ModelNode, + table_model: ModelNode, + ) -> None: + non_shareds_parents = graph.select_parents(selected={non_shared_child_of_extra.unique_id}) + tables_parents = graph.select_parents(selected={table_model.unique_id}) + joint_parents = graph.select_parents( + selected={table_model.unique_id, non_shared_child_of_extra.unique_id} + ) + + assert joint_parents == tables_parents.union(non_shareds_parents) + # These additional assertions are because we intentionally setup the test such that + # neither nodes parents set is a subset of the other + assert not non_shareds_parents.issubset(tables_parents) + assert not tables_parents.issubset(non_shareds_parents) diff --git a/tests/unit/graph/test_nodes.py b/tests/unit/graph/test_nodes.py new file mode 100644 index 00000000000..79522d06427 --- /dev/null +++ b/tests/unit/graph/test_nodes.py @@ -0,0 +1,393 @@ +from copy import deepcopy +from datetime import datetime +from typing import List + +import pytest +from freezegun import freeze_time + +from dbt.artifacts.resources import ( + Defaults, + Dimension, + Entity, + FileHash, + Measure, + 
TestMetadata, +) +from dbt.artifacts.resources.v1.semantic_model import NodeRelation +from dbt.contracts.graph.model_config import TestConfig +from dbt.contracts.graph.nodes import ColumnInfo, ModelNode, SemanticModel +from dbt.node_types import NodeType +from dbt_common.contracts.constraints import ( + ColumnLevelConstraint, + ConstraintType, + ModelLevelConstraint, +) +from dbt_semantic_interfaces.references import MeasureReference +from dbt_semantic_interfaces.type_enums import ( + AggregationType, + DimensionType, + EntityType, +) +from tests.unit.fixtures import generic_test_node, model_node + + +class TestModelNode: + @pytest.fixture(scope="class") + def default_model_node(self): + return ModelNode( + resource_type=NodeType.Model, + unique_id="model.test_package.test_name", + name="test_name", + package_name="test_package", + schema="test_schema", + alias="test_alias", + fqn=["models", "test_name"], + original_file_path="test_original_file_path", + checksum=FileHash.from_contents("checksum"), + path="test_path", + database=None, + ) + + @pytest.mark.parametrize( + "deprecation_date,current_date,expected_is_past_deprecation_date", + [ + (None, "2024-05-02", False), + ("2024-05-01", "2024-05-02", True), + ("2024-05-01", "2024-05-01", False), + ("2024-05-01", "2024-04-30", False), + ], + ) + def test_is_past_deprecation_date( + self, default_model_node, deprecation_date, current_date, expected_is_past_deprecation_date + ): + with freeze_time(current_date): + if deprecation_date is not None: + default_model_node.deprecation_date = datetime.strptime( + deprecation_date, "%Y-%m-%d" + ).astimezone() + + assert default_model_node.is_past_deprecation_date is expected_is_past_deprecation_date + + @pytest.mark.parametrize( + "model_constraints,columns,expected_all_constraints", + [ + ([], {}, []), + ( + [ModelLevelConstraint(type=ConstraintType.foreign_key)], + {}, + [ModelLevelConstraint(type=ConstraintType.foreign_key)], + ), + ( + [], + { + "id": ColumnInfo( + name="id", + constraints=[ColumnLevelConstraint(type=ConstraintType.foreign_key)], + ) + }, + [ColumnLevelConstraint(type=ConstraintType.foreign_key)], + ), + ( + [ModelLevelConstraint(type=ConstraintType.foreign_key)], + { + "id": ColumnInfo( + name="id", + constraints=[ColumnLevelConstraint(type=ConstraintType.foreign_key)], + ) + }, + [ + ModelLevelConstraint(type=ConstraintType.foreign_key), + ColumnLevelConstraint(type=ConstraintType.foreign_key), + ], + ), + ], + ) + def test_all_constraints( + self, default_model_node, model_constraints, columns, expected_all_constraints + ): + default_model_node.constraints = model_constraints + default_model_node.columns = columns + + assert default_model_node.all_constraints == expected_all_constraints + + +class TestSemanticModel: + @pytest.fixture(scope="function") + def dimensions(self) -> List[Dimension]: + return [Dimension(name="ds", type=DimensionType)] + + @pytest.fixture(scope="function") + def entities(self) -> List[Entity]: + return [Entity(name="test_entity", type=EntityType.PRIMARY, expr="id")] + + @pytest.fixture(scope="function") + def measures(self) -> List[Measure]: + return [Measure(name="test_measure", agg=AggregationType.COUNT, expr="id")] + + @pytest.fixture(scope="function") + def default_semantic_model( + self, dimensions: List[Dimension], entities: List[Entity], measures: List[Measure] + ) -> SemanticModel: + return SemanticModel( + name="test_semantic_model", + resource_type=NodeType.SemanticModel, + model="ref('test_model')", + package_name="test", + 
path="test_path", + original_file_path="test_fixture", + unique_id=f"{NodeType.SemanticModel}.test.test_semantic_model", + fqn=[], + defaults=Defaults(agg_time_dimension="ds"), + dimensions=dimensions, + entities=entities, + measures=measures, + node_relation=NodeRelation( + alias="test_alias", schema_name="test_schema", database="test_database" + ), + ) + + def test_checked_agg_time_dimension_for_measure_via_defaults( + self, + default_semantic_model: SemanticModel, + ): + assert default_semantic_model.defaults.agg_time_dimension is not None + measure = default_semantic_model.measures[0] + measure.agg_time_dimension = None + default_semantic_model.checked_agg_time_dimension_for_measure( + MeasureReference(element_name=measure.name) + ) + + def test_checked_agg_time_dimension_for_measure_via_measure( + self, default_semantic_model: SemanticModel + ): + default_semantic_model.defaults = None + measure = default_semantic_model.measures[0] + measure.agg_time_dimension = default_semantic_model.dimensions[0].name + default_semantic_model.checked_agg_time_dimension_for_measure( + MeasureReference(element_name=measure.name) + ) + + def test_checked_agg_time_dimension_for_measure_exception( + self, default_semantic_model: SemanticModel + ): + default_semantic_model.defaults = None + measure = default_semantic_model.measures[0] + measure.agg_time_dimension = None + + with pytest.raises(AssertionError) as execinfo: + default_semantic_model.checked_agg_time_dimension_for_measure( + MeasureReference(measure.name) + ) + + assert ( + f"Aggregation time dimension for measure {measure.name} on semantic model {default_semantic_model.name}" + in str(execinfo.value) + ) + + def test_semantic_model_same_contents(self, default_semantic_model: SemanticModel): + default_semantic_model_copy = deepcopy(default_semantic_model) + + assert default_semantic_model.same_contents(default_semantic_model_copy) + + def test_semantic_model_same_contents_update_model( + self, default_semantic_model: SemanticModel + ): + default_semantic_model_copy = deepcopy(default_semantic_model) + default_semantic_model_copy.model = "ref('test_another_model')" + + assert not default_semantic_model.same_contents(default_semantic_model_copy) + + def test_semantic_model_same_contents_different_node_relation( + self, + default_semantic_model: SemanticModel, + ): + default_semantic_model_copy = deepcopy(default_semantic_model) + default_semantic_model_copy.node_relation.alias = "test_another_alias" + # Relation should not be consided in same_contents + assert default_semantic_model.same_contents(default_semantic_model_copy) + + +# Infer primary key +def test_no_primary_key(): + model = model_node() + assert model.infer_primary_key([]) == [] + + +def test_primary_key_model_constraint(): + model = model_node() + model.constraints = [ModelLevelConstraint(type=ConstraintType.primary_key, columns=["pk"])] + assertSameContents(model.infer_primary_key([]), ["pk"]) + + model.constraints = [ + ModelLevelConstraint(type=ConstraintType.primary_key, columns=["pk1", "pk2"]) + ] + assertSameContents(model.infer_primary_key([]), ["pk1", "pk2"]) + + +def test_primary_key_column_constraint(): + model = model_node() + model.columns = { + "column1": ColumnInfo( + "column1", constraints=[ColumnLevelConstraint(type=ConstraintType.primary_key)] + ), + "column2": ColumnInfo("column2"), + } + assertSameContents(model.infer_primary_key([]), ["column1"]) + + +def test_unique_non_null_single(): + model = model_node() + test1 = generic_test_node() + test1.test_metadata = 
TestMetadata(name="unique", kwargs={"column_name": "column1"}) + test2 = generic_test_node() + test2.test_metadata = TestMetadata(name="not_null", kwargs={"column_name": "column1"}) + test3 = generic_test_node() + test3.test_metadata = TestMetadata(name="unique", kwargs={"column_name": "column2"}) + tests = [test1, test2] + assertSameContents(model.infer_primary_key(tests), ["column1"]) + + +def test_unique_non_null_multiple(): + model = model_node() + tests = [] + for i in range(2): + for enabled in [True, False]: + test1 = generic_test_node() + test1.test_metadata = TestMetadata( + name="unique", kwargs={"column_name": "column" + str(i) + str(enabled)} + ) + test1.config = TestConfig(enabled=enabled) + test2 = generic_test_node() + test2.test_metadata = TestMetadata( + name="not_null", kwargs={"column_name": "column" + str(i) + str(enabled)} + ) + test2.config = TestConfig(enabled=enabled) + tests.extend([test1, test2]) + + assertSameContents( + model.infer_primary_key(tests), + ["column0True", "column1True", "column0False", "column1False"], + ) + + +def test_enabled_unique_single(): + model = model_node() + test1 = generic_test_node() + test1.test_metadata = TestMetadata(name="unique", kwargs={"column_name": "column1"}) + test2 = generic_test_node() + test2.config = TestConfig(enabled=False) + test2.test_metadata = TestMetadata(name="unique", kwargs={"column_name": "column3"}) + + tests = [test1, test2] + assertSameContents(model.infer_primary_key(tests), ["column1"]) + + +def test_enabled_unique_multiple(): + model = model_node() + test1 = generic_test_node() + test1.test_metadata = TestMetadata(name="unique", kwargs={"column_name": "column1"}) + test2 = generic_test_node() + test2.test_metadata = TestMetadata(name="unique", kwargs={"column_name": "column2 || column3"}) + + tests = [test1, test2] + assertSameContents(model.infer_primary_key(tests), ["column1", "column2 || column3"]) + + +def test_enabled_unique_combo_single(): + model = model_node() + test1 = generic_test_node() + test1.test_metadata = TestMetadata( + name="unique_combination_of_columns", + kwargs={"combination_of_columns": ["column1", "column2"]}, + ) + test2 = generic_test_node() + test2.config = TestConfig(enabled=False) + test2.test_metadata = TestMetadata( + name="unique_combination_of_columns", + kwargs={"combination_of_columns": ["column3", "column4"]}, + ) + + tests = [test1, test2] + assertSameContents(model.infer_primary_key(tests), ["column1", "column2"]) + + +def test_enabled_unique_combo_multiple(): + model = model_node() + test1 = generic_test_node() + test1.test_metadata = TestMetadata( + name="unique", kwargs={"combination_of_columns": ["column1", "column2"]} + ) + test2 = generic_test_node() + test2.test_metadata = TestMetadata( + name="unique", kwargs={"combination_of_columns": ["column3", "column4"]} + ) + + tests = [test1, test2] + assertSameContents( + model.infer_primary_key(tests), ["column1", "column2", "column3", "column4"] + ) + + +def test_disabled_unique_single(): + model = model_node() + test1 = generic_test_node() + test1.config = TestConfig(enabled=False) + test1.test_metadata = TestMetadata(name="unique", kwargs={"column_name": "column1"}) + test2 = generic_test_node() + test2.test_metadata = TestMetadata(name="not_null", kwargs={"column_name": "column2"}) + + tests = [test1, test2] + assertSameContents(model.infer_primary_key(tests), ["column1"]) + + +def test_disabled_unique_multiple(): + model = model_node() + test1 = generic_test_node() + test1.config = TestConfig(enabled=False) + 
test1.test_metadata = TestMetadata(name="unique", kwargs={"column_name": "column1"}) + test2 = generic_test_node() + test2.config = TestConfig(enabled=False) + test2.test_metadata = TestMetadata(name="unique", kwargs={"column_name": "column2 || column3"}) + + tests = [test1, test2] + assertSameContents(model.infer_primary_key(tests), ["column1", "column2 || column3"]) + + +def test_disabled_unique_combo_single(): + model = model_node() + test1 = generic_test_node() + test1.config = TestConfig(enabled=False) + test1.test_metadata = TestMetadata( + name="unique", kwargs={"combination_of_columns": ["column1", "column2"]} + ) + test2 = generic_test_node() + test2.config = TestConfig(enabled=False) + test2.test_metadata = TestMetadata( + name="random", kwargs={"combination_of_columns": ["column3", "column4"]} + ) + + tests = [test1, test2] + assertSameContents(model.infer_primary_key(tests), ["column1", "column2"]) + + +def test_disabled_unique_combo_multiple(): + model = model_node() + test1 = generic_test_node() + test1.config = TestConfig(enabled=False) + test1.test_metadata = TestMetadata( + name="unique", kwargs={"combination_of_columns": ["column1", "column2"]} + ) + test2 = generic_test_node() + test2.config = TestConfig(enabled=False) + test2.test_metadata = TestMetadata( + name="unique", kwargs={"combination_of_columns": ["column3", "column4"]} + ) + + tests = [test1, test2] + assertSameContents( + model.infer_primary_key(tests), ["column1", "column2", "column3", "column4"] + ) + + +def assertSameContents(list1, list2): + assert sorted(list1) == sorted(list2) diff --git a/tests/unit/graph/test_queue.py b/tests/unit/graph/test_queue.py new file mode 100644 index 00000000000..50671d03fb2 --- /dev/null +++ b/tests/unit/graph/test_queue.py @@ -0,0 +1,47 @@ +import networkx as nx +import pytest + +from dbt.contracts.graph.manifest import Manifest +from dbt.graph.queue import GraphQueue +from tests.unit.utils import MockNode, make_manifest + + +class TestGraphQueue: + @pytest.fixture(scope="class") + def manifest(self) -> Manifest: + return make_manifest( + nodes=[ + MockNode(package="test_package", name="upstream_model"), + MockNode(package="test_package", name="downstream_model"), + ] + ) + + @pytest.fixture(scope="class") + def graph(self) -> nx.DiGraph: + graph = nx.DiGraph() + graph.add_edge("model.test_package.upstream_model", "model.test_package.downstream_model") + return graph + + def test_init_graph_queue(self, manifest, graph): + graph_queue = GraphQueue(graph=graph, manifest=manifest, selected={}) + + assert graph_queue.manifest == manifest + assert graph_queue.graph == graph + assert graph_queue.inner.queue == [(0, "model.test_package.upstream_model")] + assert graph_queue.in_progress == set() + assert graph_queue.queued == {"model.test_package.upstream_model"} + assert graph_queue.lock + + def test_init_graph_queue_preserve_edges_false(self, manifest, graph): + graph_queue = GraphQueue(graph=graph, manifest=manifest, selected={}, preserve_edges=False) + + # when preserve_edges is set to false, dependencies between nodes are no longer tracked in the priority queue + assert list(graph_queue.graph.edges) == [] + assert graph_queue.inner.queue == [ + (0, "model.test_package.downstream_model"), + (0, "model.test_package.upstream_model"), + ] + assert graph_queue.queued == { + "model.test_package.upstream_model", + "model.test_package.downstream_model", + } diff --git a/tests/unit/graph/test_selector.py b/tests/unit/graph/test_selector.py new file mode 100644 index 
00000000000..3da1ccd4226 --- /dev/null +++ b/tests/unit/graph/test_selector.py @@ -0,0 +1,325 @@ +import string +from argparse import Namespace +from queue import Empty +from typing import List +from unittest.mock import MagicMock + +import networkx as nx +import pytest + +import dbt.compilation +import dbt.config +import dbt.exceptions +import dbt.graph.cli as graph_cli +import dbt.graph.selector as graph_selector +import dbt.parser +import dbt.parser.manifest +import dbt.utils +import dbt_common.exceptions +from dbt.config.runtime import RuntimeConfig +from dbt.flags import set_from_args +from dbt.graph import NodeSelector, parse_difference +from dbt.node_types import NodeType +from tests.unit.utils.manifest import make_manifest, make_model + +set_from_args(Namespace(WARN_ERROR=False), None) + + +def _get_graph(): + integer_graph = nx.balanced_tree(2, 2, nx.DiGraph()) + + package_mapping = { + i: "m." + ("X" if i % 2 == 0 else "Y") + "." + letter + for (i, letter) in enumerate(string.ascii_lowercase) + } + + # Edges: [(X.a, Y.b), (X.a, X.c), (Y.b, Y.d), (Y.b, X.e), (X.c, Y.f), (X.c, X.g)] + return graph_selector.Graph(nx.relabel_nodes(integer_graph, package_mapping)) + + +def _get_manifest(graph): + nodes = {} + for unique_id in graph: + fqn = unique_id.split(".") + node = MagicMock( + unique_id=unique_id, + fqn=fqn, + package_name=fqn[0], + tags=[], + resource_type=NodeType.Model, + empty=False, + config=MagicMock(enabled=True), + is_versioned=False, + ) + nodes[unique_id] = node + + nodes["m.X.a"].tags = ["abc"] + nodes["m.Y.b"].tags = ["abc", "bcef"] + nodes["m.X.c"].tags = ["abc", "bcef"] + nodes["m.Y.d"].tags = [] + nodes["m.X.e"].tags = ["efg", "bcef"] + nodes["m.Y.f"].tags = ["efg", "bcef"] + nodes["m.X.g"].tags = ["efg"] + return MagicMock(nodes=nodes) + + +@pytest.fixture +def graph(): + return _get_graph() + + +@pytest.fixture +def mock_manifest_with_mock_graph(graph): + return _get_manifest(graph) + + +def id_macro(arg): + if isinstance(arg, str): + return arg + try: + return "_".join(arg) + except TypeError: + return arg + + +run_specs = [ + # include by fqn + (["X.a"], [], {"m.X.a"}), + # include by tag + (["tag:abc"], [], {"m.X.a", "m.Y.b", "m.X.c"}), + # exclude by tag + (["*"], ["tag:abc"], {"m.Y.d", "m.X.e", "m.Y.f", "m.X.g"}), + # tag + fqn + (["tag:abc", "a"], [], {"m.X.a", "m.Y.b", "m.X.c"}), + (["tag:abc", "d"], [], {"m.X.a", "m.Y.b", "m.X.c", "m.Y.d"}), + # multiple node selection across packages + (["X.a", "b"], [], {"m.X.a", "m.Y.b"}), + (["X.a+"], ["b"], {"m.X.a", "m.X.c", "m.Y.d", "m.X.e", "m.Y.f", "m.X.g"}), + # children + (["X.c+"], [], {"m.X.c", "m.Y.f", "m.X.g"}), + (["X.a+1"], [], {"m.X.a", "m.Y.b", "m.X.c"}), + (["X.a+"], ["tag:efg"], {"m.X.a", "m.Y.b", "m.X.c", "m.Y.d"}), + # parents + (["+Y.f"], [], {"m.X.c", "m.Y.f", "m.X.a"}), + (["1+Y.f"], [], {"m.X.c", "m.Y.f"}), + # childrens parents + (["@X.c"], [], {"m.X.a", "m.X.c", "m.Y.f", "m.X.g"}), + # multiple selection/exclusion + (["tag:abc", "tag:bcef"], [], {"m.X.a", "m.Y.b", "m.X.c", "m.X.e", "m.Y.f"}), + (["tag:abc", "tag:bcef"], ["tag:efg"], {"m.X.a", "m.Y.b", "m.X.c"}), + (["tag:abc", "tag:bcef"], ["tag:efg", "a"], {"m.Y.b", "m.X.c"}), + # intersections + (["a,a"], [], {"m.X.a"}), + (["+c,c+"], [], {"m.X.c"}), + (["a,b"], [], set()), + (["tag:abc,tag:bcef"], [], {"m.Y.b", "m.X.c"}), + (["*,tag:abc,a"], [], {"m.X.a"}), + (["a,tag:abc,*"], [], {"m.X.a"}), + (["tag:abc,tag:bcef"], ["c"], {"m.Y.b"}), + (["tag:bcef,tag:efg"], ["tag:bcef,@b"], {"m.Y.f"}), + (["tag:bcef,tag:efg"], ["tag:bcef,@a"], set()), + 
(["*,@a,+b"], ["*,tag:abc,tag:bcef"], {"m.X.a"}), + (["tag:bcef,tag:efg", "*,tag:abc"], [], {"m.X.a", "m.Y.b", "m.X.c", "m.X.e", "m.Y.f"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["e"], {"m.X.a", "m.Y.b", "m.X.c", "m.Y.f"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["e"], {"m.X.a", "m.Y.b", "m.X.c", "m.Y.f"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["e", "f"], {"m.X.a", "m.Y.b", "m.X.c"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["tag:abc,tag:bcef"], {"m.X.a", "m.X.e", "m.Y.f"}), + (["tag:bcef,tag:efg", "*,tag:abc"], ["tag:abc,tag:bcef", "tag:abc,a"], {"m.X.e", "m.Y.f"}), +] + + +@pytest.mark.parametrize("include,exclude,expected", run_specs, ids=id_macro) +def test_run_specs(include, exclude, expected, graph, mock_manifest_with_mock_graph): + selector = graph_selector.NodeSelector(graph, mock_manifest_with_mock_graph) + spec = graph_cli.parse_difference(include, exclude) + selected, _ = selector.select_nodes(spec) + + assert selected == expected + + +param_specs = [ + ("a", False, None, False, None, "fqn", "a", False), + ("+a", True, None, False, None, "fqn", "a", False), + ("256+a", True, 256, False, None, "fqn", "a", False), + ("a+", False, None, True, None, "fqn", "a", False), + ("a+256", False, None, True, 256, "fqn", "a", False), + ("+a+", True, None, True, None, "fqn", "a", False), + ("16+a+32", True, 16, True, 32, "fqn", "a", False), + ("@a", False, None, False, None, "fqn", "a", True), + ("a.b", False, None, False, None, "fqn", "a.b", False), + ("+a.b", True, None, False, None, "fqn", "a.b", False), + ("256+a.b", True, 256, False, None, "fqn", "a.b", False), + ("a.b+", False, None, True, None, "fqn", "a.b", False), + ("a.b+256", False, None, True, 256, "fqn", "a.b", False), + ("+a.b+", True, None, True, None, "fqn", "a.b", False), + ("16+a.b+32", True, 16, True, 32, "fqn", "a.b", False), + ("@a.b", False, None, False, None, "fqn", "a.b", True), + ("a.b.*", False, None, False, None, "fqn", "a.b.*", False), + ("+a.b.*", True, None, False, None, "fqn", "a.b.*", False), + ("256+a.b.*", True, 256, False, None, "fqn", "a.b.*", False), + ("a.b.*+", False, None, True, None, "fqn", "a.b.*", False), + ("a.b.*+256", False, None, True, 256, "fqn", "a.b.*", False), + ("+a.b.*+", True, None, True, None, "fqn", "a.b.*", False), + ("16+a.b.*+32", True, 16, True, 32, "fqn", "a.b.*", False), + ("@a.b.*", False, None, False, None, "fqn", "a.b.*", True), + ("tag:a", False, None, False, None, "tag", "a", False), + ("+tag:a", True, None, False, None, "tag", "a", False), + ("256+tag:a", True, 256, False, None, "tag", "a", False), + ("tag:a+", False, None, True, None, "tag", "a", False), + ("tag:a+256", False, None, True, 256, "tag", "a", False), + ("+tag:a+", True, None, True, None, "tag", "a", False), + ("16+tag:a+32", True, 16, True, 32, "tag", "a", False), + ("@tag:a", False, None, False, None, "tag", "a", True), + ("source:a", False, None, False, None, "source", "a", False), + ("source:a+", False, None, True, None, "source", "a", False), + ("source:a+1", False, None, True, 1, "source", "a", False), + ("source:a+32", False, None, True, 32, "source", "a", False), + ("@source:a", False, None, False, None, "source", "a", True), +] + + +@pytest.mark.parametrize( + "spec,parents,parents_depth,children,children_depth,filter_type,filter_value,childrens_parents", + param_specs, + ids=id_macro, +) +def test_parse_specs( + spec, + parents, + parents_depth, + children, + children_depth, + filter_type, + filter_value, + childrens_parents, +): + parsed = graph_selector.SelectionCriteria.from_single_spec(spec) + assert 
parsed.parents == parents + assert parsed.parents_depth == parents_depth + assert parsed.children == children + assert parsed.children_depth == children_depth + assert parsed.method == filter_type + assert parsed.value == filter_value + assert parsed.childrens_parents == childrens_parents + + +invalid_specs = [ + "@a+", + "@a.b+", + "@a.b*+", + "@tag:a+", + "@source:a+", +] + + +@pytest.mark.parametrize("invalid", invalid_specs, ids=lambda k: str(k)) +def test_invalid_specs(invalid): + with pytest.raises(dbt_common.exceptions.DbtRuntimeError): + graph_selector.SelectionCriteria.from_single_spec(invalid) + + +class TestCompiler: + def test_single_model(self, runtime_config: RuntimeConfig): + model = make_model(pkg="pkg", name="model_one", code="SELECT * FROM events") + manifest = make_manifest(nodes=[model]) + + compiler = dbt.compilation.Compiler(config=runtime_config) + linker = compiler.compile(manifest) + + assert linker.nodes() == {model.unique_id} + assert linker.edges() == set() + + def test_two_models_simple_ref(self, runtime_config: RuntimeConfig): + model_one = make_model(pkg="pkg", name="model_one", code="SELECT * FROM events") + model_two = make_model( + pkg="pkg", + name="model_two", + code="SELECT * FROM {{ref('model_one')}}", + refs=[model_one], + ) + models = [model_one, model_two] + manifest = make_manifest(nodes=models) + + compiler = dbt.compilation.Compiler(config=runtime_config) + linker = compiler.compile(manifest) + + expected_nodes: List[str] = [model.unique_id for model in models] + assert linker.nodes() == set(expected_nodes) + assert list(linker.edges()) == [tuple(expected_nodes)] + + +class TestNodeSelector: + def test_dependency_list(self, runtime_config: RuntimeConfig): + model_one = make_model(pkg="pkg", name="model_one", code="SELECT * FROM events") + model_two = make_model( + pkg="pkg", + name="model_two", + code="SELECT * FROM {{ref('model_one')}}", + refs=[model_one], + ) + model_three = make_model( + pkg="pkg", + name="model_three", + code=""" + SELECT * FROM {{ ref("model_1") }} + union all + SELECT * FROM {{ ref("model_2") }} + """, + refs=[model_one, model_two], + ) + model_four = make_model( + pkg="pkg", + name="model_four", + code="SELECT * FROM {{ref('model_three')}}", + refs=[model_three], + ) + models = [model_one, model_two, model_three, model_four] + manifest = make_manifest(nodes=models) + + # Get the graph + compiler = dbt.compilation.Compiler(runtime_config) + graph = compiler.compile(manifest) + + # Create the selector and get the queue + selector = NodeSelector(graph, manifest) + queue = selector.get_graph_queue( + parse_difference( + None, + None, + ) + ) + + for model in models: + assert not queue.empty() + got = queue.get(block=False) + assert got.unique_id == model.unique_id + with pytest.raises(Empty): + queue.get(block=False) + queue.mark_done(got.unique_id) + assert queue.empty() + + def test_select_downstream_of_empty_model(self, runtime_config: RuntimeConfig): + # empty model + model_one = make_model(pkg="other", name="model_one", code="") + # non-empty model + model_two = make_model( + pkg="pkg", + name="model_two", + code="""select * from {{ref('model_one')}}""", + refs=[model_one], + ) + models = [model_one, model_two] + manifest = make_manifest(nodes=models) + + # Get the graph + compiler = dbt.compilation.Compiler(runtime_config) + graph = compiler.compile(manifest) + + # Ensure that model_two is selected as downstream of model_one + selector = NodeSelector(graph, manifest) + spec = 
graph_selector.SelectionCriteria.from_single_spec("model_one+") + assert selector.get_selected(spec) == {"model.pkg.model_two"} + + # Ensure that --indirect-selection empty returns the same result + spec.indirect_selection = graph_selector.IndirectSelection.Empty + assert selector.get_selected(spec) == {"model.pkg.model_two"} diff --git a/tests/unit/test_graph_selector_methods.py b/tests/unit/graph/test_selector_methods.py similarity index 63% rename from tests/unit/test_graph_selector_methods.py rename to tests/unit/graph/test_selector_methods.py index 564e06d0189..04aebe052d1 100644 --- a/tests/unit/test_graph_selector_methods.py +++ b/tests/unit/graph/test_selector_methods.py @@ -1,795 +1,47 @@ import copy - -import pytest +from dataclasses import replace +from pathlib import Path from unittest import mock -from pathlib import Path +import pytest -from dbt.contracts.files import FileHash -from dbt.contracts.graph.nodes import ( - DependsOn, - MacroDependsOn, - NodeConfig, - Macro, - ModelNode, - Exposure, - Metric, - MetricTypeParams, - MetricInputMeasure, - Group, - SeedNode, - SingularTestNode, - GenericTestNode, - SourceDefinition, - TestConfig, - TestMetadata, - ColumnInfo, -) -from dbt.contracts.graph.manifest import Manifest, ManifestMetadata -from dbt.contracts.graph.unparsed import ExposureType, Owner +import dbt_common.exceptions +from dbt.artifacts.resources import ColumnInfo, FileHash +from dbt.contracts.graph.manifest import Manifest from dbt.contracts.state import PreviousState -from dbt.node_types import NodeType from dbt.graph.selector_methods import ( - MethodManager, - QualifiedNameSelectorMethod, - TagSelectorMethod, - GroupSelectorMethod, AccessSelectorMethod, - SourceSelectorMethod, - PathSelectorMethod, + ConfigSelectorMethod, + ExposureSelectorMethod, FileSelectorMethod, + GroupSelectorMethod, + MethodManager, + MetricSelectorMethod, PackageSelectorMethod, - ConfigSelectorMethod, + PathSelectorMethod, + QualifiedNameSelectorMethod, + SavedQuerySelectorMethod, + SemanticModelSelectorMethod, + SourceSelectorMethod, + StateSelectorMethod, + TagSelectorMethod, TestNameSelectorMethod, TestTypeSelectorMethod, - StateSelectorMethod, - ExposureSelectorMethod, - MetricSelectorMethod, + UnitTestSelectorMethod, VersionSelectorMethod, ) -import dbt.exceptions -import dbt.contracts.graph.nodes -from dbt_semantic_interfaces.type_enums import MetricType -from .utils import replace_config - - -def make_model( - pkg, - name, - sql, - refs=None, - sources=None, - tags=None, - path=None, - alias=None, - config_kwargs=None, - fqn_extras=None, - depends_on_macros=None, - version=None, - latest_version=None, - access=None, -): - if refs is None: - refs = [] - if sources is None: - sources = [] - if tags is None: - tags = [] - if path is None: - path = f"{name}.sql" - if alias is None: - alias = name - if config_kwargs is None: - config_kwargs = {} - if depends_on_macros is None: - depends_on_macros = [] - - if fqn_extras is None: - fqn_extras = [] - - fqn = [pkg] + fqn_extras + [name] - if version: - fqn.append(f"v{version}") - - depends_on_nodes = [] - source_values = [] - ref_values = [] - for ref in refs: - ref_values.append([ref.name]) - depends_on_nodes.append(ref.unique_id) - for src in sources: - source_values.append([src.source_name, src.name]) - depends_on_nodes.append(src.unique_id) - - return ModelNode( - language="sql", - raw_code=sql, - database="dbt", - schema="dbt_schema", - alias=alias, - name=name, - fqn=fqn, - unique_id=f"model.{pkg}.{name}" if not version else 
f"model.{pkg}.{name}.v{version}", - package_name=pkg, - path=path, - original_file_path=f"models/{path}", - config=NodeConfig(**config_kwargs), - tags=tags, - refs=ref_values, - sources=source_values, - depends_on=DependsOn( - nodes=depends_on_nodes, - macros=depends_on_macros, - ), - resource_type=NodeType.Model, - checksum=FileHash.from_contents(""), - version=version, - latest_version=latest_version, - access=access, - ) - - -def make_seed( - pkg, name, path=None, loader=None, alias=None, tags=None, fqn_extras=None, checksum=None -): - if alias is None: - alias = name - if tags is None: - tags = [] - if path is None: - path = f"{name}.csv" - - if fqn_extras is None: - fqn_extras = [] - - if checksum is None: - checksum = FileHash.from_contents("") - - fqn = [pkg] + fqn_extras + [name] - return SeedNode( - database="dbt", - schema="dbt_schema", - alias=alias, - name=name, - fqn=fqn, - unique_id=f"seed.{pkg}.{name}", - package_name=pkg, - path=path, - original_file_path=f"data/{path}", - tags=tags, - resource_type=NodeType.Seed, - checksum=FileHash.from_contents(""), - ) - - -def make_source( - pkg, source_name, table_name, path=None, loader=None, identifier=None, fqn_extras=None -): - if path is None: - path = "models/schema.yml" - if loader is None: - loader = "my_loader" - if identifier is None: - identifier = table_name - - if fqn_extras is None: - fqn_extras = [] - - fqn = [pkg] + fqn_extras + [source_name, table_name] - - return SourceDefinition( - fqn=fqn, - database="dbt", - schema="dbt_schema", - unique_id=f"source.{pkg}.{source_name}.{table_name}", - package_name=pkg, - path=path, - original_file_path=path, - name=table_name, - source_name=source_name, - loader="my_loader", - identifier=identifier, - resource_type=NodeType.Source, - loaded_at_field="loaded_at", - tags=[], - source_description="", - ) - - -def make_macro(pkg, name, macro_sql, path=None, depends_on_macros=None): - if path is None: - path = "macros/macros.sql" - - if depends_on_macros is None: - depends_on_macros = [] - - return Macro( - name=name, - macro_sql=macro_sql, - unique_id=f"macro.{pkg}.{name}", - package_name=pkg, - path=path, - original_file_path=path, - resource_type=NodeType.Macro, - depends_on=MacroDependsOn(macros=depends_on_macros), - ) - - -def make_unique_test(pkg, test_model, column_name, path=None, refs=None, sources=None, tags=None): - return make_schema_test(pkg, "unique", test_model, {}, column_name=column_name) - - -def make_not_null_test( - pkg, test_model, column_name, path=None, refs=None, sources=None, tags=None -): - return make_schema_test(pkg, "not_null", test_model, {}, column_name=column_name) - - -def make_schema_test( - pkg, - test_name, - test_model, - test_kwargs, - path=None, - refs=None, - sources=None, - tags=None, - column_name=None, -): - kwargs = test_kwargs.copy() - ref_values = [] - source_values = [] - # this doesn't really have to be correct - if isinstance(test_model, SourceDefinition): - kwargs["model"] = ( - "{{ source('" + test_model.source_name + "', '" + test_model.name + "') }}" - ) - source_values.append([test_model.source_name, test_model.name]) - else: - kwargs["model"] = "{{ ref('" + test_model.name + "')}}" - ref_values.append([test_model.name]) - if column_name is not None: - kwargs["column_name"] = column_name - - # whatever - args_name = test_model.search_name.replace(".", "_") - if column_name is not None: - args_name += "_" + column_name - node_name = f"{test_name}_{args_name}" - raw_code = ( - '{{ config(severity="ERROR") }}{{ test_' + test_name + 
"(**dbt_schema_test_kwargs) }}" - ) - name_parts = test_name.split(".") - - if len(name_parts) == 2: - namespace, test_name = name_parts - macro_depends = f"macro.{namespace}.test_{test_name}" - elif len(name_parts) == 1: - namespace = None - macro_depends = f"macro.dbt.test_{test_name}" - else: - assert False, f"invalid test name: {test_name}" - - if path is None: - path = "schema.yml" - if tags is None: - tags = ["schema"] - - if refs is None: - refs = [] - if sources is None: - sources = [] - - depends_on_nodes = [] - for ref in refs: - ref_values.append([ref.name]) - depends_on_nodes.append(ref.unique_id) - - for source in sources: - source_values.append([source.source_name, source.name]) - depends_on_nodes.append(source.unique_id) - - return GenericTestNode( - language="sql", - raw_code=raw_code, - test_metadata=TestMetadata( - namespace=namespace, - name=test_name, - kwargs=kwargs, - ), - database="dbt", - schema="dbt_postgres", - name=node_name, - alias=node_name, - fqn=["minimal", "schema_test", node_name], - unique_id=f"test.{pkg}.{node_name}", - package_name=pkg, - path=f"schema_test/{node_name}.sql", - original_file_path=f"models/{path}", - resource_type=NodeType.Test, - tags=tags, - refs=ref_values, - sources=[], - depends_on=DependsOn(macros=[macro_depends], nodes=depends_on_nodes), - column_name=column_name, - checksum=FileHash.from_contents(""), - ) - - -def make_data_test( - pkg, name, sql, refs=None, sources=None, tags=None, path=None, config_kwargs=None -): - - if refs is None: - refs = [] - if sources is None: - sources = [] - if tags is None: - tags = ["data"] - if path is None: - path = f"{name}.sql" - - if config_kwargs is None: - config_kwargs = {} - - fqn = ["minimal", "data_test", name] - - depends_on_nodes = [] - source_values = [] - ref_values = [] - for ref in refs: - ref_values.append([ref.name]) - depends_on_nodes.append(ref.unique_id) - for src in sources: - source_values.append([src.source_name, src.name]) - depends_on_nodes.append(src.unique_id) - - return SingularTestNode( - language="sql", - raw_code=sql, - database="dbt", - schema="dbt_schema", - name=name, - alias=name, - fqn=fqn, - unique_id=f"test.{pkg}.{name}", - package_name=pkg, - path=path, - original_file_path=f"tests/{path}", - config=TestConfig(**config_kwargs), - tags=tags, - refs=ref_values, - sources=source_values, - depends_on=DependsOn(nodes=depends_on_nodes, macros=[]), - resource_type=NodeType.Test, - checksum=FileHash.from_contents(""), - ) - - -def make_exposure(pkg, name, path=None, fqn_extras=None, owner=None): - if path is None: - path = "schema.yml" - - if fqn_extras is None: - fqn_extras = [] - - if owner is None: - owner = Owner(email="test@example.com") - - fqn = [pkg, "exposures"] + fqn_extras + [name] - return Exposure( - name=name, - resource_type=NodeType.Exposure, - type=ExposureType.Notebook, - fqn=fqn, - unique_id=f"exposure.{pkg}.{name}", - package_name=pkg, - path=path, - original_file_path=path, - owner=owner, - ) - - -def make_metric(pkg, name, path=None): - if path is None: - path = "schema.yml" - - return Metric( - name=name, - resource_type=NodeType.Metric, - path=path, - package_name=pkg, - original_file_path=path, - unique_id=f"metric.{pkg}.{name}", - fqn=[pkg, "metrics", name], - label="New Customers", - description="New customers", - type=MetricType.SIMPLE, - type_params=MetricTypeParams(measure=MetricInputMeasure(name="count_cats")), - meta={"is_okr": True}, - tags=["okrs"], - ) - - -def make_group(pkg, name, path=None): - if path is None: - path = 
"schema.yml" - - return Group( - name=name, - resource_type=NodeType.Group, - path=path, - package_name=pkg, - original_file_path=path, - unique_id=f"group.{pkg}.{name}", - owner="email@gmail.com", - ) - - -@pytest.fixture -def macro_test_unique(): - return make_macro( - "dbt", "test_unique", "blablabla", depends_on_macros=["macro.dbt.default__test_unique"] - ) - - -@pytest.fixture -def macro_default_test_unique(): - return make_macro("dbt", "default__test_unique", "blablabla") - - -@pytest.fixture -def macro_test_not_null(): - return make_macro( - "dbt", "test_not_null", "blablabla", depends_on_macros=["macro.dbt.default__test_not_null"] - ) - - -@pytest.fixture -def macro_default_test_not_null(): - return make_macro("dbt", "default__test_not_null", "blabla") - - -@pytest.fixture -def seed(): - return make_seed("pkg", "seed") - - -@pytest.fixture -def source(): - return make_source("pkg", "raw", "seed", identifier="seed") - - -@pytest.fixture -def ephemeral_model(source): - return make_model( - "pkg", - "ephemeral_model", - 'select * from {{ source("raw", "seed") }}', - config_kwargs={"materialized": "ephemeral"}, - sources=[source], - ) - - -@pytest.fixture -def view_model(ephemeral_model): - return make_model( - "pkg", - "view_model", - 'select * from {{ ref("ephemeral_model") }}', - config_kwargs={"materialized": "view"}, - refs=[ephemeral_model], - tags=["uses_ephemeral"], - ) - - -@pytest.fixture -def table_model(ephemeral_model): - return make_model( - "pkg", - "table_model", - 'select * from {{ ref("ephemeral_model") }}', - config_kwargs={ - "materialized": "table", - "meta": { - # Other properties to test in test_select_config_meta - "string_property": "some_string", - "truthy_bool_property": True, - "falsy_bool_property": False, - "list_property": ["some_value", True, False], - }, - }, - refs=[ephemeral_model], - tags=["uses_ephemeral"], - path="subdirectory/table_model.sql", - ) - - -@pytest.fixture -def table_model_py(seed): - return make_model( - "pkg", - "table_model_py", - 'select * from {{ ref("seed") }}', - config_kwargs={"materialized": "table"}, - refs=[seed], - tags=[], - path="subdirectory/table_model.py", - ) - - -@pytest.fixture -def table_model_csv(seed): - return make_model( - "pkg", - "table_model_csv", - 'select * from {{ ref("seed") }}', - config_kwargs={"materialized": "table"}, - refs=[seed], - tags=[], - path="subdirectory/table_model.csv", - ) - - -@pytest.fixture -def ext_source(): - return make_source( - "ext", - "ext_raw", - "ext_source", - ) - - -@pytest.fixture -def ext_source_2(): - return make_source( - "ext", - "ext_raw", - "ext_source_2", - ) - - -@pytest.fixture -def ext_source_other(): - return make_source( - "ext", - "raw", - "ext_source", - ) - - -@pytest.fixture -def ext_source_other_2(): - return make_source( - "ext", - "raw", - "ext_source_2", - ) - - -@pytest.fixture -def ext_model(ext_source): - return make_model( - "ext", - "ext_model", - 'select * from {{ source("ext_raw", "ext_source") }}', - sources=[ext_source], - ) - - -@pytest.fixture -def union_model(seed, ext_source): - return make_model( - "pkg", - "union_model", - 'select * from {{ ref("seed") }} union all select * from {{ source("ext_raw", "ext_source") }}', - config_kwargs={"materialized": "table"}, - refs=[seed], - sources=[ext_source], - fqn_extras=["unions"], - path="subdirectory/union_model.sql", - tags=["unions"], - ) - - -@pytest.fixture -def versioned_model_v1(seed): - return make_model( - "pkg", - "versioned_model", - 'select * from {{ ref("seed") }}', - 
config_kwargs={"materialized": "table"}, - refs=[seed], - sources=[], - path="subdirectory/versioned_model_v1.sql", - version=1, - latest_version=2, - ) - - -@pytest.fixture -def versioned_model_v2(seed): - return make_model( - "pkg", - "versioned_model", - 'select * from {{ ref("seed") }}', - config_kwargs={"materialized": "table"}, - refs=[seed], - sources=[], - path="subdirectory/versioned_model_v2.sql", - version=2, - latest_version=2, - ) - - -@pytest.fixture -def versioned_model_v3(seed): - return make_model( - "pkg", - "versioned_model", - 'select * from {{ ref("seed") }}', - config_kwargs={"materialized": "table"}, - refs=[seed], - sources=[], - path="subdirectory/versioned_model_v3.sql", - version="3", - latest_version=2, - ) - - -@pytest.fixture -def versioned_model_v4_nested_dir(seed): - return make_model( - "pkg", - "versioned_model", - 'select * from {{ ref("seed") }}', - config_kwargs={"materialized": "table"}, - refs=[seed], - sources=[], - path="subdirectory/nested_dir/versioned_model_v3.sql", - version="4", - latest_version=2, - fqn_extras=["nested_dir"], - ) - - -@pytest.fixture -def table_id_unique(table_model): - return make_unique_test("pkg", table_model, "id") - - -@pytest.fixture -def table_id_not_null(table_model): - return make_not_null_test("pkg", table_model, "id") - - -@pytest.fixture -def view_id_unique(view_model): - return make_unique_test("pkg", view_model, "id") - - -@pytest.fixture -def ext_source_id_unique(ext_source): - return make_unique_test("ext", ext_source, "id") - - -@pytest.fixture -def view_test_nothing(view_model): - return make_data_test( - "pkg", - "view_test_nothing", - 'select * from {{ ref("view_model") }} limit 0', - refs=[view_model], - ) - - -# Support dots as namespace separators -@pytest.fixture -def namespaced_seed(): - return make_seed("pkg", "mynamespace.seed") - - -@pytest.fixture -def namespace_model(source): - return make_model( - "pkg", - "mynamespace.ephemeral_model", - 'select * from {{ source("raw", "seed") }}', - config_kwargs={"materialized": "ephemeral"}, - sources=[source], - ) - - -@pytest.fixture -def namespaced_union_model(seed, ext_source): - return make_model( - "pkg", - "mynamespace.union_model", - 'select * from {{ ref("mynamespace.seed") }} union all select * from {{ ref("mynamespace.ephemeral_model") }}', - config_kwargs={"materialized": "table"}, - refs=[seed], - sources=[ext_source], - fqn_extras=["unions"], - path="subdirectory/union_model.sql", - tags=["unions"], - ) - - -@pytest.fixture -def manifest( - seed, - source, - ephemeral_model, - view_model, - table_model, - table_model_py, - table_model_csv, - ext_source, - ext_model, - union_model, - versioned_model_v1, - versioned_model_v2, - versioned_model_v3, - versioned_model_v4_nested_dir, - ext_source_2, - ext_source_other, - ext_source_other_2, - table_id_unique, - table_id_not_null, - view_id_unique, - ext_source_id_unique, - view_test_nothing, - namespaced_seed, - namespace_model, - namespaced_union_model, - macro_test_unique, - macro_default_test_unique, - macro_test_not_null, - macro_default_test_not_null, -): - nodes = [ - seed, - ephemeral_model, - view_model, - table_model, - table_model_py, - table_model_csv, - union_model, - versioned_model_v1, - versioned_model_v2, - versioned_model_v3, - versioned_model_v4_nested_dir, - ext_model, - table_id_unique, - table_id_not_null, - view_id_unique, - ext_source_id_unique, - view_test_nothing, - namespaced_seed, - namespace_model, - namespaced_union_model, - ] - sources = [source, ext_source, ext_source_2, 
ext_source_other, ext_source_other_2] - macros = [ - macro_test_unique, - macro_default_test_unique, - macro_test_not_null, - macro_default_test_not_null, - ] - manifest = Manifest( - nodes={n.unique_id: n for n in nodes}, - sources={s.unique_id: s for s in sources}, - macros={m.unique_id: m for m in macros}, - docs={}, - files={}, - exposures={}, - metrics={}, - disabled=[], - selectors={}, - groups={}, - metadata=ManifestMetadata(adapter_type="postgres"), - ) - return manifest +from tests.unit.utils import replace_config +from tests.unit.utils.manifest import ( + make_exposure, + make_group, + make_macro, + make_metric, + make_model, + make_saved_query, + make_seed, + make_semantic_model, + make_unit_test, +) def search_manifest_using_method(manifest, method, selection): @@ -797,7 +49,10 @@ def search_manifest_using_method(manifest, method, selection): set(manifest.nodes) | set(manifest.sources) | set(manifest.exposures) - | set(manifest.metrics), + | set(manifest.metrics) + | set(manifest.semantic_models) + | set(manifest.saved_queries) + | set(manifest.unit_tests), selection, ) results = {manifest.expect(uid).search_name for uid in selected} @@ -822,6 +77,7 @@ def test_select_fqn(manifest): "versioned_model.v2", "versioned_model.v3", "versioned_model.v4", + "versioned_model.v12", "table_model", "table_model_py", "table_model_csv", @@ -831,6 +87,7 @@ def test_select_fqn(manifest): "mynamespace.union_model", "mynamespace.ephemeral_model", "mynamespace.seed", + "unit_test_table_model", } assert search_manifest_using_method(manifest, method, "ext") == {"ext_model"} # versions @@ -839,6 +96,7 @@ def test_select_fqn(manifest): "versioned_model.v2", "versioned_model.v3", "versioned_model.v4", + "versioned_model.v12", } assert search_manifest_using_method(manifest, method, "versioned_model.v1") == { "versioned_model.v1" @@ -855,7 +113,9 @@ def test_select_fqn(manifest): assert search_manifest_using_method(manifest, method, "*.*.*_model") == { "mynamespace.union_model", "mynamespace.ephemeral_model", + "test_semantic_model", "union_model", + "unit_test_table_model", } # multiple wildcards assert search_manifest_using_method(manifest, method, "*unions*") == { @@ -869,6 +129,7 @@ def test_select_fqn(manifest): "table_model", "table_model_py", "table_model_csv", + "unit_test_table_model", } # wildcard and ? 
(matches exactly one character) assert search_manifest_using_method(manifest, method, "*ext_m?del") == {"ext_model"} @@ -909,9 +170,7 @@ def test_select_group(manifest, view_model): manifest.groups[group.unique_id] = group change_node( manifest, - view_model.replace( - config={"materialized": "view", "group": group_name}, - ), + replace(view_model, config={"materialized": "view", "group": group_name}), ) methods = MethodManager(manifest, None) method = methods.get_method("group", []) @@ -919,15 +178,14 @@ def test_select_group(manifest, view_model): assert method.arguments == [] assert search_manifest_using_method(manifest, method, group_name) == {"view_model"} + assert search_manifest_using_method(manifest, method, "my?group") == {"view_model"} assert not search_manifest_using_method(manifest, method, "not_my_group") def test_select_access(manifest, view_model): change_node( manifest, - view_model.replace( - access="public", - ), + replace(view_model, access="public"), ) methods = MethodManager(manifest, None) method = methods.get_method("access", []) @@ -1050,6 +308,7 @@ def test_select_package(manifest): "versioned_model.v2", "versioned_model.v3", "versioned_model.v4", + "versioned_model.v12", "table_model", "table_model_py", "table_model_csv", @@ -1064,6 +323,7 @@ def test_select_package(manifest): "mynamespace.seed", "mynamespace.ephemeral_model", "mynamespace.union_model", + "unit_test_table_model", } assert search_manifest_using_method(manifest, method, "ext") == { "ext_model", @@ -1086,6 +346,33 @@ def test_select_package(manifest): } +def test_select_package_this(manifest): + new_manifest = copy.deepcopy(manifest) + + # change the package name for all nodes except ones where the unique_id contains "table_model" + for id, node in new_manifest.nodes.items(): + if "table_model" not in id: + node.package_name = "foo" + + for source in new_manifest.sources.values(): + if "table_model" not in source.unique_id: + source.package_name = "foo" + + methods = MethodManager(new_manifest, None) + method = methods.get_method("package", []) + assert isinstance(method, PackageSelectorMethod) + assert method.arguments == [] + + assert search_manifest_using_method(new_manifest, method, "this") == { + "not_null_table_model_id", + "table_model", + "table_model_csv", + "table_model_py", + "unique_table_model_id", + "unit_test_table_model", + } + + def test_select_config_materialized(manifest): methods = MethodManager(manifest, None) method = methods.get_method("config", ["materialized"]) @@ -1102,6 +389,7 @@ def test_select_config_materialized(manifest): "versioned_model.v2", "versioned_model.v3", "versioned_model.v4", + "versioned_model.v12", "mynamespace.union_model", } @@ -1175,7 +463,16 @@ def test_select_test_type(manifest): "unique_view_model_id", "unique_ext_raw_ext_source_id", } - assert search_manifest_using_method(manifest, method, "data") == {"view_test_nothing"} + assert search_manifest_using_method(manifest, method, "data") == { + "view_test_nothing", + "unique_table_model_id", + "not_null_table_model_id", + "unique_view_model_id", + "unique_ext_raw_ext_source_id", + } + assert search_manifest_using_method(manifest, method, "unit") == { + "unit_test_table_model", + } def test_select_version(manifest): @@ -1188,6 +485,7 @@ def test_select_version(manifest): assert search_manifest_using_method(manifest, method, "prerelease") == { "versioned_model.v3", "versioned_model.v4", + "versioned_model.v12", } assert search_manifest_using_method(manifest, method, "none") == { "table_model_py", @@ 
-1224,18 +522,127 @@ def test_select_metric(manifest): assert search_manifest_using_method(manifest, method, "*_metric") == {"my_metric"} -@pytest.fixture -def previous_state(manifest): +def test_select_semantic_model(manifest, table_model): + semantic_model = make_semantic_model( + "pkg", + "customer", + model=table_model, + path="_semantic_models.yml", + ) + manifest.semantic_models[semantic_model.unique_id] = semantic_model + methods = MethodManager(manifest, None) + method = methods.get_method("semantic_model", []) + assert isinstance(method, SemanticModelSelectorMethod) + assert search_manifest_using_method(manifest, method, "customer") == {"customer"} + assert not search_manifest_using_method(manifest, method, "not_customer") + assert search_manifest_using_method(manifest, method, "*omer") == {"customer"} + + +def test_select_semantic_model_by_tag(manifest, table_model): + semantic_model = make_semantic_model( + "pkg", + "customer", + model=table_model, + path="_semantic_models.yml", + ) + manifest.semantic_models[semantic_model.unique_id] = semantic_model + methods = MethodManager(manifest, None) + method = methods.get_method("tag", []) + assert isinstance(method, TagSelectorMethod) + assert method.arguments == [] + search_manifest_using_method(manifest, method, "any_tag") + + +def test_select_saved_query(manifest: Manifest) -> None: + metric = make_metric("test", "my_metric") + saved_query = make_saved_query( + "pkg", + "test_saved_query", + "my_metric", + ) + manifest.metrics[metric.unique_id] = metric + manifest.saved_queries[saved_query.unique_id] = saved_query + methods = MethodManager(manifest, None) + method = methods.get_method("saved_query", []) + assert isinstance(method, SavedQuerySelectorMethod) + assert search_manifest_using_method(manifest, method, "test_saved_query") == { + "test_saved_query" + } + assert not search_manifest_using_method(manifest, method, "not_test_saved_query") + assert search_manifest_using_method(manifest, method, "*uery") == {"test_saved_query"} + + +def test_select_saved_query_by_tag(manifest: Manifest) -> None: + metric = make_metric("test", "my_metric") + saved_query = make_saved_query( + "pkg", + "test_saved_query", + "my_metric", + ) + manifest.metrics[metric.unique_id] = metric + manifest.saved_queries[saved_query.unique_id] = saved_query + methods = MethodManager(manifest, None) + method = methods.get_method("tag", []) + assert isinstance(method, TagSelectorMethod) + assert method.arguments == [] + search_manifest_using_method(manifest, method, "any_tag") + + +def test_modified_saved_query(manifest: Manifest) -> None: + metric = make_metric("test", "my_metric") + saved_query = make_saved_query( + "pkg", + "test_saved_query", + "my_metric", + ) + manifest.metrics[metric.unique_id] = metric + manifest.saved_queries[saved_query.unique_id] = saved_query + # Create PreviousState with a saved query, this deepcopies manifest + previous_state = create_previous_state(manifest) + method = statemethod(manifest, previous_state) + + # create another metric and add to saved query + alt_metric = make_metric("test", "alt_metric") + manifest.metrics[alt_metric.unique_id] = alt_metric + saved_query.query_params.metrics.append("alt_metric") + + assert search_manifest_using_method(manifest, method, "modified") == {"test_saved_query"} + + +def test_select_unit_test(manifest: Manifest) -> None: + test_model = make_model("test", "my_model", "select 1 as id") + unit_test = make_unit_test("test", "my_unit_test", test_model) + 
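# Editor's aside (illustrative only, not part of the diff): the assertions later in
# this test exercise shell-style wildcard matching ("*", "?") of the selector value
# against the unit test's search_name, which is why both the bare name, the
# package-qualified name, and the "*nit_test" pattern all resolve to the same node.
# A minimal standalone sketch of that style of matching, using hypothetical names
# rather than dbt internals:
import fnmatch

assert fnmatch.fnmatch("my_unit_test", "*nit_test")
assert not fnmatch.fnmatch("my_unit_test", "not_test_unit_test")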
manifest.unit_tests[unit_test.unique_id] = unit_test + methods = MethodManager(manifest, None) + method = methods.get_method("unit_test", []) + + assert isinstance(method, UnitTestSelectorMethod) + assert not search_manifest_using_method(manifest, method, "not_test_unit_test") + assert search_manifest_using_method(manifest, method, "*nit_test") == {unit_test.search_name} + assert search_manifest_using_method(manifest, method, "test.my_unit_test") == { + unit_test.search_name + } + assert search_manifest_using_method(manifest, method, "my_unit_test") == { + unit_test.search_name + } + + +def create_previous_state(manifest): writable = copy.deepcopy(manifest).writable_manifest() state = PreviousState( state_path=Path("/path/does/not/exist"), target_path=Path("/path/does/not/exist"), project_root=Path("/path/does/not/exist"), ) - state.manifest = writable + state.manifest = Manifest.from_writable_manifest(writable) return state +@pytest.fixture +def previous_state(manifest): + return create_previous_state(manifest) + + def add_node(manifest, node): manifest.nodes[node.unique_id] = node @@ -1271,19 +678,19 @@ def test_select_state_no_change(manifest, previous_state): def test_select_state_nothing(manifest, previous_state): previous_state.manifest = None method = statemethod(manifest, previous_state) - with pytest.raises(dbt.exceptions.DbtRuntimeError) as exc: + with pytest.raises(dbt_common.exceptions.DbtRuntimeError) as exc: search_manifest_using_method(manifest, method, "modified") assert "no comparison manifest" in str(exc.value) - with pytest.raises(dbt.exceptions.DbtRuntimeError) as exc: + with pytest.raises(dbt_common.exceptions.DbtRuntimeError) as exc: search_manifest_using_method(manifest, method, "new") assert "no comparison manifest" in str(exc.value) - with pytest.raises(dbt.exceptions.DbtRuntimeError) as exc: + with pytest.raises(dbt_common.exceptions.DbtRuntimeError) as exc: search_manifest_using_method(manifest, method, "unmodified") assert "no comparison manifest" in str(exc.value) - with pytest.raises(dbt.exceptions.DbtRuntimeError) as exc: + with pytest.raises(dbt_common.exceptions.DbtRuntimeError) as exc: search_manifest_using_method(manifest, method, "old") assert "no comparison manifest" in str(exc.value) @@ -1301,7 +708,7 @@ def test_select_state_added_model(manifest, previous_state): def test_select_state_changed_model_sql(manifest, previous_state, view_model): - change_node(manifest, view_model.replace(raw_code="select 1 as id")) + change_node(manifest, replace(view_model, raw_code="select 1 as id")) method = statemethod(manifest, previous_state) # both of these @@ -1320,7 +727,7 @@ def test_select_state_changed_model_sql(manifest, previous_state, view_model): def test_select_state_changed_model_fqn(manifest, previous_state, view_model): change_node( - manifest, view_model.replace(fqn=view_model.fqn[:-1] + ["nested"] + view_model.fqn[-1:]) + manifest, replace(view_model, fqn=view_model.fqn[:-1] + ["nested"] + view_model.fqn[-1:]) ) method = statemethod(manifest, previous_state) assert search_manifest_using_method(manifest, method, "modified") == {"view_model"} @@ -1343,7 +750,7 @@ def test_select_state_added_seed(manifest, previous_state): def test_select_state_changed_seed_checksum_sha_to_sha(manifest, previous_state, seed): - change_node(manifest, seed.replace(checksum=FileHash.from_contents("changed"))) + change_node(manifest, replace(seed, checksum=FileHash.from_contents("changed"))) method = statemethod(manifest, previous_state) assert 
search_manifest_using_method(manifest, method, "modified") == {"seed"} assert not search_manifest_using_method(manifest, method, "new") @@ -1353,10 +760,10 @@ def test_select_state_changed_seed_checksum_sha_to_sha(manifest, previous_state, def test_select_state_changed_seed_checksum_path_to_path(manifest, previous_state, seed): change_node( previous_state.manifest, - seed.replace(checksum=FileHash(name="path", checksum=seed.original_file_path)), + replace(seed, checksum=FileHash(name="path", checksum=seed.original_file_path)), ) change_node( - manifest, seed.replace(checksum=FileHash(name="path", checksum=seed.original_file_path)) + manifest, replace(seed, checksum=FileHash(name="path", checksum=seed.original_file_path)) ) method = statemethod(manifest, previous_state) with mock.patch("dbt.contracts.graph.nodes.warn_or_error") as warn_or_error_patch: @@ -1383,7 +790,7 @@ def test_select_state_changed_seed_checksum_path_to_path(manifest, previous_stat def test_select_state_changed_seed_checksum_sha_to_path(manifest, previous_state, seed): change_node( - manifest, seed.replace(checksum=FileHash(name="path", checksum=seed.original_file_path)) + manifest, replace(seed, checksum=FileHash(name="path", checksum=seed.original_file_path)) ) method = statemethod(manifest, previous_state) with mock.patch("dbt.contracts.graph.nodes.warn_or_error") as warn_or_error_patch: @@ -1411,7 +818,7 @@ def test_select_state_changed_seed_checksum_sha_to_path(manifest, previous_state def test_select_state_changed_seed_checksum_path_to_sha(manifest, previous_state, seed): change_node( previous_state.manifest, - seed.replace(checksum=FileHash(name="path", checksum=seed.original_file_path)), + replace(seed, checksum=FileHash(name="path", checksum=seed.original_file_path)), ) method = statemethod(manifest, previous_state) with mock.patch("dbt.contracts.graph.nodes.warn_or_error") as warn_or_error_patch: @@ -1429,7 +836,7 @@ def test_select_state_changed_seed_checksum_path_to_sha(manifest, previous_state def test_select_state_changed_seed_fqn(manifest, previous_state, seed): - change_node(manifest, seed.replace(fqn=seed.fqn[:-1] + ["nested"] + seed.fqn[-1:])) + change_node(manifest, replace(seed, fqn=seed.fqn[:-1] + ["nested"] + seed.fqn[-1:])) method = statemethod(manifest, previous_state) assert search_manifest_using_method(manifest, method, "modified") == {"seed"} assert not search_manifest_using_method(manifest, method, "new") @@ -1452,7 +859,7 @@ def test_select_state_changed_seed_relation_documented(manifest, previous_state, def test_select_state_changed_seed_relation_documented_nodocs(manifest, previous_state, seed): seed_doc_relation = replace_config(seed, persist_docs={"relation": True}) - seed_doc_relation_documented = seed_doc_relation.replace(description="a description") + seed_doc_relation_documented = replace(seed_doc_relation, description="a description") change_node(previous_state.manifest, seed_doc_relation) change_node(manifest, seed_doc_relation_documented) method = statemethod(manifest, previous_state) @@ -1468,7 +875,7 @@ def test_select_state_changed_seed_relation_documented_nodocs(manifest, previous def test_select_state_changed_seed_relation_documented_withdocs(manifest, previous_state, seed): seed_doc_relation = replace_config(seed, persist_docs={"relation": True}) - seed_doc_relation_documented = seed_doc_relation.replace(description="a description") + seed_doc_relation_documented = replace(seed_doc_relation, description="a description") change_node(previous_state.manifest, 
seed_doc_relation_documented) change_node(manifest, seed_doc_relation) method = statemethod(manifest, previous_state) @@ -1496,7 +903,8 @@ def test_select_state_changed_seed_columns_documented(manifest, previous_state, def test_select_state_changed_seed_columns_documented_nodocs(manifest, previous_state, seed): seed_doc_columns = replace_config(seed, persist_docs={"columns": True}) - seed_doc_columns_documented_columns = seed_doc_columns.replace( + seed_doc_columns_documented_columns = replace( + seed_doc_columns, columns={"a": ColumnInfo(name="a", description="a description")}, ) @@ -1516,7 +924,8 @@ def test_select_state_changed_seed_columns_documented_nodocs(manifest, previous_ def test_select_state_changed_seed_columns_documented_withdocs(manifest, previous_state, seed): seed_doc_columns = replace_config(seed, persist_docs={"columns": True}) - seed_doc_columns_documented_columns = seed_doc_columns.replace( + seed_doc_columns_documented_columns = replace( + seed_doc_columns, columns={"a": ColumnInfo(name="a", description="a description")}, ) @@ -1537,8 +946,8 @@ def test_select_state_changed_seed_columns_documented_withdocs(manifest, previou def test_select_state_changed_test_macro_sql( manifest, previous_state, macro_default_test_not_null ): - manifest.macros[macro_default_test_not_null.unique_id] = macro_default_test_not_null.replace( - macro_sql="lalala" + manifest.macros[macro_default_test_not_null.unique_id] = replace( + macro_default_test_not_null, macro_sql="lalala" ) method = statemethod(manifest, previous_state) assert search_manifest_using_method(manifest, method, "modified") == { @@ -1557,7 +966,7 @@ def test_select_state_changed_test_macro_sql( def test_select_state_changed_test_macros(manifest, previous_state): changed_macro = make_macro("dbt", "changed_macro", "blablabla") add_macro(manifest, changed_macro) - add_macro(previous_state.manifest, changed_macro.replace(macro_sql="something different")) + add_macro(previous_state.manifest, replace(changed_macro, macro_sql="something different")) unchanged_macro = make_macro("dbt", "unchanged_macro", "blablabla") add_macro(manifest, unchanged_macro) @@ -1598,7 +1007,7 @@ def test_select_state_changed_test_macros(manifest, previous_state): def test_select_state_changed_test_macros_with_upstream_change(manifest, previous_state): changed_macro = make_macro("dbt", "changed_macro", "blablabla") add_macro(manifest, changed_macro) - add_macro(previous_state.manifest, changed_macro.replace(macro_sql="something different")) + add_macro(previous_state.manifest, replace(changed_macro, macro_sql="something different")) unchanged_macro1 = make_macro("dbt", "unchanged_macro", "blablabla") add_macro(manifest, unchanged_macro1) diff --git a/tests/unit/test_graph_selector_spec.py b/tests/unit/graph/test_selector_spec.py similarity index 73% rename from tests/unit/test_graph_selector_spec.py rename to tests/unit/graph/test_selector_spec.py index 8a19a8b5934..451b107d85c 100644 --- a/tests/unit/test_graph_selector_spec.py +++ b/tests/unit/graph/test_selector_spec.py @@ -1,14 +1,61 @@ +import os +from unittest.mock import patch + import pytest from dbt.exceptions import DbtRuntimeError +from dbt.graph.selector_methods import MethodName from dbt.graph.selector_spec import ( + IndirectSelection, SelectionCriteria, - SelectionIntersection, SelectionDifference, + SelectionIntersection, SelectionUnion, ) -from dbt.graph.selector_methods import MethodName -import os + + +@pytest.mark.parametrize( + "indirect_selection_value,expected_value", + [(v, v) for 
v in IndirectSelection], +) +def test_selection_criteria_default_indirect_value(indirect_selection_value, expected_value): + # Check selection criteria with indirect selection value would follow the resolved value in flags + # if indirect selection is not specified in the selection criteria. + with patch("dbt.graph.selector_spec.get_flags") as patched_get_flags: + patched_get_flags.return_value.INDIRECT_SELECTION = indirect_selection_value + patched_get_flags.INDIRECT_SELECTION = indirect_selection_value + selection_dict_without_indirect_selection_specified = { + "method": "path", + "value": "models/marts/orders.sql", + "children": False, + "parents": False, + } + selection_criteria_without_indirect_selection_specified = ( + SelectionCriteria.selection_criteria_from_dict( + selection_dict_without_indirect_selection_specified, + selection_dict_without_indirect_selection_specified, + ) + ) + assert ( + selection_criteria_without_indirect_selection_specified.indirect_selection + == expected_value + ) + selection_dict_without_indirect_selection_specified = { + "method": "path", + "value": "models/marts/orders.sql", + "children": False, + "parents": False, + "indirect_selection": "buildable", + } + selection_criteria_with_indirect_selection_specified = ( + SelectionCriteria.selection_criteria_from_dict( + selection_dict_without_indirect_selection_specified, + selection_dict_without_indirect_selection_specified, + ) + ) + assert ( + selection_criteria_with_indirect_selection_specified.indirect_selection == "buildable" + ) def test_raw_parse_simple(): diff --git a/tests/unit/materializations/incremental/test_microbatch.py b/tests/unit/materializations/incremental/test_microbatch.py new file mode 100644 index 00000000000..68521a84e1e --- /dev/null +++ b/tests/unit/materializations/incremental/test_microbatch.py @@ -0,0 +1,446 @@ +from datetime import datetime +from unittest import mock + +import pytest +import pytz +from freezegun import freeze_time + +from dbt.artifacts.resources import NodeConfig +from dbt.artifacts.resources.types import BatchSize +from dbt.materializations.incremental.microbatch import MicrobatchBuilder + + +class TestMicrobatchBuilder: + @pytest.fixture(scope="class") + def microbatch_model(self): + model = mock.Mock() + model.config = mock.MagicMock(NodeConfig) + model.config.materialized = "incremental" + model.config.incremental_strategy = "microbatch" + + return model + + @freeze_time("2024-09-05 08:56:00") + @pytest.mark.parametrize( + "is_incremental,event_time_end,expected_end_time", + [ + ( + False, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + ), + ( + True, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + ), + ( + False, + datetime(2024, 10, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 10, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + True, + datetime(2024, 10, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 10, 1, 0, 0, 0, 0, pytz.UTC), + ), + ], + ) + def test_build_end_time( + self, microbatch_model, is_incremental, event_time_end, expected_end_time + ): + microbatch_builder = MicrobatchBuilder( + model=microbatch_model, + is_incremental=is_incremental, + event_time_start=None, + event_time_end=event_time_end, + ) + + assert microbatch_builder.build_end_time() == expected_end_time + + @pytest.mark.parametrize( + "is_incremental,event_time_start,checkpoint,batch_size,lookback,expected_start_time", + [ + ( + False, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.day, + 0, + None, + ), + # BatchSize.year + ( + False, + datetime(2024, 9, 5, 8, 
56, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.year, + 0, + datetime(2024, 1, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + False, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.year, + # Offset not applied when event_time_start provided + 1, + datetime(2024, 1, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + False, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.year, + 0, + # is_incremental=False + no start_time -> None + None, + ), + ( + True, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.year, + 0, + datetime(2024, 1, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + True, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.year, + 1, + datetime(2023, 1, 1, 0, 0, 0, 0, pytz.UTC), + ), + # BatchSize.month + ( + False, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.month, + 0, + datetime(2024, 9, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + False, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.month, + # Offset not applied when event_time_start provided + 1, + datetime(2024, 9, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + False, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.month, + 0, + # is_incremental=False + no start_time -> None + None, + ), + ( + True, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.month, + 0, + datetime(2024, 9, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + True, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.month, + 1, + datetime(2024, 8, 1, 0, 0, 0, 0, pytz.UTC), + ), + # BatchSize.day + ( + False, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.day, + 0, + datetime(2024, 9, 5, 0, 0, 0, 0, pytz.UTC), + ), + ( + False, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.day, + # Offset not applied when event_time_start provided + 1, + datetime(2024, 9, 5, 0, 0, 0, 0, pytz.UTC), + ), + ( + False, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.day, + 0, + # is_incremental=False + no start_time -> None + None, + ), + ( + True, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.day, + 0, + datetime(2024, 9, 5, 0, 0, 0, 0, pytz.UTC), + ), + ( + True, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.day, + 1, + datetime(2024, 9, 4, 0, 0, 0, 0, pytz.UTC), + ), + # BatchSize.hour + ( + False, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.hour, + 0, + datetime(2024, 9, 5, 8, 0, 0, 0, pytz.UTC), + ), + ( + False, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.hour, + # Offset not applied when event_time_start provided + 1, + datetime(2024, 9, 5, 8, 0, 0, 0, pytz.UTC), + ), + ( + False, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.hour, + 0, + # is_incremental=False + no start_time -> None + None, + ), + ( + True, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.hour, + 0, + datetime(2024, 9, 5, 8, 0, 0, 0, pytz.UTC), + ), + ( + True, + None, + datetime(2024, 9, 5, 8, 56, 0, 0, pytz.UTC), + BatchSize.hour, + 1, + datetime(2024, 9, 5, 7, 0, 0, 0, pytz.UTC), + ), + ], + ) + def test_build_start_time( + self, + microbatch_model, + is_incremental, + event_time_start, + checkpoint, + batch_size, 
+ lookback, + expected_start_time, + ): + microbatch_model.config.batch_size = batch_size + microbatch_model.config.lookback = lookback + microbatch_builder = MicrobatchBuilder( + model=microbatch_model, + is_incremental=is_incremental, + event_time_start=event_time_start, + event_time_end=None, + ) + + assert microbatch_builder.build_start_time(checkpoint) == expected_start_time + + @pytest.mark.parametrize( + "start,end,batch_size,expected_batches", + [ + # BatchSize.year + ( + datetime(2024, 1, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2026, 1, 7, 3, 56, 0, 0, pytz.UTC), + BatchSize.year, + [ + ( + datetime(2024, 1, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2025, 1, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2025, 1, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2026, 1, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2026, 1, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2026, 1, 7, 3, 56, 0, 0, pytz.UTC), + ), + ], + ), + # BatchSize.month + ( + datetime(2024, 9, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 11, 7, 3, 56, 0, 0, pytz.UTC), + BatchSize.month, + [ + ( + datetime(2024, 9, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 10, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 10, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 11, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 11, 1, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 11, 7, 3, 56, 0, 0, pytz.UTC), + ), + ], + ), + # BatchSize.day + ( + datetime(2024, 9, 5, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 9, 7, 3, 56, 0, 0, pytz.UTC), + BatchSize.day, + [ + ( + datetime(2024, 9, 5, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 9, 6, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 6, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 9, 7, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 7, 0, 0, 0, 0, pytz.UTC), + datetime(2024, 9, 7, 3, 56, 0, 0, pytz.UTC), + ), + ], + ), + # BatchSize.hour + ( + datetime(2024, 9, 5, 1, 0, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 3, 56, 0, 0, pytz.UTC), + BatchSize.hour, + [ + ( + datetime(2024, 9, 5, 1, 0, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 2, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 2, 0, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 3, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 0, 0, 0, pytz.UTC), + datetime(2024, 9, 5, 3, 56, 0, 0, pytz.UTC), + ), + ], + ), + ], + ) + def test_build_batches(self, microbatch_model, start, end, batch_size, expected_batches): + microbatch_model.config.batch_size = batch_size + microbatch_builder = MicrobatchBuilder( + model=microbatch_model, is_incremental=True, event_time_start=None, event_time_end=None + ) + + actual_batches = microbatch_builder.build_batches(start, end) + assert len(actual_batches) == len(expected_batches) + assert actual_batches == expected_batches + + @pytest.mark.parametrize( + "timestamp,batch_size,offset,expected_timestamp", + [ + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.year, + 1, + datetime(2025, 1, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.year, + -1, + datetime(2023, 1, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.month, + 1, + datetime(2024, 10, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.month, + -1, + datetime(2024, 8, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.day, + 1, + datetime(2024, 9, 6, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.day, + -1, + datetime(2024, 9, 4, 0, 0, 0, 0, 
pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.hour, + 1, + datetime(2024, 9, 5, 4, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.hour, + -1, + datetime(2024, 9, 5, 2, 0, 0, 0, pytz.UTC), + ), + ], + ) + def test_offset_timestamp(self, timestamp, batch_size, offset, expected_timestamp): + assert ( + MicrobatchBuilder.offset_timestamp(timestamp, batch_size, offset) == expected_timestamp + ) + + @pytest.mark.parametrize( + "timestamp,batch_size,expected_timestamp", + [ + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.year, + datetime(2024, 1, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.month, + datetime(2024, 9, 1, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.day, + datetime(2024, 9, 5, 0, 0, 0, 0, pytz.UTC), + ), + ( + datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC), + BatchSize.hour, + datetime(2024, 9, 5, 3, 0, 0, 0, pytz.UTC), + ), + ], + ) + def test_truncate_timestamp(self, timestamp, batch_size, expected_timestamp): + assert MicrobatchBuilder.truncate_timestamp(timestamp, batch_size) == expected_timestamp diff --git a/tests/unit/mock_adapter.py b/tests/unit/mock_adapter.py index d3bdf87b2e4..458f1dbd570 100644 --- a/tests/unit/mock_adapter.py +++ b/tests/unit/mock_adapter.py @@ -1,7 +1,7 @@ +from contextlib import contextmanager from unittest import mock from dbt.adapters.base import BaseAdapter -from contextlib import contextmanager def adapter_factory(): @@ -33,6 +33,9 @@ def rename_relation(self, *args, **kwargs): def get_columns_in_relation(self, *args, **kwargs): return self.responder.get_columns_in_relation(*args, **kwargs) + def get_catalog_for_single_relation(self, *args, **kwargs): + return self.responder.get_catalog_for_single_relation(*args, **kwargs) + def expand_column_types(self, *args, **kwargs): return self.responder.expand_column_types(*args, **kwargs) @@ -55,6 +58,9 @@ def convert_text_type(self, *args, **kwargs): def convert_number_type(self, *args, **kwargs): return self.responder.convert_number_type(*args, **kwargs) + def convert_integer_type(self, *args, **kwargs): + return self.responder.convert_integer_type(*args, **kwargs) + def convert_boolean_type(self, *args, **kwargs): return self.responder.convert_boolean_type(*args, **kwargs) diff --git a/tests/unit/parser/__init__.py b/tests/unit/parser/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/test_docs_blocks.py b/tests/unit/parser/test_docs.py similarity index 96% rename from tests/unit/test_docs_blocks.py rename to tests/unit/parser/test_docs.py index 2a277d82030..2eb5d12c383 100644 --- a/tests/unit/test_docs_blocks.py +++ b/tests/unit/parser/test_docs.py @@ -1,15 +1,17 @@ import os import unittest +from argparse import Namespace -from dbt.contracts.files import SourceFile, FileHash, FilePath +from dbt.contracts.files import FileHash, FilePath, SourceFile from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import Documentation +from dbt.flags import set_from_args from dbt.node_types import NodeType from dbt.parser import docs from dbt.parser.search import FileBlock +from tests.unit.utils import config_from_parts_or_dicts -from .utils import config_from_parts_or_dicts - +set_from_args(Namespace(WARN_ERROR=False), None) SNOWPLOW_SESSIONS_DOCS = r""" This table contains one record for every session recorded by Snowplow. 
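The MicrobatchBuilder tests above pin down how a timestamp is truncated to the start of its batch and offset by whole batches for each BatchSize (year, month, day, hour). As a rough, standard-library-only sketch of that arithmetic (an illustration under stated assumptions, not dbt's implementation; the function names and signatures here are invented for the example):

from datetime import datetime, timedelta

import pytz


def truncate(ts: datetime, batch_size: str) -> datetime:
    # Floor a timestamp to the start of its year/month/day/hour batch.
    if batch_size == "year":
        return ts.replace(month=1, day=1, hour=0, minute=0, second=0, microsecond=0)
    if batch_size == "month":
        return ts.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
    if batch_size == "day":
        return ts.replace(hour=0, minute=0, second=0, microsecond=0)
    return ts.replace(minute=0, second=0, microsecond=0)  # hour


def offset(ts: datetime, batch_size: str, n: int) -> datetime:
    # Move a timestamp forward/backward by n whole batches, landing on a boundary.
    ts = truncate(ts, batch_size)
    if batch_size == "hour":
        return ts + timedelta(hours=n)
    if batch_size == "day":
        return ts + timedelta(days=n)
    if batch_size == "month":
        month = ts.month - 1 + n
        return ts.replace(year=ts.year + month // 12, month=month % 12 + 1)
    return ts.replace(year=ts.year + n)  # year


ts = datetime(2024, 9, 5, 3, 56, 1, 1, pytz.UTC)
assert truncate(ts, "month") == datetime(2024, 9, 1, tzinfo=pytz.UTC)
assert offset(ts, "day", -1) == datetime(2024, 9, 4, tzinfo=pytz.UTC)
assert offset(ts, "year", 1) == datetime(2025, 1, 1, tzinfo=pytz.UTC)

The final batch produced by build_batches is then simply clamped to the overall end time, which is why the last tuple in each expected_batches list in the tests ends at the raw end timestamp rather than on a batch boundary.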
diff --git a/tests/unit/parser/test_manifest.py b/tests/unit/parser/test_manifest.py new file mode 100644 index 00000000000..e01b41ce5b2 --- /dev/null +++ b/tests/unit/parser/test_manifest.py @@ -0,0 +1,240 @@ +from argparse import Namespace +from unittest.mock import MagicMock, patch + +import pytest +from pytest_mock import MockerFixture + +from dbt.artifacts.resources.base import FileHash +from dbt.config import RuntimeConfig +from dbt.contracts.graph.manifest import Manifest, ManifestStateCheck +from dbt.events.types import UnusedResourceConfigPath +from dbt.flags import set_from_args +from dbt.parser.manifest import ManifestLoader, _warn_for_unused_resource_config_paths +from dbt.parser.read_files import FileDiff +from dbt.tracking import User +from dbt_common.events.event_manager_client import add_callback_to_manager +from tests.utils import EventCatcher + + +class TestPartialParse: + @patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") + @patch("dbt.parser.manifest.os.path.exists") + @patch("dbt.parser.manifest.open") + def test_partial_parse_file_path(self, patched_open, patched_os_exist, patched_state_check): + mock_project = MagicMock(RuntimeConfig) + mock_project.project_target_path = "mock_target_path" + patched_os_exist.return_value = True + ManifestLoader(mock_project, {}) + # by default we use the project_target_path + patched_open.assert_called_with("mock_target_path/partial_parse.msgpack", "rb") + set_from_args(Namespace(partial_parse_file_path="specified_partial_parse_path"), {}) + ManifestLoader(mock_project, {}) + # if specified in flags, we use the specified path + patched_open.assert_called_with("specified_partial_parse_path", "rb") + + def test_profile_hash_change(self, mock_project): + # This test validate that the profile_hash is updated when the connection keys change + profile_hash = "750bc99c1d64ca518536ead26b28465a224be5ffc918bf2a490102faa5a1bcf5" + mock_project.credentials.connection_info.return_value = "test" + manifest = ManifestLoader(mock_project, {}) + assert manifest.manifest.state_check.profile_hash.checksum == profile_hash + mock_project.credentials.connection_info.return_value = "test1" + manifest = ManifestLoader(mock_project, {}) + assert manifest.manifest.state_check.profile_hash.checksum != profile_hash + + @patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") + @patch("dbt.parser.manifest.os.path.exists") + @patch("dbt.parser.manifest.open") + def test_partial_parse_by_version( + self, + patched_open, + patched_os_exist, + patched_state_check, + runtime_config: RuntimeConfig, + manifest: Manifest, + ): + file_hash = FileHash.from_contents("test contests") + manifest.state_check = ManifestStateCheck( + vars_hash=file_hash, + profile_hash=file_hash, + profile_env_vars_hash=file_hash, + project_env_vars_hash=file_hash, + ) + # we need a loader to compare the two manifests + loader = ManifestLoader(runtime_config, {runtime_config.project_name: runtime_config}) + loader.manifest = manifest.deepcopy() + + is_partial_parsable, _ = loader.is_partial_parsable(manifest) + assert is_partial_parsable + + manifest.metadata.dbt_version = "0.0.1a1" + is_partial_parsable, _ = loader.is_partial_parsable(manifest) + assert not is_partial_parsable + + manifest.metadata.dbt_version = "99999.99.99" + is_partial_parsable, _ = loader.is_partial_parsable(manifest) + assert not is_partial_parsable + + +class TestFailedPartialParse: + @patch("dbt.tracking.track_partial_parser") + @patch("dbt.tracking.active_user") + 
@patch("dbt.parser.manifest.PartialParsing") + @patch("dbt.parser.manifest.ManifestLoader.read_manifest_for_partial_parse") + @patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") + def test_partial_parse_safe_update_project_parser_files_partially( + self, + patched_state_check, + patched_read_manifest_for_partial_parse, + patched_partial_parsing, + patched_active_user, + patched_track_partial_parser, + ): + mock_instance = MagicMock() + mock_instance.skip_parsing.return_value = False + mock_instance.get_parsing_files.side_effect = KeyError("Whoopsie!") + patched_partial_parsing.return_value = mock_instance + + mock_project = MagicMock(RuntimeConfig) + mock_project.project_target_path = "mock_target_path" + + mock_saved_manifest = MagicMock(Manifest) + mock_saved_manifest.files = {} + patched_read_manifest_for_partial_parse.return_value = mock_saved_manifest + + loader = ManifestLoader(mock_project, {}) + loader.safe_update_project_parser_files_partially({}) + + patched_track_partial_parser.assert_called_once() + exc_info = patched_track_partial_parser.call_args[0][0] + assert "traceback" in exc_info + assert "exception" in exc_info + assert "code" in exc_info + assert "location" in exc_info + assert "full_reparse_reason" in exc_info + assert "KeyError: 'Whoopsie!'" == exc_info["exception"] + assert isinstance(exc_info["code"], str) or isinstance(exc_info["code"], type(None)) + + +class TestGetFullManifest: + @pytest.fixture + def set_required_mocks( + self, mocker: MockerFixture, manifest: Manifest, mock_adapter: MagicMock + ): + mocker.patch("dbt.parser.manifest.get_adapter").return_value = mock_adapter + mocker.patch("dbt.parser.manifest.ManifestLoader.load").return_value = manifest + mocker.patch("dbt.parser.manifest._check_manifest").return_value = None + mocker.patch("dbt.parser.manifest.ManifestLoader.save_macros_to_adapter").return_value = ( + None + ) + mocker.patch("dbt.tracking.active_user").return_value = User(None) + + def test_write_perf_info( + self, + mock_project: MagicMock, + mocker: MockerFixture, + set_required_mocks, + ) -> None: + write_perf_info = mocker.patch("dbt.parser.manifest.ManifestLoader.write_perf_info") + + ManifestLoader.get_full_manifest( + config=mock_project, + # write_perf_info=False let it default instead + ) + assert not write_perf_info.called + + ManifestLoader.get_full_manifest(config=mock_project, write_perf_info=False) + assert not write_perf_info.called + + ManifestLoader.get_full_manifest(config=mock_project, write_perf_info=True) + assert write_perf_info.called + + def test_reset( + self, + mock_project: MagicMock, + mock_adapter: MagicMock, + set_required_mocks, + ) -> None: + + ManifestLoader.get_full_manifest( + config=mock_project, + # reset=False let it default instead + ) + assert not mock_project.clear_dependencies.called + assert not mock_adapter.clear_macro_resolver.called + + ManifestLoader.get_full_manifest(config=mock_project, reset=False) + assert not mock_project.clear_dependencies.called + assert not mock_adapter.clear_macro_resolver.called + + ManifestLoader.get_full_manifest(config=mock_project, reset=True) + assert mock_project.clear_dependencies.called + assert mock_adapter.clear_macro_resolver.called + + def test_partial_parse_file_diff_flag( + self, + mock_project: MagicMock, + mocker: MockerFixture, + set_required_mocks, + ) -> None: + + # FileDiff.from_dict is only called if PARTIAL_PARSE_FILE_DIFF == False + # So we can track this function call to check if setting PARTIAL_PARSE_FILE_DIFF + # works 
appropriately + mock_file_diff = mocker.patch("dbt.parser.read_files.FileDiff.from_dict") + mock_file_diff.return_value = FileDiff([], [], []) + + ManifestLoader.get_full_manifest(config=mock_project) + assert not mock_file_diff.called + + set_from_args(Namespace(PARTIAL_PARSE_FILE_DIFF=True), {}) + ManifestLoader.get_full_manifest(config=mock_project) + assert not mock_file_diff.called + + set_from_args(Namespace(PARTIAL_PARSE_FILE_DIFF=False), {}) + ManifestLoader.get_full_manifest(config=mock_project) + assert mock_file_diff.called + + +class TestWarnUnusedConfigs: + @pytest.mark.parametrize( + "resource_type,path,expect_used", + [ + ("data_tests", "unused_path", False), + ("data_tests", "minimal", True), + ("metrics", "unused_path", False), + ("metrics", "test", True), + ("models", "unused_path", False), + ("models", "pkg", True), + ("saved_queries", "unused_path", False), + ("saved_queries", "test", True), + ("seeds", "unused_path", False), + ("seeds", "pkg", True), + ("semantic_models", "unused_path", False), + ("semantic_models", "test", True), + ("sources", "unused_path", False), + ("sources", "pkg", True), + ("unit_tests", "unused_path", False), + ("unit_tests", "pkg", True), + ], + ) + def test_warn_for_unused_resource_config_paths( + self, + resource_type: str, + path: str, + expect_used: bool, + manifest: Manifest, + runtime_config: RuntimeConfig, + ) -> None: + catcher = EventCatcher(UnusedResourceConfigPath) + add_callback_to_manager(catcher.catch) + + setattr(runtime_config, resource_type, {path: {"+materialized": "table"}}) + + _warn_for_unused_resource_config_paths(manifest=manifest, config=runtime_config) + + if expect_used: + assert len(catcher.caught_events) == 0 + else: + assert len(catcher.caught_events) == 1 + assert f"{resource_type}.{path}" in str(catcher.caught_events[0].data) diff --git a/tests/unit/test_parser.py b/tests/unit/parser/test_parser.py similarity index 92% rename from tests/unit/test_parser.py rename to tests/unit/parser/test_parser.py index e04f93c367b..20a2c9e8c83 100644 --- a/tests/unit/test_parser.py +++ b/tests/unit/parser/test_parser.py @@ -1,5 +1,6 @@ import os import unittest +from argparse import Namespace from copy import deepcopy from unittest import mock @@ -8,52 +9,56 @@ import dbt.flags import dbt.parser from dbt import tracking +from dbt.artifacts.resources import ModelConfig, RefArgs from dbt.context.context_config import ContextConfig -from dbt.contracts.files import SourceFile, FileHash, FilePath, SchemaSourceFile +from dbt.contracts.files import FileHash, FilePath, SchemaSourceFile, SourceFile from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.model_config import NodeConfig, TestConfig, SnapshotConfig +from dbt.contracts.graph.model_config import NodeConfig, SnapshotConfig, TestConfig from dbt.contracts.graph.nodes import ( - ModelNode, - Macro, + AnalysisNode, DependsOn, + Macro, + ModelNode, SingularTestNode, SnapshotNode, - AnalysisNode, UnpatchedSourceDefinition, - RefArgs, ) -from dbt.exceptions import CompilationError, ParsingError +from dbt.exceptions import CompilationError, ParsingError, SchemaConfigError +from dbt.flags import set_from_args from dbt.node_types import NodeType from dbt.parser import ( - ModelParser, - MacroParser, - SingularTestParser, + AnalysisParser, GenericTestParser, + MacroParser, + ModelParser, SchemaParser, + SingularTestParser, SnapshotParser, - AnalysisParser, ) from dbt.parser.common import YamlBlock from dbt.parser.models import ( _get_config_call_dict, - 
_shift_sources, _get_exp_sample_result, - _get_stable_sample_result, _get_sample_result, + _get_stable_sample_result, + _shift_sources, ) from dbt.parser.schemas import ( - TestablePatchParser, - ModelPatchParser, - SourceParser, AnalysisPatchParser, MacroPatchParser, + ModelPatchParser, + SourceParser, + TestablePatchParser, yaml_from_file, ) from dbt.parser.search import FileBlock from dbt.parser.sources import SourcePatcher -from .utils import config_from_parts_or_dicts, normalize, generate_name_macros, MockNode -from dbt.flags import set_from_args -from argparse import Namespace +from tests.unit.utils import ( + MockNode, + config_from_parts_or_dicts, + generate_name_macros, + normalize, +) set_from_args(Namespace(WARN_ERROR=False), None) @@ -157,7 +162,7 @@ def tearDown(self): self.parser_patcher.stop() self.patcher.stop() - def file_block_for(self, data: str, filename: str, searched: str): + def source_file_for(self, data: str, filename: str, searched: str): root_dir = get_abs_os_path("./dbt_packages/snowplow") filename = normalize(filename) path = FilePath( @@ -173,16 +178,21 @@ def file_block_for(self, data: str, filename: str, searched: str): project_name="snowplow", ) source_file.contents = data + return source_file + + def file_block_for(self, data: str, filename: str, searched: str): + source_file = self.source_file_for(data, filename, searched) return FileBlock(file=source_file) def assert_has_manifest_lengths( - self, manifest, macros=3, nodes=0, sources=0, docs=0, disabled=0 + self, manifest, macros=3, nodes=0, sources=0, docs=0, disabled=0, unit_tests=0 ): self.assertEqual(len(manifest.macros), macros) self.assertEqual(len(manifest.nodes), nodes) self.assertEqual(len(manifest.sources), sources) self.assertEqual(len(manifest.docs), docs) self.assertEqual(len(manifest.disabled), disabled) + self.assertEqual(len(manifest.unit_tests), unit_tests) def assertEqualNodes(node_one, node_two): @@ -223,6 +233,23 @@ def assertEqualNodes(node_one, node_two): - name: my_table """ + +MULTIPLE_TABLE_SOURCE_META = """ +sources: + - name: my_source + meta: + source_field: source_value + shared_field: shared_field_default + tables: + - name: my_table_shared_field_default + meta: + table_field: table_value + - name: my_table_shared_field_override + meta: + shared_field: shared_field_table_override + table_field: table_value +""" + SINGLE_TABLE_SOURCE_TESTS = """ sources: - name: my_source @@ -231,7 +258,7 @@ def assertEqualNodes(node_one, node_two): description: A description of my table columns: - name: color - tests: + data_tests: - not_null: severity: WARN - accepted_values: @@ -245,7 +272,7 @@ def assertEqualNodes(node_one, node_two): columns: - name: color description: The color value - tests: + data_tests: - not_null: severity: WARN - accepted_values: @@ -254,18 +281,34 @@ def assertEqualNodes(node_one, node_two): arg: 100 """ +SINGLE_TABLE_MODEL_TESTS_WRONG_SEVERITY = """ +models: + - name: my_model + description: A description of my model + columns: + - name: color + description: The color value + data_tests: + - not_null: + severity: WARNING + - accepted_values: + values: ['red', 'blue', 'green'] + - foreign_package.test_case: + arg: 100 +""" + MULTIPLE_TABLE_VERSIONED_MODEL_TESTS = """ models: - name: my_model description: A description of my model - tests: + data_tests: - unique: column_name: color columns: - name: color description: The color value - tests: + data_tests: - not_null: severity: WARN - name: location_id @@ -273,7 +316,7 @@ def assertEqualNodes(node_one, node_two): 
versions: - v: 1 defined_in: arbitrary_file_name - tests: [] + data_tests: [] columns: - include: '*' - name: extra @@ -352,7 +395,7 @@ def assertEqualNodes(node_one, node_two): - name: my_table columns: - name: id - tests: + data_tests: - not_null - unique """ @@ -371,8 +414,8 @@ def setUp(self): manifest=self.manifest, ) - def file_block_for(self, data, filename): - return super().file_block_for(data, filename, "models") + def file_block_for(self, data, filename, searched="models"): + return super().file_block_for(data, filename, searched) def yaml_block_for(self, test_yml: str, filename: str): file_block = self.file_block_for(data=test_yml, filename=filename) @@ -414,6 +457,41 @@ def test__parse_basic_source(self): assert src.resource_type == NodeType.Source assert src.fqn == ["snowplow", "my_source", "my_table"] + @mock.patch("dbt.parser.sources.get_adapter") + def test__parse_basic_source_meta(self, mock_get_adapter): + block = self.file_block_for(MULTIPLE_TABLE_SOURCE_META, "test_one.yml") + dct = yaml_from_file(block.file) + self.parser.parse_file(block, dct) + self.assert_has_manifest_lengths(self.parser.manifest, sources=2) + + unpatched_src_default = self.parser.manifest.sources[ + "source.snowplow.my_source.my_table_shared_field_default" + ] + src_default = self.source_patcher.parse_source(unpatched_src_default) + assert src_default.meta == { + "source_field": "source_value", + "shared_field": "shared_field_default", + "table_field": "table_value", + } + assert src_default.source_meta == { + "source_field": "source_value", + "shared_field": "shared_field_default", + } + + unpatched_src_override = self.parser.manifest.sources[ + "source.snowplow.my_source.my_table_shared_field_override" + ] + src_override = self.source_patcher.parse_source(unpatched_src_override) + assert src_override.meta == { + "source_field": "source_value", + "shared_field": "shared_field_table_override", + "table_field": "table_value", + } + assert src_override.source_meta == { + "source_field": "source_value", + "shared_field": "shared_field_default", + } + def test__read_basic_source_tests(self): block = self.yaml_block_for(SINGLE_TABLE_SOURCE_TESTS, "test_one.yml") analysis_tests = AnalysisPatchParser(self.parser, block, "analyses").parse().test_blocks @@ -464,7 +542,7 @@ def test__parse_basic_source_tests(self): file_id = "snowplow://" + normalize("models/test_one.yml") self.assertIn(file_id, self.parser.manifest.files) - self.assertEqual(self.parser.manifest.files[file_id].tests, {}) + self.assertEqual(self.parser.manifest.files[file_id].data_tests, {}) self.assertEqual( self.parser.manifest.files[file_id].sources, ["source.snowplow.my_source.my_table"] ) @@ -492,7 +570,7 @@ def test__read_source_patch(self): self.assertEqual(table.name, "my_table") self.assertIsNone(table.description) self.assertEqual(len(table.columns), 1) - self.assertEqual(len(table.columns[0].tests), 2) + self.assertEqual(len(table.columns[0].data_tests), 2) class SchemaParserModelsTest(SchemaParserTest): @@ -506,9 +584,11 @@ def setUp(self): sources=[], patch_path=None, ) + source_file = self.source_file_for("", "my_model.sql", "models") nodes = {my_model_node.unique_id: my_model_node} macros = {m.unique_id: m for m in generate_name_macros("root")} self.manifest = Manifest(nodes=nodes, macros=macros) + self.manifest.files[source_file.file_id] = source_file self.manifest.ref_lookup self.parser = SchemaParser( project=self.snowplow_project_config, @@ -523,6 +603,14 @@ def test__read_basic_model_tests(self): 
self.assertEqual(len(list(self.parser.manifest.sources)), 0) self.assertEqual(len(list(self.parser.manifest.nodes)), 4) + def test__read_basic_model_tests_wrong_severity(self): + block = self.yaml_block_for(SINGLE_TABLE_MODEL_TESTS_WRONG_SEVERITY, "test_one.yml") + dct = yaml_from_file(block.file) + with self.assertRaisesRegex( + SchemaConfigError, "Severity must be either 'warn' or 'error'. Got 'WARNING'" + ): + self.parser.parse_file(block, dct) + def test__parse_basic_model_tests(self): block = self.file_block_for(SINGLE_TABLE_MODEL_TESTS, "test_one.yml") self.parser.manifest.files[block.file.file_id] = block.file @@ -612,7 +700,7 @@ class SchemaParserVersionedModels(SchemaParserTest): def setUp(self): super().setUp() my_model_v1_node = MockNode( - package="root", + package="snowplow", name="arbitrary_file_name", config=mock.MagicMock(enabled=True), refs=[], @@ -620,8 +708,9 @@ def setUp(self): patch_path=None, file_id="snowplow://models/arbitrary_file_name.sql", ) + my_model_v1_source_file = self.source_file_for("", "arbitrary_file_name.sql", "models") my_model_v2_node = MockNode( - package="root", + package="snowplow", name="my_model_v2", config=mock.MagicMock(enabled=True), refs=[], @@ -629,12 +718,16 @@ def setUp(self): patch_path=None, file_id="snowplow://models/my_model_v2.sql", ) + my_model_v2_source_file = self.source_file_for("", "my_model_v2.sql", "models") nodes = { my_model_v1_node.unique_id: my_model_v1_node, my_model_v2_node.unique_id: my_model_v2_node, } macros = {m.unique_id: m for m in generate_name_macros("root")} - files = {node.file_id: mock.MagicMock(nodes=[node.unique_id]) for node in nodes.values()} + files = { + my_model_v1_source_file.file_id: my_model_v1_source_file, + my_model_v2_source_file.file_id: my_model_v2_source_file, + } self.manifest = Manifest(nodes=nodes, macros=macros, files=files) self.manifest.ref_lookup self.parser = SchemaParser( @@ -797,6 +890,7 @@ def model(dbt, session): a_dict = {'test2': dbt.ref('test2')} df5 = {'test2': dbt.ref('test3')} df6 = [dbt.ref("test4")] + f"{dbt.ref('test5')}" df = df0.limit(2) return df @@ -860,6 +954,17 @@ def model(dbt, session): return dbt.ref("some_model"), dbt.ref("some_other_model") """ +python_model_f_string = """ +# my_python_model.py +import pandas as pd + +def model(dbt, fal): + dbt.config(materialized="table") + print(f"my var: {dbt.config.get('my_var')}") # Prints "my var: None" + df: pd.DataFrame = dbt.ref("some_model") + return df +""" + python_model_no_return = """ def model(dbt, session): dbt.config(materialized='table') @@ -928,7 +1033,7 @@ def test_basic(self): fqn=["snowplow", "nested", "model_1"], package_name="snowplow", original_file_path=normalize("models/nested/model_1.sql"), - config=NodeConfig(materialized="table"), + config=ModelConfig(materialized="table"), path=normalize("nested/model_1.sql"), language="sql", raw_code=sql_model, @@ -966,7 +1071,7 @@ def test_python_model_parse(self): fqn=["snowplow", "nested", "py_model"], package_name="snowplow", original_file_path=normalize("models/nested/py_model.py"), - config=NodeConfig(materialized="table", packages=python_packages), + config=ModelConfig(materialized="table", packages=python_packages), # config.packages = ['textblob'] path=normalize("nested/py_model.py"), language="python", @@ -984,6 +1089,7 @@ def test_python_model_parse(self): RefArgs("test2"), RefArgs("test3"), RefArgs("test4"), + RefArgs("test5"), ], sources=[["test", "table1"]], ) @@ -1000,6 +1106,14 @@ def test_python_model_config(self): node = 
list(self.parser.manifest.nodes.values())[0] self.assertEqual(node.config.to_dict()["config_keys_used"], ["param_1", "param_2"]) + def test_python_model_f_string_config(self): + block = self.file_block_for(python_model_f_string, "nested/py_model.py") + self.parser.manifest.files[block.file.file_id] = block.file + + self.parser.parse_file(block) + node = list(self.parser.manifest.nodes.values())[0] + self.assertEqual(node.config.to_dict()["config_keys_used"], ["my_var"]) + def test_python_model_config_with_defaults(self): block = self.file_block_for(python_model_config_with_defaults, "nested/py_model.py") self.parser.manifest.files[block.file.file_id] = block.file @@ -1123,7 +1237,7 @@ def test_built_in_macro_override_detection(self): fqn=["snowplow", "nested", "model_1"], package_name="snowplow", original_file_path=normalize("models/nested/model_1.sql"), - config=NodeConfig(materialized="table"), + config=ModelConfig(materialized="table"), path=normalize("nested/model_1.sql"), language="sql", raw_code=raw_code, @@ -1159,7 +1273,7 @@ def setUp(self): fqn=["snowplow", "nested", "model_1"], package_name="snowplow", original_file_path=normalize("models/nested/model_1.sql"), - config=NodeConfig(materialized="table"), + config=ModelConfig(materialized="table"), path=normalize("nested/model_1.sql"), language="sql", raw_code='{{ config(materialized="table") }}select 1 as id', diff --git a/tests/unit/parser/test_partial.py b/tests/unit/parser/test_partial.py new file mode 100644 index 00000000000..b3ad25498a5 --- /dev/null +++ b/tests/unit/parser/test_partial.py @@ -0,0 +1,211 @@ +import time +from copy import deepcopy +from typing import Dict, List + +import pytest + +from dbt.contracts.files import ( + BaseSourceFile, + FileHash, + FilePath, + ParseFileType, + SchemaSourceFile, + SourceFile, +) +from dbt.node_types import NodeType +from dbt.parser.partial import PartialParsing +from dbt.tests.util import safe_set_invocation_context +from tests.unit.utils import normalize +from tests.unit.utils.manifest import make_generic_test, make_model + +PROJECT_NAME = "my_test" + + +@pytest.fixture +def files() -> Dict[str, BaseSourceFile]: + project_root = "/users/root" + sql_model_file = SourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="my_model.sql", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("abcdef"), + project_name=PROJECT_NAME, + parse_file_type=ParseFileType.Model, + nodes=["model.my_test.my_model"], + env_vars=[], + ) + sql_model_file_untouched = SourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="my_model_untouched.sql", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("abcdef"), + project_name=PROJECT_NAME, + parse_file_type=ParseFileType.Model, + nodes=["model.my_test.my_model_untouched"], + env_vars=[], + ) + + python_model_file = SourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="python_model.py", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("lalala"), + project_name=PROJECT_NAME, + parse_file_type=ParseFileType.Model, + nodes=["model.my_test.python_model"], + env_vars=[], + ) + python_model_file_untouched = SourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="python_model_untouched.py", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("lalala"), + project_name=PROJECT_NAME, + 
parse_file_type=ParseFileType.Model, + nodes=["model.my_test.python_model_untouched"], + env_vars=[], + ) + schema_file = SchemaSourceFile( + path=FilePath( + project_root=project_root, + searched_path="models", + relative_path="schema.yml", + modification_time=time.time(), + ), + checksum=FileHash.from_contents("ghijkl"), + project_name=PROJECT_NAME, + parse_file_type=ParseFileType.Schema, + dfy={ + "version": 2, + "models": [ + {"name": "my_model", "description": "Test model"}, + {"name": "python_model", "description": "python"}, + {"name": "not_null", "model": "test.my_test.test_my_model"}, + ], + }, + ndp=["model.my_test.my_model"], + env_vars={}, + data_tests={"models": {"not_null": {"test.my_test.test_my_model": []}}}, + ) + return { + schema_file.file_id: schema_file, + sql_model_file.file_id: sql_model_file, + sql_model_file_untouched.file_id: sql_model_file_untouched, + python_model_file.file_id: python_model_file, + python_model_file_untouched.file_id: python_model_file_untouched, + } + + +@pytest.fixture +def nodes() -> List[NodeType]: + patch_path = "my_test://" + normalize("models/schema.yml") + my_model = make_model(PROJECT_NAME, "my_model", "", patch_path=patch_path) + return [ + my_model, + make_model(PROJECT_NAME, "my_model_untouched", "", patch_path=patch_path), + make_model(PROJECT_NAME, "python_model", "", language="python", patch_path=patch_path), + make_model( + PROJECT_NAME, "python_model_untouched", "", language="python", patch_path=patch_path + ), + make_generic_test(PROJECT_NAME, "test", my_model, {}), + ] + + +@pytest.fixture +def partial_parsing(manifest, files): + safe_set_invocation_context() + return PartialParsing(manifest, deepcopy(files)) + + +def test_simple(partial_parsing, files, nodes): + # Nothing has changed + assert partial_parsing is not None + assert partial_parsing.skip_parsing() is True + + # Change a model file + sql_model_file_id = "my_test://" + normalize("models/my_model.sql") + partial_parsing.new_files[sql_model_file_id].checksum = FileHash.from_contents("xyzabc") + + python_model_file_id = "my_test://" + normalize("models/python_model.py") + partial_parsing.new_files[python_model_file_id].checksum = FileHash.from_contents("ohohoh") + + partial_parsing.build_file_diff() + assert partial_parsing.skip_parsing() is False + pp_files = partial_parsing.get_parsing_files() + pp_files["my_test"]["ModelParser"] = set(pp_files["my_test"]["ModelParser"]) + # models has 'patch_path' so we expect to see a SchemaParser file listed + schema_file_id = "my_test://" + normalize("models/schema.yml") + expected_pp_files = { + "my_test": { + "ModelParser": set([sql_model_file_id, python_model_file_id]), + "SchemaParser": [schema_file_id], + } + } + assert pp_files == expected_pp_files + schema_file = files[schema_file_id] + schema_file_model_names = set([model["name"] for model in schema_file.pp_dict["models"]]) + expected_model_names = set(["python_model", "my_model"]) + assert schema_file_model_names == expected_model_names + schema_file_model_descriptions = set( + [model["description"] for model in schema_file.pp_dict["models"]] + ) + expected_model_descriptions = set(["Test model", "python"]) + assert schema_file_model_descriptions == expected_model_descriptions + + +def test_schedule_nodes_for_parsing_basic(partial_parsing, nodes): + assert partial_parsing.file_diff["deleted"] == [] + assert partial_parsing.project_parser_files == {} + partial_parsing.schedule_nodes_for_parsing([nodes[0].unique_id]) + assert partial_parsing.project_parser_files == { + 
"my_test": { + "ModelParser": ["my_test://models/my_model.sql"], + "SchemaParser": ["my_test://models/schema.yml"], + } + } + + +def test_schedule_macro_nodes_for_parsing_basic(partial_parsing): + # XXX it seems kind of confusing what exactly this function does. + # Whoever Changes this function please add more comment. + + # this rely on the dfy and data_tests fields in schema node to add schema file to reparse + partial_parsing.schedule_macro_nodes_for_parsing(["test.my_test.test_my_model"]) + assert partial_parsing.project_parser_files == { + "my_test": {"SchemaParser": ["my_test://models/schema.yml"]} + } + + +class TestFileDiff: + @pytest.fixture + def partial_parsing(self, manifest, files): + safe_set_invocation_context() + saved_files = deepcopy(files) + saved_files["my_test://models/python_model_untouched.py"].checksum = ( + FileHash.from_contents("something new") + ) + return PartialParsing(manifest, saved_files) + + def test_build_file_diff_basic(self, partial_parsing): + partial_parsing.build_file_diff() + assert set(partial_parsing.file_diff["unchanged"]) == { + "my_test://models/my_model_untouched.sql", + "my_test://models/my_model.sql", + "my_test://models/schema.yml", + "my_test://models/python_model.py", + } + assert partial_parsing.file_diff["changed"] == [ + "my_test://models/python_model_untouched.py" + ] diff --git a/tests/unit/test_yaml_renderer.py b/tests/unit/parser/test_schema_renderer.py similarity index 100% rename from tests/unit/test_yaml_renderer.py rename to tests/unit/parser/test_schema_renderer.py diff --git a/tests/unit/parser/test_unit_tests.py b/tests/unit/parser/test_unit_tests.py new file mode 100644 index 00000000000..4e6554aeca7 --- /dev/null +++ b/tests/unit/parser/test_unit_tests.py @@ -0,0 +1,295 @@ +from unittest import mock + +from dbt.artifacts.resources import DependsOn, UnitTestConfig, UnitTestFormat +from dbt.contracts.graph.nodes import NodeType, UnitTestDefinition +from dbt.contracts.graph.unparsed import UnitTestOutputFixture +from dbt.parser import SchemaParser +from dbt.parser.unit_tests import UnitTestParser +from dbt_common.events.event_manager_client import add_callback_to_manager +from dbt_common.events.types import SystemStdErr +from tests.unit.parser.test_parser import SchemaParserTest, assertEqualNodes +from tests.unit.utils import MockNode +from tests.utils import EventCatcher + +UNIT_TEST_MODEL_NOT_FOUND_SOURCE = """ +unit_tests: + - name: test_my_model_doesnt_exist + model: my_model_doesnt_exist + description: "unit test description" + given: [] + expect: + rows: + - {a: 1} +""" + + +UNIT_TEST_SOURCE = """ +unit_tests: + - name: test_my_model + model: my_model + description: "unit test description" + given: [] + expect: + rows: + - {a: 1} +""" + + +UNIT_TEST_VERSIONED_MODEL_SOURCE = """ +unit_tests: + - name: test_my_model_versioned + model: my_model_versioned.v1 + description: "unit test description" + given: [] + expect: + rows: + - {a: 1} +""" + + +UNIT_TEST_CONFIG_SOURCE = """ +unit_tests: + - name: test_my_model + model: my_model + config: + tags: "schema_tag" + meta: + meta_key: meta_value + meta_jinja_key: '{{ 1 + 1 }}' + description: "unit test description" + given: [] + expect: + rows: + - {a: 1} +""" + + +UNIT_TEST_MULTIPLE_SOURCE = """ +unit_tests: + - name: test_my_model + model: my_model + description: "unit test description" + given: [] + expect: + rows: + - {a: 1} + - name: test_my_model2 + model: my_model + description: "unit test description" + given: [] + expect: + rows: + - {a: 1} +""" + 
+UNIT_TEST_NONE_ROWS_SORT = """ +unit_tests: + - name: test_my_model_null_handling + model: my_model + description: "unit test description" + given: [] + expect: + rows: + - {"id": , "col1": "d"} + - {"id": , "col1": "e"} + - {"id": 6, "col1": "f"} +""" + +UNIT_TEST_NONE_ROWS_SORT_CSV = """ +unit_tests: + - name: test_my_model_null_handling + model: my_model + description: "unit test description" + given: [] + expect: + format: csv + rows: | + id,col1 + ,d + ,e + 6,f +""" + +UNIT_TEST_NONE_ROWS_SORT_SQL = """ +unit_tests: + - name: test_my_model_null_handling + model: my_model + description: "unit test description" + given: [] + expect: + format: sql + rows: | + select null + select 1 +""" + +UNIT_TEST_NONE_ROWS_SORT_FAILS = """ +unit_tests: + - name: test_my_model_null_handling + model: my_model + description: "this unit test needs one non-None value row" + given: [] + expect: + rows: + - {"id": , "col1": "d"} + - {"id": , "col1": "e"} +""" + + +class UnitTestParserTest(SchemaParserTest): + def setUp(self): + super().setUp() + my_model_node = MockNode( + package="snowplow", + name="my_model", + config=mock.MagicMock(enabled=True), + schema="test_schema", + refs=[], + sources=[], + patch_path=None, + ) + self.manifest.nodes = {my_model_node.unique_id: my_model_node} + self.parser = SchemaParser( + project=self.snowplow_project_config, + manifest=self.manifest, + root_project=self.root_project_config, + ) + + def file_block_for(self, data, filename): + return super().file_block_for(data, filename, "unit_tests") + + def test_basic(self): + block = self.yaml_block_for(UNIT_TEST_SOURCE, "test_my_model.yml") + + UnitTestParser(self.parser, block).parse() + + self.assert_has_manifest_lengths(self.parser.manifest, nodes=1, unit_tests=1) + unit_test = list(self.parser.manifest.unit_tests.values())[0] + expected = UnitTestDefinition( + name="test_my_model", + model="my_model", + resource_type=NodeType.Unit, + package_name="snowplow", + path=block.path.relative_path, + original_file_path=block.path.original_file_path, + unique_id="unit_test.snowplow.my_model.test_my_model", + given=[], + expect=UnitTestOutputFixture(rows=[{"a": 1}]), + description="unit test description", + overrides=None, + depends_on=DependsOn(nodes=["model.snowplow.my_model"]), + fqn=["snowplow", "my_model", "test_my_model"], + config=UnitTestConfig(), + schema="test_schema", + ) + expected.build_unit_test_checksum() + assertEqualNodes(unit_test, expected) + + def test_unit_test_config(self): + block = self.yaml_block_for(UNIT_TEST_CONFIG_SOURCE, "test_my_model.yml") + self.root_project_config.unit_tests = { + "snowplow": {"my_model": {"+tags": ["project_tag"]}} + } + + UnitTestParser(self.parser, block).parse() + + self.assert_has_manifest_lengths(self.parser.manifest, nodes=1, unit_tests=1) + unit_test = self.parser.manifest.unit_tests["unit_test.snowplow.my_model.test_my_model"] + self.assertEqual(sorted(unit_test.config.tags), sorted(["schema_tag", "project_tag"])) + self.assertEqual(unit_test.config.meta, {"meta_key": "meta_value", "meta_jinja_key": "2"}) + + def test_unit_test_versioned_model(self): + block = self.yaml_block_for(UNIT_TEST_VERSIONED_MODEL_SOURCE, "test_my_model.yml") + my_model_versioned_node = MockNode( + package="snowplow", + name="my_model_versioned", + config=mock.MagicMock(enabled=True), + refs=[], + sources=[], + patch_path=None, + version=1, + ) + self.manifest.nodes[my_model_versioned_node.unique_id] = my_model_versioned_node + + UnitTestParser(self.parser, block).parse() + + 
self.assert_has_manifest_lengths(self.parser.manifest, nodes=2, unit_tests=1) + unit_test = self.parser.manifest.unit_tests[ + "unit_test.snowplow.my_model_versioned.v1.test_my_model_versioned" + ] + self.assertEqual(len(unit_test.depends_on.nodes), 1) + self.assertEqual(unit_test.depends_on.nodes[0], "model.snowplow.my_model_versioned.v1") + + def test_multiple_unit_tests(self): + block = self.yaml_block_for(UNIT_TEST_MULTIPLE_SOURCE, "test_my_model.yml") + + UnitTestParser(self.parser, block).parse() + + self.assert_has_manifest_lengths(self.parser.manifest, nodes=1, unit_tests=2) + for unit_test in self.parser.manifest.unit_tests.values(): + self.assertEqual(len(unit_test.depends_on.nodes), 1) + self.assertEqual(unit_test.depends_on.nodes[0], "model.snowplow.my_model") + + def _assert_fixture_yml_reorders_to_expected_rows( + self, unit_test_fixture_yml, fixture_expected_field_format, expected_rows + ): + block = self.yaml_block_for(unit_test_fixture_yml, "test_my_model.yml") + + UnitTestParser(self.parser, block).parse() + + self.assert_has_manifest_lengths(self.parser.manifest, nodes=1, unit_tests=1) + unit_test = list(self.parser.manifest.unit_tests.values())[0] + expected = UnitTestDefinition( + name="test_my_model_null_handling", + model="my_model", + resource_type=NodeType.Unit, + package_name="snowplow", + path=block.path.relative_path, + original_file_path=block.path.original_file_path, + unique_id="unit_test.snowplow.my_model.test_my_model_null_handling", + given=[], + expect=UnitTestOutputFixture(format=fixture_expected_field_format, rows=expected_rows), + description="unit test description", + overrides=None, + depends_on=DependsOn(nodes=["model.snowplow.my_model"]), + fqn=["snowplow", "my_model", "test_my_model_null_handling"], + config=UnitTestConfig(), + schema="test_schema", + ) + expected.build_unit_test_checksum() + assertEqualNodes(unit_test, expected) + + def test_expected_promote_non_none_row_dct(self): + expected_rows = [ + {"id": 6, "col1": "f"}, + {"id": None, "col1": "e"}, + {"id": None, "col1": "d"}, + ] + self._assert_fixture_yml_reorders_to_expected_rows( + UNIT_TEST_NONE_ROWS_SORT, UnitTestFormat.Dict, expected_rows + ) + + def test_expected_promote_non_none_row_csv(self): + expected_rows = [ + {"id": "6", "col1": "f"}, + {"id": None, "col1": "e"}, + {"id": None, "col1": "d"}, + ] + self._assert_fixture_yml_reorders_to_expected_rows( + UNIT_TEST_NONE_ROWS_SORT_CSV, UnitTestFormat.CSV, expected_rows + ) + + def test_expected_promote_non_none_row_sql(self): + expected_rows = "select null\n" + "select 1" + self._assert_fixture_yml_reorders_to_expected_rows( + UNIT_TEST_NONE_ROWS_SORT_SQL, UnitTestFormat.SQL, expected_rows + ) + + def test_no_full_row_does_not_raise_exception(self): + catcher = EventCatcher(SystemStdErr) + add_callback_to_manager(catcher.catch) + + block = self.yaml_block_for(UNIT_TEST_NONE_ROWS_SORT_FAILS, "test_my_model.yml") + UnitTestParser(self.parser, block).parse() + + assert len(catcher.caught_events) == 1 diff --git a/tests/unit/test_plugin_manager.py b/tests/unit/plugins/test_manager.py similarity index 79% rename from tests/unit/test_plugin_manager.py rename to tests/unit/plugins/test_manager.py index fc0f1c339ca..b67677879be 100644 --- a/tests/unit/test_plugin_manager.py +++ b/tests/unit/plugins/test_manager.py @@ -1,10 +1,12 @@ +from unittest import mock + import pytest from dbt.exceptions import DbtRuntimeError -from dbt.plugins import PluginManager, dbtPlugin, dbt_hook -from dbt.plugins.manifest import PluginNodes, ModelNodeArgs 
-from dbt.plugins.contracts import PluginArtifacts, PluginArtifact +from dbt.plugins import PluginManager, dbt_hook, dbtPlugin +from dbt.plugins.contracts import PluginArtifact, PluginArtifacts from dbt.plugins.exceptions import dbtPluginError +from dbt.plugins.manifest import ModelNodeArgs, PluginNodes class ExceptionInitializePlugin(dbtPlugin): @@ -92,11 +94,34 @@ def test_plugin_manager_init_multiple_hooks(self, get_nodes_plugin, get_artifact assert len(pm.hooks["get_manifest_artifacts"]) == 1 assert pm.hooks["get_manifest_artifacts"][0] == get_artifacts_plugin.get_manifest_artifacts - def test_get_nodes(self, get_nodes_plugins): + @mock.patch("dbt.tracking") + def test_get_nodes(self, tracking, get_nodes_plugins): + tracking.active_user = mock.Mock() pm = PluginManager(plugins=get_nodes_plugins) + nodes = pm.get_nodes() + assert len(nodes.models) == 2 + expected_calls = [ + mock.call( + { + "plugin_name": get_nodes_plugins[0].name, + "num_model_nodes": 1, + "num_model_packages": 1, + } + ), + mock.call( + { + "plugin_name": get_nodes_plugins[1].name, + "num_model_nodes": 1, + "num_model_packages": 1, + } + ), + ] + + tracking.track_plugin_get_nodes.assert_has_calls(expected_calls) + def test_get_manifest_artifact(self, get_artifacts_plugins): pm = PluginManager(plugins=get_artifacts_plugins) artifacts = pm.get_manifest_artifacts(None) diff --git a/tests/unit/task/__init__.py b/tests/unit/task/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/task/docs/__init__.py b/tests/unit/task/docs/__init__.py new file mode 100644 index 00000000000..e69de29bb2d diff --git a/tests/unit/task/docs/test_serve.py b/tests/unit/task/docs/test_serve.py new file mode 100644 index 00000000000..5a832341201 --- /dev/null +++ b/tests/unit/task/docs/test_serve.py @@ -0,0 +1,34 @@ +from http.server import SimpleHTTPRequestHandler +from unittest.mock import MagicMock, patch + +import pytest + +from dbt.task.docs.serve import ServeTask + + +@pytest.fixture +def serve_task(): + # Set up + task = ServeTask(config=MagicMock(), args=MagicMock()) + task.config.project_target_path = "." 
+ task.args.port = 8000 + task.args.host = "127.0.0.1" + return task + + +def test_serve_bind_to_127(serve_task): + serve_task.args.browser = False + with patch("dbt.task.docs.serve.socketserver.TCPServer") as patched_TCPServer: + patched_TCPServer.return_value = MagicMock() + serve_task.run() + patched_TCPServer.assert_called_once_with(("127.0.0.1", 8000), SimpleHTTPRequestHandler) + + +def test_serve_bind_to_all(serve_task): + serve_task.args.browser = False + serve_task.args.host = "" + + with patch("dbt.task.docs.serve.socketserver.TCPServer") as patched_TCPServer: + patched_TCPServer.return_value = MagicMock() + serve_task.run() + patched_TCPServer.assert_called_once_with(("", 8000), SimpleHTTPRequestHandler) diff --git a/tests/unit/task/test_base.py b/tests/unit/task/test_base.py new file mode 100644 index 00000000000..c28d59f0df7 --- /dev/null +++ b/tests/unit/task/test_base.py @@ -0,0 +1,54 @@ +import os + +import dbt_common.exceptions +from dbt.contracts.graph.nodes import SourceDefinition +from dbt.task.base import BaseRunner, ConfiguredTask +from tests.unit.config import BaseConfigTest + +INITIAL_ROOT = os.getcwd() + + +class MockRunner(BaseRunner): + def compile(self): + pass + + +class TestBaseRunner: + def test_handle_generic_exception_handles_nodes_without_build_path( + self, basic_parsed_source_definition_object: SourceDefinition + ): + # Source definition nodes don't have `build_path` attributes. Thus, this + # test will fail if _handle_generic_exception doesn't account for this + runner = MockRunner( + config=None, + adapter=None, + node=basic_parsed_source_definition_object, + node_index=None, + num_nodes=None, + ) + assert not hasattr(basic_parsed_source_definition_object, "build_path") + runner._handle_generic_exception(Exception("bad thing happened"), ctx=None) + + +class InheritsFromConfiguredTask(ConfiguredTask): + def run(self): + pass + + +class TestConfiguredTask(BaseConfigTest): + def tearDown(self): + super().tearDown() + # These tests will change the directory to the project path, + # so it's necessary to change it back at the end. 
+ os.chdir(INITIAL_ROOT) + + def test_configured_task_dir_change(self): + self.assertEqual(os.getcwd(), INITIAL_ROOT) + self.assertNotEqual(INITIAL_ROOT, self.project_dir) + InheritsFromConfiguredTask.from_args(self.args) + self.assertEqual(os.path.realpath(os.getcwd()), os.path.realpath(self.project_dir)) + + def test_configured_task_dir_change_with_bad_path(self): + self.args.project_dir = "bad_path" + with self.assertRaises(dbt_common.exceptions.DbtRuntimeError): + InheritsFromConfiguredTask.from_args(self.args) diff --git a/tests/unit/task/test_build.py b/tests/unit/task/test_build.py new file mode 100644 index 00000000000..87d7b081ae8 --- /dev/null +++ b/tests/unit/task/test_build.py @@ -0,0 +1,14 @@ +from dbt.contracts.graph.nodes import SavedQuery +from dbt.task.build import SavedQueryRunner + + +def test_saved_query_runner_on_skip(saved_query: SavedQuery): + runner = SavedQueryRunner( + config=None, + adapter=None, + node=saved_query, + node_index=None, + num_nodes=None, + ) + # on_skip would work + runner.on_skip() diff --git a/tests/unit/task/test_clone.py b/tests/unit/task/test_clone.py new file mode 100644 index 00000000000..9ef07d5e492 --- /dev/null +++ b/tests/unit/task/test_clone.py @@ -0,0 +1,16 @@ +from unittest.mock import MagicMock, patch + +from dbt.flags import get_flags +from dbt.task.clone import CloneTask + + +def test_clone_task_not_preserve_edges(): + mock_node_selector = MagicMock() + mock_spec = MagicMock() + with patch.object( + CloneTask, "get_node_selector", return_value=mock_node_selector + ), patch.object(CloneTask, "get_selection_spec", return_value=mock_spec): + task = CloneTask(get_flags(), None, None) + task.get_graph_queue() + # when we get the graph queue, preserve_edges is False + mock_node_selector.get_graph_queue.assert_called_with(mock_spec, False) diff --git a/tests/unit/test_docs_generate.py b/tests/unit/task/test_docs.py similarity index 99% rename from tests/unit/test_docs_generate.py rename to tests/unit/task/test_docs.py index 8daf767e96b..171b081c781 100644 --- a/tests/unit/test_docs_generate.py +++ b/tests/unit/task/test_docs.py @@ -1,15 +1,15 @@ +import unittest from decimal import Decimal from unittest import mock -import unittest -from dbt.task import generate +from dbt.task.docs import generate class GenerateTest(unittest.TestCase): def setUp(self): self.maxDiff = None self.manifest = mock.MagicMock() - self.patcher = mock.patch("dbt.task.generate.get_unique_id_mapping") + self.patcher = mock.patch("dbt.task.docs.generate.get_unique_id_mapping") self.mock_get_unique_id_mapping = self.patcher.start() def tearDown(self): diff --git a/tests/unit/task/test_freshness.py b/tests/unit/task/test_freshness.py new file mode 100644 index 00000000000..4816ae98f05 --- /dev/null +++ b/tests/unit/task/test_freshness.py @@ -0,0 +1,155 @@ +import datetime +from unittest import mock + +import pytest + +from dbt.task.freshness import FreshnessResponse, FreshnessTask + + +class TestFreshnessTaskMetadataCache: + @pytest.fixture(scope="class") + def args(self): + mock_args = mock.Mock() + mock_args.state = None + mock_args.defer_state = None + mock_args.write_json = None + + return mock_args + + @pytest.fixture(scope="class") + def config(self): + mock_config = mock.Mock() + mock_config.threads = 1 + mock_config.target_name = "mock_config_target_name" + + @pytest.fixture(scope="class") + def manifest(self): + return mock.Mock() + + @pytest.fixture(scope="class") + def source_with_loaded_at_field(self): + mock_source = mock.Mock() + mock_source.unique_id = 
"source_with_loaded_at_field" + mock_source.loaded_at_field = "loaded_at_field" + return mock_source + + @pytest.fixture(scope="class") + def source_no_loaded_at_field(self): + mock_source = mock.Mock() + mock_source.unique_id = "source_no_loaded_at_field" + return mock_source + + @pytest.fixture(scope="class") + def source_no_loaded_at_field2(self): + mock_source = mock.Mock() + mock_source.unique_id = "source_no_loaded_at_field2" + return mock_source + + @pytest.fixture(scope="class") + def adapter(self): + return mock.Mock() + + @pytest.fixture(scope="class") + def freshness_response(self): + return FreshnessResponse( + max_loaded_at=datetime.datetime(2020, 5, 2), + snapshotted_at=datetime.datetime(2020, 5, 4), + age=2, + ) + + def test_populate_metadata_freshness_cache( + self, args, config, manifest, adapter, source_no_loaded_at_field, freshness_response + ): + manifest.sources = {source_no_loaded_at_field.unique_id: source_no_loaded_at_field} + adapter.Relation.create_from.return_value = "source_relation" + adapter.calculate_freshness_from_metadata_batch.return_value = ( + [], + {"source_relation": freshness_response}, + ) + task = FreshnessTask(args=args, config=config, manifest=manifest) + + task.populate_metadata_freshness_cache(adapter, {source_no_loaded_at_field.unique_id}) + + assert task.get_freshness_metadata_cache() == {"source_relation": freshness_response} + + def test_populate_metadata_freshness_cache_multiple_sources( + self, + args, + config, + manifest, + adapter, + source_no_loaded_at_field, + source_no_loaded_at_field2, + freshness_response, + ): + manifest.sources = { + source_no_loaded_at_field.unique_id: source_no_loaded_at_field, + source_no_loaded_at_field2.unique_id: source_no_loaded_at_field2, + } + adapter.Relation.create_from.side_effect = ["source_relation1", "source_relation2"] + adapter.calculate_freshness_from_metadata_batch.return_value = ( + [], + {"source_relation1": freshness_response, "source_relation2": freshness_response}, + ) + task = FreshnessTask(args=args, config=config, manifest=manifest) + + task.populate_metadata_freshness_cache(adapter, {source_no_loaded_at_field.unique_id}) + + assert task.get_freshness_metadata_cache() == { + "source_relation1": freshness_response, + "source_relation2": freshness_response, + } + + def test_populate_metadata_freshness_cache_with_loaded_at_field( + self, args, config, manifest, adapter, source_with_loaded_at_field, freshness_response + ): + manifest.sources = { + source_with_loaded_at_field.unique_id: source_with_loaded_at_field, + } + adapter.Relation.create_from.return_value = "source_relation" + adapter.calculate_freshness_from_metadata_batch.return_value = ( + [], + {"source_relation": freshness_response}, + ) + task = FreshnessTask(args=args, config=config, manifest=manifest) + + task.populate_metadata_freshness_cache(adapter, {source_with_loaded_at_field.unique_id}) + + assert task.get_freshness_metadata_cache() == {"source_relation": freshness_response} + + def test_populate_metadata_freshness_cache_multiple_sources_mixed( + self, + args, + config, + manifest, + adapter, + source_no_loaded_at_field, + source_with_loaded_at_field, + freshness_response, + ): + manifest.sources = { + source_no_loaded_at_field.unique_id: source_no_loaded_at_field, + source_with_loaded_at_field.unique_id: source_with_loaded_at_field, + } + adapter.Relation.create_from.return_value = "source_relation" + adapter.calculate_freshness_from_metadata_batch.return_value = ( + [], + {"source_relation": freshness_response}, + ) + 
task = FreshnessTask(args=args, config=config, manifest=manifest) + + task.populate_metadata_freshness_cache(adapter, {source_no_loaded_at_field.unique_id}) + + assert task.get_freshness_metadata_cache() == {"source_relation": freshness_response} + + def test_populate_metadata_freshness_cache_adapter_exception( + self, args, config, manifest, adapter, source_no_loaded_at_field, freshness_response + ): + manifest.sources = {source_no_loaded_at_field.unique_id: source_no_loaded_at_field} + adapter.Relation.create_from.return_value = "source_relation" + adapter.calculate_freshness_from_metadata_batch.side_effect = Exception() + task = FreshnessTask(args=args, config=config, manifest=manifest) + + task.populate_metadata_freshness_cache(adapter, {source_no_loaded_at_field.unique_id}) + + assert task.get_freshness_metadata_cache() == {} diff --git a/tests/unit/task/test_list.py b/tests/unit/task/test_list.py new file mode 100644 index 00000000000..da701fe2fcf --- /dev/null +++ b/tests/unit/task/test_list.py @@ -0,0 +1,22 @@ +from argparse import Namespace +from unittest.mock import patch + +from dbt.flags import get_flags, set_from_args +from dbt.task.list import ListTask +from dbt_common.events.types import PrintEvent + + +def test_list_output_results(): + set_from_args(Namespace(models=None), {}) + task = ListTask(get_flags(), None, None) + results = ["node1", "node2", "node3"] + expected_node_results = ["node1", "node2", "node3"] + + with patch("dbt.task.list.fire_event") as mock_fire_event: + node_results = task.output_results(results) + + assert node_results == expected_node_results + # assert called with PrintEvent type object and message 'node1', 'node2', 'node3' + for call_args in mock_fire_event.call_args_list: + assert isinstance(call_args[0][0], PrintEvent) + assert call_args[0][0].msg in expected_node_results diff --git a/tests/unit/test_retry_commands.py b/tests/unit/task/test_retry.py similarity index 89% rename from tests/unit/test_retry_commands.py rename to tests/unit/task/test_retry.py index 3eb151cb6a3..0e0c8898b4e 100644 --- a/tests/unit/test_retry_commands.py +++ b/tests/unit/task/test_retry.py @@ -1,5 +1,5 @@ from dbt.cli.types import Command -from dbt.task.retry import TASK_DICT, CMD_DICT +from dbt.task.retry import CMD_DICT, TASK_DICT EXCLUDED_COMMANDS = { "clean", diff --git a/tests/unit/task/test_run.py b/tests/unit/task/test_run.py new file mode 100644 index 00000000000..19a88f6aa8f --- /dev/null +++ b/tests/unit/task/test_run.py @@ -0,0 +1,128 @@ +from argparse import Namespace +from unittest.mock import MagicMock, patch + +import pytest + +from dbt.adapters.postgres import PostgresAdapter +from dbt.artifacts.schemas.results import RunStatus +from dbt.artifacts.schemas.run import RunResult +from dbt.config.runtime import RuntimeConfig +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import ModelNode +from dbt.events.types import LogModelResult +from dbt.flags import get_flags, set_from_args +from dbt.task.run import ModelRunner, RunTask +from dbt.tests.util import safe_set_invocation_context +from dbt_common.events.base_types import EventLevel +from dbt_common.events.event_manager_client import add_callback_to_manager +from tests.utils import EventCatcher + + +@pytest.mark.parametrize( + "exception_to_raise, expected_cancel_connections", + [ + (SystemExit, True), + (KeyboardInterrupt, True), + (Exception, False), + ], +) +def test_run_task_cancel_connections( + exception_to_raise, expected_cancel_connections, runtime_config: 
RuntimeConfig +): + safe_set_invocation_context() + + def mock_run_queue(*args, **kwargs): + raise exception_to_raise("Test exception") + + with patch.object(RunTask, "run_queue", mock_run_queue), patch.object( + RunTask, "_cancel_connections" + ) as mock_cancel_connections: + + set_from_args(Namespace(write_json=False), None) + task = RunTask( + get_flags(), + runtime_config, + None, + ) + with pytest.raises(exception_to_raise): + task.execute_nodes() + assert mock_cancel_connections.called == expected_cancel_connections + + +def test_run_task_preserve_edges(): + mock_node_selector = MagicMock() + mock_spec = MagicMock() + with patch.object(RunTask, "get_node_selector", return_value=mock_node_selector), patch.object( + RunTask, "get_selection_spec", return_value=mock_spec + ): + task = RunTask(get_flags(), None, None) + task.get_graph_queue() + # when we get the graph queue, preserve_edges is True + mock_node_selector.get_graph_queue.assert_called_with(mock_spec, True) + + +class TestModelRunner: + @pytest.fixture + def log_model_result_catcher(self) -> EventCatcher: + catcher = EventCatcher(event_to_catch=LogModelResult) + add_callback_to_manager(catcher.catch) + return catcher + + @pytest.fixture + def model_runner( + self, + postgres_adapter: PostgresAdapter, + table_model: ModelNode, + runtime_config: RuntimeConfig, + ) -> ModelRunner: + return ModelRunner( + config=runtime_config, + adapter=postgres_adapter, + node=table_model, + node_index=1, + num_nodes=1, + ) + + @pytest.fixture + def run_result(self, table_model: ModelNode) -> RunResult: + return RunResult( + status=RunStatus.Success, + timing=[], + thread_id="an_id", + execution_time=0, + adapter_response={}, + message="It did it", + failures=None, + node=table_model, + ) + + def test_print_result_line( + self, + log_model_result_catcher: EventCatcher, + model_runner: ModelRunner, + run_result: RunResult, + ) -> None: + # Check `print_result_line` with "successful" RunResult + model_runner.print_result_line(run_result) + assert len(log_model_result_catcher.caught_events) == 1 + assert log_model_result_catcher.caught_events[0].info.level == EventLevel.INFO + assert log_model_result_catcher.caught_events[0].data.status == run_result.message + + # reset event catcher + log_model_result_catcher.flush() + + # Check `print_result_line` with "error" RunResult + run_result.status = RunStatus.Error + model_runner.print_result_line(run_result) + assert len(log_model_result_catcher.caught_events) == 1 + assert log_model_result_catcher.caught_events[0].info.level == EventLevel.ERROR + assert log_model_result_catcher.caught_events[0].data.status == EventLevel.ERROR + + @pytest.mark.skip( + reason="Default and adapter macros aren't being appropriately populated, leading to a runtime error" + ) + def test_execute( + self, table_model: ModelNode, manifest: Manifest, model_runner: ModelRunner + ) -> None: + model_runner.execute(model=table_model, manifest=manifest) + # TODO: Assert that the model was executed diff --git a/tests/unit/task/test_test.py b/tests/unit/task/test_test.py new file mode 100644 index 00000000000..350f80950eb --- /dev/null +++ b/tests/unit/task/test_test.py @@ -0,0 +1,71 @@ +import agate +import pytest + +from dbt.task.test import list_rows_from_table + + +class TestListRowsFromTable: + @pytest.mark.parametrize( + "agate_table_cols,agate_table_rows,expected_list_rows", + [ + (["a", "b", "c"], [], [["a", "b", "c"]]), # no rows + (["a", "b", "c"], [[1, 2, 3]], [["a", "b", "c"], [1, 2, 3]]), # single row, no nulls + ( + ["a", 
"b", "c"], + [[1, 2, 3], [2, 3, 4]], + [["a", "b", "c"], [1, 2, 3], [2, 3, 4]], + ), # multiple rows + ( + ["a", "b", "c"], + [[None, 2, 3], [2, None, 4]], + [["a", "b", "c"], [None, 2, 3], [2, None, 4]], + ), # multiple rows, with nulls + ], + ) + def test_list_rows_from_table_no_sort( + self, agate_table_cols, agate_table_rows, expected_list_rows + ): + table = agate.Table(rows=agate_table_rows, column_names=agate_table_cols) + + list_rows = list_rows_from_table(table) + assert list_rows == expected_list_rows + + @pytest.mark.parametrize( + "agate_table_cols,agate_table_rows,expected_list_rows", + [ + (["a", "b", "c"], [], [["a", "b", "c"]]), # no rows + (["a", "b", "c"], [[1, 2, 3]], [["a", "b", "c"], [1, 2, 3]]), # single row, no nulls + ( + ["a", "b", "c"], + [[1, 2, 3], [2, 3, 4]], + [["a", "b", "c"], [1, 2, 3], [2, 3, 4]], + ), # multiple rows, in order + ( + ["a", "b", "c"], + [[2, 3, 4], [1, 2, 3]], + [["a", "b", "c"], [1, 2, 3], [2, 3, 4]], + ), # multiple rows, out of order + ( + ["a", "b", "c"], + [[None, 2, 3], [2, 3, 4]], + [["a", "b", "c"], [2, 3, 4], [None, 2, 3]], + ), # multiple rows, out of order with nulls in first position + ( + ["a", "b", "c"], + [[4, 5, 6], [1, None, 3]], + [["a", "b", "c"], [1, None, 3], [4, 5, 6]], + ), # multiple rows, out of order with null in non-first position + ( + ["a", "b", "c"], + [[None, 5, 6], [1, None, 3]], + [["a", "b", "c"], [1, None, 3], [None, 5, 6]], + ), # multiple rows, out of order with nulls in many positions + ], + ) + def test_list_rows_from_table_with_sort( + self, agate_table_cols, agate_table_rows, expected_list_rows + ): + table = agate.Table(rows=agate_table_rows, column_names=agate_table_cols) + + list_rows = list_rows_from_table(table, sort=True) + assert list_rows == expected_list_rows diff --git a/tests/unit/test_adapter_connection_manager.py b/tests/unit/test_adapter_connection_manager.py deleted file mode 100644 index b270f6a5d19..00000000000 --- a/tests/unit/test_adapter_connection_manager.py +++ /dev/null @@ -1,494 +0,0 @@ -import unittest -from unittest import mock -import sys - -import dbt.exceptions - -import psycopg2 - -from dbt.contracts.connection import Connection -from dbt.adapters.base import BaseConnectionManager -from dbt.adapters.postgres import PostgresCredentials, PostgresConnectionManager -from dbt.events import AdapterLogger - - -class BaseConnectionManagerTest(unittest.TestCase): - def setUp(self): - self.postgres_credentials = PostgresCredentials( - host="localhost", - user="test-user", - port=1111, - password="test-password", - database="test-db", - schema="test-schema", - ) - self.logger = AdapterLogger("test") - self.postgres_connection = Connection("postgres", None, self.postgres_credentials) - - def test_retry_connection(self): - """Test a dummy handle is set on a connection on the first attempt. - - This test uses a Connection populated with test PostgresCredentials values, and - expects the Connection.handle attribute to be set to True and it's state to - "open", after calling retry_connection. - - Moreover, the attribute should be set in the first attempt as no exception would - be raised for retrying. A mock connect function is used to simulate a real connection - passing on the first attempt. 
- """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - return True - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retryable_exceptions=[], - ) - - assert conn.state == "open" - assert conn.handle is True - assert attempts == 1 - - def test_retry_connection_fails_unhandled(self): - """Test setting a handle fails upon raising a non-handled exception. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a ValueError to be raised by a mock connect function. As a - result: - * The Connection state should be "fail" and the handle None. - * The resulting attempt count should be 1 as we are not explicitly configured to handle a - ValueError. - * retry_connection should raise a FailedToConnectError with the Exception message. - """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - raise ValueError("Something went horribly wrong") - - with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectError, "Something went horribly wrong" - ): - - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_limit=1, - retry_timeout=lambda attempt: 0, - retryable_exceptions=(TypeError,), - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 1 - - def test_retry_connection_fails_handled(self): - """Test setting a handle fails upon raising a handled exception. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a ValueError to be raised by a mock connect function. - As a result: - * The Connection state should be "fail" and the handle None. - * The resulting attempt count should be 2 as we are configured to handle a ValueError. - * retry_connection should raise a FailedToConnectError with the Exception message. - """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - raise ValueError("Something went horribly wrong") - - with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectError, "Something went horribly wrong" - ): - - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError,), - retry_limit=1, - ) - - assert conn.state == "fail" - assert conn.handle is None - - def test_retry_connection_passes_handled(self): - """Test setting a handle fails upon raising a handled exception. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a ValueError to be raised by a mock connect function only the first - time is called. Upon handling the exception once, connect should return. - As a result: - * The Connection state should be "open" and the handle True. - * The resulting attempt count should be 2 as we are configured to handle a ValueError. 
- """ - conn = self.postgres_connection - is_handled = False - attempts = 0 - - def connect(): - nonlocal is_handled - nonlocal attempts - - attempts += 1 - - if is_handled: - return True - - is_handled = True - raise ValueError("Something went horribly wrong") - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError,), - retry_limit=1, - ) - - assert conn.state == "open" - assert conn.handle is True - assert is_handled is True - assert attempts == 2 - - def test_retry_connection_attempts(self): - """Test setting a handle fails upon raising a handled exception multiple times. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a ValueError to be raised by a mock connect function. As a result: - * The Connection state should be "fail" and the handle None, as connect - never returns. - * The resulting attempt count should be 11 as we are configured to handle a ValueError. - * retry_connection should raise a FailedToConnectError with the Exception message. - """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - - raise ValueError("Something went horribly wrong") - - with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectError, "Something went horribly wrong" - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError,), - retry_limit=10, - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 11 - - def test_retry_connection_fails_handling_all_exceptions(self): - """Test setting a handle fails after exhausting all attempts. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a TypeError to be raised by a mock connect function. As a result: - * The Connection state should be "fail" and the handle None, as connect - never returns. - * The resulting attempt count should be 11 as we are configured to handle all Exceptions. - * retry_connection should raise a FailedToConnectError with the Exception message. - """ - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - - raise TypeError("An unhandled thing went horribly wrong") - - with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectError, "An unhandled thing went horribly wrong" - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=[Exception], - retry_limit=15, - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 16 - - def test_retry_connection_passes_multiple_handled(self): - """Test setting a handle passes upon handling multiple exceptions. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a mock connect to raise a ValueError in the first invocation and a - TypeError in the second invocation. As a result: - * The Connection state should be "open" and the handle True, as connect - returns after both exceptions have been handled. - * The resulting attempt count should be 3. 
- """ - conn = self.postgres_connection - is_value_err_handled = False - is_type_err_handled = False - attempts = 0 - - def connect(): - nonlocal is_value_err_handled - nonlocal is_type_err_handled - nonlocal attempts - - attempts += 1 - - if is_value_err_handled and is_type_err_handled: - return True - elif is_type_err_handled: - is_value_err_handled = True - raise ValueError("Something went horribly wrong") - else: - is_type_err_handled = True - raise TypeError("An unhandled thing went horribly wrong") - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError, TypeError), - retry_limit=2, - ) - - assert conn.state == "open" - assert conn.handle is True - assert is_type_err_handled is True - assert is_value_err_handled is True - assert attempts == 3 - - def test_retry_connection_passes_none_excluded(self): - """Test setting a handle passes upon handling multiple exceptions. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a mock connect to raise a ValueError in the first invocation and a - TypeError in the second invocation. As a result: - * The Connection state should be "open" and the handle True, as connect - returns after both exceptions have been handled. - * The resulting attempt count should be 3. - """ - conn = self.postgres_connection - is_value_err_handled = False - is_type_err_handled = False - attempts = 0 - - def connect(): - nonlocal is_value_err_handled - nonlocal is_type_err_handled - nonlocal attempts - - attempts += 1 - - if is_value_err_handled and is_type_err_handled: - return True - elif is_type_err_handled: - is_value_err_handled = True - raise ValueError("Something went horribly wrong") - else: - is_type_err_handled = True - raise TypeError("An unhandled thing went horribly wrong") - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError, TypeError), - retry_limit=2, - ) - - assert conn.state == "open" - assert conn.handle is True - assert is_type_err_handled is True - assert is_value_err_handled is True - assert attempts == 3 - - def test_retry_connection_retry_limit(self): - """Test retry_connection raises an exception with a negative retry limit.""" - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - return True - - with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectError, "retry_limit cannot be negative" - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=0, - retryable_exceptions=(ValueError,), - retry_limit=-2, - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 0 - - def test_retry_connection_retry_timeout(self): - """Test retry_connection raises an exception with a negative timeout.""" - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - return True - - for retry_timeout in [-10, -2.5, lambda _: -100, lambda _: -10.1]: - with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectError, - "retry_timeout cannot be negative or return a negative time", - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=-10, - retryable_exceptions=(ValueError,), - retry_limit=2, - ) - - assert conn.state == "init" - assert conn.handle is None - assert attempts == 0 - - def test_retry_connection_exceeds_recursion_limit(self): - """Test 
retry_connection raises an exception with retries that exceed recursion limit.""" - conn = self.postgres_connection - attempts = 0 - - def connect(): - nonlocal attempts - attempts += 1 - return True - - with self.assertRaisesRegex( - dbt.exceptions.FailedToConnectError, - "retry_limit cannot be negative", - ): - BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=2, - retryable_exceptions=(ValueError,), - retry_limit=sys.getrecursionlimit() + 1, - ) - - assert conn.state == "fail" - assert conn.handle is None - assert attempts == 0 - - def test_retry_connection_with_exponential_backoff_timeout(self): - """Test retry_connection with an exponential backoff timeout. - - We assert the provided exponential backoff function gets passed the right attempt number - and produces the expected timeouts. - """ - conn = self.postgres_connection - attempts = 0 - timeouts = [] - - def connect(): - nonlocal attempts - attempts += 1 - - if attempts < 12: - raise ValueError("Keep trying!") - return True - - def exp_backoff(n): - nonlocal timeouts - computed = 2**n - # We store the computed values to ensure they match the expected backoff... - timeouts.append((n, computed)) - # but we return 0 as we don't want the test to go on forever. - return 0 - - conn = BaseConnectionManager.retry_connection( - conn, - connect, - self.logger, - retry_timeout=exp_backoff, - retryable_exceptions=(ValueError,), - retry_limit=12, - ) - - assert conn.state == "open" - assert conn.handle is True - assert attempts == 12 - assert timeouts == [(n, 2**n) for n in range(12)] - - -class PostgresConnectionManagerTest(unittest.TestCase): - def setUp(self): - self.credentials = PostgresCredentials( - host="localhost", - user="test-user", - port=1111, - password="test-password", - database="test-db", - schema="test-schema", - retries=2, - ) - self.connection = Connection("postgres", None, self.credentials) - - def test_open(self): - """Test opening a Postgres Connection with failures in the first 3 attempts. - - This test uses a Connection populated with test PostgresCredentials values, and - expects a mock connect to raise a psycopg2.errors.ConnectionFailuer - in the first 3 invocations, after which the mock should return True. As a result: - * The Connection state should be "open" and the handle True, as connect - returns in the 4th attempt. - * The resulting attempt count should be 4. 
- """ - conn = self.connection - attempt = 0 - - def connect(*args, **kwargs): - nonlocal attempt - attempt += 1 - - if attempt <= 2: - raise psycopg2.errors.ConnectionFailure("Connection has failed") - - return True - - with mock.patch("psycopg2.connect", wraps=connect) as mock_connect: - PostgresConnectionManager.open(conn) - - assert mock_connect.call_count == 3 - - assert attempt == 3 - assert conn.state == "open" - assert conn.handle is True diff --git a/tests/unit/test_adapter_factory.py b/tests/unit/test_adapter_factory.py deleted file mode 100644 index 366e7b32e3d..00000000000 --- a/tests/unit/test_adapter_factory.py +++ /dev/null @@ -1,94 +0,0 @@ -from pathlib import Path -import unittest -from unittest import mock -from dbt.adapters.factory import AdapterContainer -from dbt.adapters.base.plugin import AdapterPlugin -from dbt.include.global_project import ( - PROJECT_NAME as GLOBAL_PROJECT_NAME, -) - - -class TestGetPackageNames(unittest.TestCase): - def setUp(self): - with mock.patch("dbt.adapters.base.plugin.project_name_from_path") as get_name: - get_name.return_value = "root" - self.root_plugin = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/root/plugin", - dependencies=["childa", "childb"], - ) - get_name.return_value = "pkg_childa" - self.childa = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/childa", - ) - get_name.return_value = "pkg_childb" - self.childb = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/childb", - dependencies=["childc"], - ) - get_name.return_value = "pkg_childc" - self.childc = AdapterPlugin( - adapter=mock.MagicMock(), - credentials=mock.MagicMock(), - include_path="/path/to/childc", - ) - - self._mock_modules = { - "root": self.root_plugin, - "childa": self.childa, - "childb": self.childb, - "childc": self.childc, - } - - self.factory = AdapterContainer() - - self.load_patch = mock.patch.object(AdapterContainer, "load_plugin") - self.mock_load = self.load_patch.start() - - def mock_load_plugin(name: str): - try: - plugin = self._mock_modules[name] - except KeyError: - raise RuntimeError(f"test could not find adapter type {name}!") - self.factory.plugins[name] = plugin - self.factory.packages[plugin.project_name] = Path(plugin.include_path) - for dep in plugin.dependencies: - self.factory.load_plugin(dep) - - self.mock_load.side_effect = mock_load_plugin - - def tearDown(self): - self.load_patch.stop() - - def test_no_packages(self): - assert self.factory.get_adapter_package_names(None) == [GLOBAL_PROJECT_NAME] - - def test_one_package(self): - self.factory.load_plugin("childc") - assert self.factory.get_adapter_package_names("childc") == [ - "pkg_childc", - GLOBAL_PROJECT_NAME, - ] - - def test_simple_child_packages(self): - self.factory.load_plugin("childb") - assert self.factory.get_adapter_package_names("childb") == [ - "pkg_childb", - "pkg_childc", - GLOBAL_PROJECT_NAME, - ] - - def test_layered_child_packages(self): - self.factory.load_plugin("root") - assert self.factory.get_adapter_package_names("root") == [ - "root", - "pkg_childa", - "pkg_childb", - "pkg_childc", - GLOBAL_PROJECT_NAME, - ] diff --git a/tests/unit/test_agate_helper.py b/tests/unit/test_agate_helper.py deleted file mode 100644 index a2d9fcdb0e2..00000000000 --- a/tests/unit/test_agate_helper.py +++ /dev/null @@ -1,202 +0,0 @@ -import unittest - -import agate - -from datetime import datetime -from decimal import Decimal 
-from isodate import tzinfo -import os -from shutil import rmtree -from tempfile import mkdtemp -from dbt.clients import agate_helper - -SAMPLE_CSV_DATA = """a,b,c,d,e,f,g -1,n,test,3.2,20180806T11:33:29.320Z,True,NULL -2,y,asdf,900,20180806T11:35:29.320Z,False,a string""" - -SAMPLE_CSV_BOM_DATA = "\ufeff" + SAMPLE_CSV_DATA - - -EXPECTED = [ - [ - 1, - "n", - "test", - Decimal("3.2"), - datetime(2018, 8, 6, 11, 33, 29, 320000, tzinfo=tzinfo.Utc()), - True, - None, - ], - [ - 2, - "y", - "asdf", - 900, - datetime(2018, 8, 6, 11, 35, 29, 320000, tzinfo=tzinfo.Utc()), - False, - "a string", - ], -] - - -EXPECTED_STRINGS = [ - ["1", "n", "test", "3.2", "20180806T11:33:29.320Z", "True", None], - ["2", "y", "asdf", "900", "20180806T11:35:29.320Z", "False", "a string"], -] - - -class TestAgateHelper(unittest.TestCase): - def setUp(self): - self.tempdir = mkdtemp() - - def tearDown(self): - rmtree(self.tempdir) - - def test_from_csv(self): - path = os.path.join(self.tempdir, "input.csv") - with open(path, "wb") as fp: - fp.write(SAMPLE_CSV_DATA.encode("utf-8")) - tbl = agate_helper.from_csv(path, ()) - self.assertEqual(len(tbl), len(EXPECTED)) - for idx, row in enumerate(tbl): - self.assertEqual(list(row), EXPECTED[idx]) - - def test_bom_from_csv(self): - path = os.path.join(self.tempdir, "input.csv") - with open(path, "wb") as fp: - fp.write(SAMPLE_CSV_BOM_DATA.encode("utf-8")) - tbl = agate_helper.from_csv(path, ()) - self.assertEqual(len(tbl), len(EXPECTED)) - for idx, row in enumerate(tbl): - self.assertEqual(list(row), EXPECTED[idx]) - - def test_from_csv_all_reserved(self): - path = os.path.join(self.tempdir, "input.csv") - with open(path, "wb") as fp: - fp.write(SAMPLE_CSV_DATA.encode("utf-8")) - tbl = agate_helper.from_csv(path, tuple("abcdefg")) - self.assertEqual(len(tbl), len(EXPECTED_STRINGS)) - for expected, row in zip(EXPECTED_STRINGS, tbl): - self.assertEqual(list(row), expected) - - def test_from_data(self): - column_names = ["a", "b", "c", "d", "e", "f", "g"] - data = [ - { - "a": "1", - "b": "n", - "c": "test", - "d": "3.2", - "e": "20180806T11:33:29.320Z", - "f": "True", - "g": "NULL", - }, - { - "a": "2", - "b": "y", - "c": "asdf", - "d": "900", - "e": "20180806T11:35:29.320Z", - "f": "False", - "g": "a string", - }, - ] - tbl = agate_helper.table_from_data(data, column_names) - self.assertEqual(len(tbl), len(EXPECTED)) - for idx, row in enumerate(tbl): - self.assertEqual(list(row), EXPECTED[idx]) - - def test_datetime_formats(self): - path = os.path.join(self.tempdir, "input.csv") - datetimes = [ - "20180806T11:33:29.000Z", - "20180806T11:33:29Z", - "20180806T113329Z", - ] - expected = datetime(2018, 8, 6, 11, 33, 29, 0, tzinfo=tzinfo.Utc()) - for dt in datetimes: - with open(path, "wb") as fp: - fp.write("a\n{}".format(dt).encode("utf-8")) - tbl = agate_helper.from_csv(path, ()) - self.assertEqual(tbl[0][0], expected) - - def test_merge_allnull(self): - t1 = agate.Table([(1, "a", None), (2, "b", None)], ("a", "b", "c")) - t2 = agate.Table([(3, "c", None), (4, "d", None)], ("a", "b", "c")) - result = agate_helper.merge_tables([t1, t2]) - self.assertEqual(result.column_names, ("a", "b", "c")) - assert isinstance(result.column_types[0], agate.data_types.Number) - assert isinstance(result.column_types[1], agate.data_types.Text) - assert isinstance(result.column_types[2], agate.data_types.Number) - self.assertEqual(len(result), 4) - - def test_merge_mixed(self): - t1 = agate.Table([(1, "a", None), (2, "b", None)], ("a", "b", "c")) - t2 = agate.Table([(3, "c", "dog"), (4, "d", 
"cat")], ("a", "b", "c")) - t3 = agate.Table([(3, "c", None), (4, "d", None)], ("a", "b", "c")) - - result = agate_helper.merge_tables([t1, t2]) - self.assertEqual(result.column_names, ("a", "b", "c")) - assert isinstance(result.column_types[0], agate.data_types.Number) - assert isinstance(result.column_types[1], agate.data_types.Text) - assert isinstance(result.column_types[2], agate.data_types.Text) - self.assertEqual(len(result), 4) - - result = agate_helper.merge_tables([t2, t3]) - self.assertEqual(result.column_names, ("a", "b", "c")) - assert isinstance(result.column_types[0], agate.data_types.Number) - assert isinstance(result.column_types[1], agate.data_types.Text) - assert isinstance(result.column_types[2], agate.data_types.Text) - self.assertEqual(len(result), 4) - - result = agate_helper.merge_tables([t1, t2, t3]) - self.assertEqual(result.column_names, ("a", "b", "c")) - assert isinstance(result.column_types[0], agate.data_types.Number) - assert isinstance(result.column_types[1], agate.data_types.Text) - assert isinstance(result.column_types[2], agate.data_types.Text) - self.assertEqual(len(result), 6) - - def test_nocast_string_types(self): - # String fields should not be coerced into a representative type - # See: https://github.com/dbt-labs/dbt-core/issues/2984 - - column_names = ["a", "b", "c", "d", "e"] - result_set = [ - {"a": "0005", "b": "01T00000aabbccdd", "c": "true", "d": 10, "e": False}, - {"a": "0006", "b": "01T00000aabbccde", "c": "false", "d": 11, "e": True}, - ] - - tbl = agate_helper.table_from_data_flat(data=result_set, column_names=column_names) - self.assertEqual(len(tbl), len(result_set)) - - expected = [ - ["0005", "01T00000aabbccdd", "true", Decimal(10), False], - ["0006", "01T00000aabbccde", "false", Decimal(11), True], - ] - - for i, row in enumerate(tbl): - self.assertEqual(list(row), expected[i]) - - def test_nocast_bool_01(self): - # True and False values should not be cast to 1 and 0, and vice versa - # See: https://github.com/dbt-labs/dbt-core/issues/4511 - - column_names = ["a", "b"] - result_set = [ - {"a": True, "b": 1}, - {"a": False, "b": 0}, - ] - - tbl = agate_helper.table_from_data_flat(data=result_set, column_names=column_names) - self.assertEqual(len(tbl), len(result_set)) - - assert isinstance(tbl.column_types[0], agate.data_types.Boolean) - assert isinstance(tbl.column_types[1], agate.data_types.Number) - - expected = [ - [True, Decimal(1)], - [False, Decimal(0)], - ] - - for i, row in enumerate(tbl): - self.assertEqual(list(row), expected[i]) diff --git a/tests/unit/test_base_adapter.py b/tests/unit/test_base_adapter.py deleted file mode 100644 index 66d8af5c5d8..00000000000 --- a/tests/unit/test_base_adapter.py +++ /dev/null @@ -1,184 +0,0 @@ -from argparse import Namespace -from unittest import mock -import pytest - -from dbt.adapters.base.impl import BaseAdapter, ConstraintSupport -import dbt.flags as flags - - -class TestBaseAdapterConstraintRendering: - @pytest.fixture(scope="class", autouse=True) - def setUp(self): - flags.set_from_args(Namespace(), None) - - @pytest.fixture(scope="class") - def connection_manager(request): - mock_connection_manager = mock.Mock() - mock_connection_manager.TYPE = "base" - return mock_connection_manager - - column_constraints = [ - ([{"type": "check"}], ["column_name integer"]), - ([{"type": "check", "name": "test_name"}], ["column_name integer"]), - ( - [{"type": "check", "expression": "test expression"}], - ["column_name integer check (test expression)"], - ), - ([{"type": "not_null"}], 
["column_name integer not null"]), - ( - [{"type": "not_null", "expression": "test expression"}], - ["column_name integer not null test expression"], - ), - ([{"type": "unique"}], ["column_name integer unique"]), - ( - [{"type": "unique", "expression": "test expression"}], - ["column_name integer unique test expression"], - ), - ([{"type": "primary_key"}], ["column_name integer primary key"]), - ( - [{"type": "primary_key", "expression": "test expression"}], - ["column_name integer primary key test expression"], - ), - ([{"type": "foreign_key"}], ["column_name integer"]), - ( - [{"type": "foreign_key", "expression": "other_table (c1)"}], - ["column_name integer references other_table (c1)"], - ), - ([{"type": "check"}, {"type": "unique"}], ["column_name integer unique"]), - ] - - @pytest.mark.parametrize("constraints,expected_rendered_constraints", column_constraints) - def test_render_raw_columns_constraints( - self, constraints, expected_rendered_constraints, request - ): - BaseAdapter.ConnectionManager = request.getfixturevalue("connection_manager") - BaseAdapter.CONSTRAINT_SUPPORT = { - constraint: ConstraintSupport.ENFORCED for constraint in BaseAdapter.CONSTRAINT_SUPPORT - } - - rendered_constraints = BaseAdapter.render_raw_columns_constraints( - { - "column_name": { - "name": "column_name", - "data_type": "integer", - "constraints": constraints, - } - } - ) - assert rendered_constraints == expected_rendered_constraints - - column_constraints_unsupported = [ - ([{"type": "check"}], ["column_name integer"]), - ([{"type": "check", "expression": "test expression"}], ["column_name integer"]), - ([{"type": "not_null"}], ["column_name integer"]), - ([{"type": "not_null", "expression": "test expression"}], ["column_name integer"]), - ([{"type": "unique"}], ["column_name integer"]), - ([{"type": "unique", "expression": "test expression"}], ["column_name integer"]), - ([{"type": "primary_key"}], ["column_name integer"]), - ([{"type": "primary_key", "expression": "test expression"}], ["column_name integer"]), - ([{"type": "foreign_key"}], ["column_name integer"]), - ([{"type": "check"}, {"type": "unique"}], ["column_name integer"]), - ] - - @pytest.mark.parametrize( - "constraints,expected_rendered_constraints", column_constraints_unsupported - ) - def test_render_raw_columns_constraints_unsupported( - self, constraints, expected_rendered_constraints, request - ): - BaseAdapter.ConnectionManager = request.getfixturevalue("connection_manager") - BaseAdapter.CONSTRAINT_SUPPORT = { - constraint: ConstraintSupport.NOT_SUPPORTED - for constraint in BaseAdapter.CONSTRAINT_SUPPORT - } - - rendered_constraints = BaseAdapter.render_raw_columns_constraints( - { - "column_name": { - "name": "column_name", - "data_type": "integer", - "constraints": constraints, - } - } - ) - assert rendered_constraints == expected_rendered_constraints - - model_constraints = [ - ([{"type": "check"}], []), - ([{"type": "check", "expression": "test expression"}], ["check (test expression)"]), - ( - [{"type": "check", "expression": "test expression", "name": "test_name"}], - ["constraint test_name check (test expression)"], - ), - ([{"type": "not_null"}], []), - ([{"type": "not_null", "expression": "test expression"}], []), - ([{"type": "unique", "columns": ["c1", "c2"]}], ["unique (c1, c2)"]), - ([{"type": "unique", "columns": ["c1", "c2"]}], ["unique (c1, c2)"]), - ( - [ - { - "type": "unique", - "columns": ["c1", "c2"], - "expression": "test expression", - "name": "test_name", - } - ], - ["constraint test_name unique test 
expression (c1, c2)"], - ), - ([{"type": "primary_key", "columns": ["c1", "c2"]}], ["primary key (c1, c2)"]), - ( - [{"type": "primary_key", "columns": ["c1", "c2"], "expression": "test expression"}], - ["primary key test expression (c1, c2)"], - ), - ( - [ - { - "type": "primary_key", - "columns": ["c1", "c2"], - "expression": "test expression", - "name": "test_name", - } - ], - ["constraint test_name primary key test expression (c1, c2)"], - ), - ( - [{"type": "foreign_key", "columns": ["c1", "c2"], "expression": "other_table (c1)"}], - ["foreign key (c1, c2) references other_table (c1)"], - ), - ( - [ - { - "type": "foreign_key", - "columns": ["c1", "c2"], - "expression": "other_table (c1)", - "name": "test_name", - } - ], - ["constraint test_name foreign key (c1, c2) references other_table (c1)"], - ), - ] - - @pytest.mark.parametrize("constraints,expected_rendered_constraints", model_constraints) - def test_render_raw_model_constraints( - self, constraints, expected_rendered_constraints, request - ): - BaseAdapter.ConnectionManager = request.getfixturevalue("connection_manager") - BaseAdapter.CONSTRAINT_SUPPORT = { - constraint: ConstraintSupport.ENFORCED for constraint in BaseAdapter.CONSTRAINT_SUPPORT - } - - rendered_constraints = BaseAdapter.render_raw_model_constraints(constraints) - assert rendered_constraints == expected_rendered_constraints - - @pytest.mark.parametrize("constraints,expected_rendered_constraints", model_constraints) - def test_render_raw_model_constraints_unsupported( - self, constraints, expected_rendered_constraints, request - ): - BaseAdapter.ConnectionManager = request.getfixturevalue("connection_manager") - BaseAdapter.CONSTRAINT_SUPPORT = { - constraint: ConstraintSupport.NOT_SUPPORTED - for constraint in BaseAdapter.CONSTRAINT_SUPPORT - } - - rendered_constraints = BaseAdapter.render_raw_model_constraints(constraints) - assert rendered_constraints == [] diff --git a/tests/unit/test_base_column.py b/tests/unit/test_base_column.py deleted file mode 100644 index aaff40621e6..00000000000 --- a/tests/unit/test_base_column.py +++ /dev/null @@ -1,30 +0,0 @@ -import unittest - -import decimal - -from dbt.adapters.base import Column - - -class TestStringType(unittest.TestCase): - def test__character_type(self): - col = Column("fieldname", "character", char_size=10) - - self.assertEqual(col.data_type, "character varying(10)") - - -class TestNumericType(unittest.TestCase): - def test__numeric_type(self): - col = Column( - "fieldname", - "numeric", - numeric_precision=decimal.Decimal("12"), - numeric_scale=decimal.Decimal("2"), - ) - - self.assertEqual(col.data_type, "numeric(12,2)") - - def test__numeric_type_with_no_precision(self): - # PostgreSQL, at least, will allow empty numeric precision - col = Column("fieldname", "numeric", numeric_precision=None) - - self.assertEqual(col.data_type, "numeric") diff --git a/tests/unit/test_base_context.py b/tests/unit/test_base_context.py deleted file mode 100644 index 0dc2d93ddca..00000000000 --- a/tests/unit/test_base_context.py +++ /dev/null @@ -1,22 +0,0 @@ -import os - -from dbt.context.base import BaseContext -from jinja2.runtime import Undefined - - -class TestBaseContext: - def test_log_jinja_undefined(self): - # regression test for CT-2259 - try: - os.environ["DBT_ENV_SECRET_LOG_TEST"] = "cats_are_cool" - BaseContext.log(msg=Undefined(), info=True) - except Exception as e: - assert False, f"Logging an jinja2.Undefined object raises an exception: {e}" - - def test_log_with_dbt_env_secret(self): - # regression test 
for CT-1783 - try: - os.environ["DBT_ENV_SECRET_LOG_TEST"] = "cats_are_cool" - BaseContext.log({"fact1": "I like cats"}, info=True) - except Exception as e: - assert False, f"Logging while a `DBT_ENV_SECRET` was set raised an exception: {e}" diff --git a/tests/unit/test_behavior_flags.py b/tests/unit/test_behavior_flags.py new file mode 100644 index 00000000000..d899a83f283 --- /dev/null +++ b/tests/unit/test_behavior_flags.py @@ -0,0 +1,66 @@ +import pytest + +from dbt.tracking import ( + disable_tracking, + initialize_from_flags, + track_behavior_change_warn, +) +from dbt_common.behavior_flags import Behavior +from dbt_common.events.event_manager_client import ( + add_callback_to_manager, + cleanup_event_logger, +) + + +@pytest.fixture +def snowplow_tracker(mocker): + # initialize `active_user` without writing the cookie to disk + initialize_from_flags(True, "") + mocker.patch("dbt.tracking.User.set_cookie").return_value = {"id": 42} + + # add the relevant callback to the event manager + add_callback_to_manager(track_behavior_change_warn) + + # don't make a call, catch the request + # to avoid confusion, this is snowplow_tracker's track, not our wrapper that's also named track + snowplow_tracker = mocker.patch("dbt.tracking.tracker.track") + + yield snowplow_tracker + + # teardown + cleanup_event_logger() + disable_tracking() + + +def test_false_evaluation_triggers_snowplow_tracking(snowplow_tracker): + behavior = Behavior( + [{"name": "my_flag", "default": False, "description": "This is a false flag."}], {} + ) + if behavior.my_flag: + # trigger a False evaluation + assert False, "This flag should evaluate to false and skip this line" + assert snowplow_tracker.called + + +def test_true_evaluation_does_not_trigger_snowplow_tracking(snowplow_tracker): + behavior = Behavior( + [{"name": "my_flag", "default": True, "description": "This is a true flag."}], {} + ) + if behavior.my_flag: + pass + else: + # trigger a True evaluation + assert False, "This flag should evaluate to false and skip this line" + assert not snowplow_tracker.called + + +def test_false_evaluation_does_not_trigger_snowplow_tracking_when_disabled(snowplow_tracker): + disable_tracking() + + behavior = Behavior( + [{"name": "my_flag", "default": False, "description": "This is a false flag."}], {} + ) + if behavior.my_flag: + # trigger a False evaluation + assert False, "This flag should evaluate to false and skip this line" + assert not snowplow_tracker.called diff --git a/tests/unit/test_cache.py b/tests/unit/test_cache.py deleted file mode 100644 index 3cc167fc783..00000000000 --- a/tests/unit/test_cache.py +++ /dev/null @@ -1,524 +0,0 @@ -from unittest import TestCase -from dbt.adapters.cache import RelationsCache -from dbt.adapters.base.relation import BaseRelation -from multiprocessing.dummy import Pool as ThreadPool -import dbt.exceptions - -import random -import time -from dbt.flags import set_from_args -from argparse import Namespace - -set_from_args(Namespace(WARN_ERROR=False), None) - - -def make_relation(database, schema, identifier): - return BaseRelation.create(database=database, schema=schema, identifier=identifier) - - -def make_mock_relationship(database, schema, identifier): - return BaseRelation.create( - database=database, schema=schema, identifier=identifier, type="view" - ) - - -class TestCache(TestCase): - def setUp(self): - self.cache = RelationsCache() - - def assert_relations_state(self, database, schema, identifiers): - relations = self.cache.get_relations(database, schema) - for identifier, expect 
in identifiers.items(): - found = any( - (r.identifier == identifier and r.schema == schema and r.database == database) - for r in relations - ) - msg = "{}.{}.{} was{} found in the cache!".format( - database, schema, identifier, "" if found else " not" - ) - self.assertEqual(expect, found, msg) - - def assert_relations_exist(self, database, schema, *identifiers): - self.assert_relations_state(database, schema, {k: True for k in identifiers}) - - def assert_relations_do_not_exist(self, database, schema, *identifiers): - self.assert_relations_state(database, schema, {k: False for k in identifiers}) - - -class TestEmpty(TestCache): - def test_empty(self): - self.assertEqual(len(self.cache.relations), 0) - self.assertEqual(len(self.cache.get_relations("dbt", "test")), 0) - - -class TestDrop(TestCache): - def setUp(self): - super().setUp() - self.cache.add(make_relation("dbt", "foo", "bar")) - - def test_missing_identifier_ignored(self): - self.cache.drop(make_relation("dbt", "foo", "bar1")) - self.assert_relations_exist("dbt", "foo", "bar") - self.assertEqual(len(self.cache.relations), 1) - - def test_missing_schema_ignored(self): - self.cache.drop(make_relation("dbt", "foo1", "bar")) - self.assert_relations_exist("dbt", "foo", "bar") - self.assertEqual(len(self.cache.relations), 1) - - def test_missing_db_ignored(self): - self.cache.drop(make_relation("dbt1", "foo", "bar")) - self.assert_relations_exist("dbt", "foo", "bar") - self.assertEqual(len(self.cache.relations), 1) - - def test_drop(self): - self.cache.drop(make_relation("dbt", "foo", "bar")) - self.assert_relations_do_not_exist("dbt", "foo", "bar") - self.assertEqual(len(self.cache.relations), 0) - - -class TestAddLink(TestCache): - def setUp(self): - super().setUp() - self.cache.add(make_relation("dbt", "schema", "foo")) - self.cache.add(make_relation("dbt_2", "schema", "bar")) - self.cache.add(make_relation("dbt", "schema_2", "bar")) - - def test_no_src(self): - self.assert_relations_exist("dbt", "schema", "foo") - self.assert_relations_do_not_exist("dbt", "schema", "bar") - - self.cache.add_link( - make_relation("dbt", "schema", "bar"), make_relation("dbt", "schema", "foo") - ) - - self.assert_relations_exist("dbt", "schema", "foo", "bar") - - def test_no_dst(self): - self.assert_relations_exist("dbt", "schema", "foo") - self.assert_relations_do_not_exist("dbt", "schema", "bar") - - self.cache.add_link( - make_relation("dbt", "schema", "foo"), make_relation("dbt", "schema", "bar") - ) - - self.assert_relations_exist("dbt", "schema", "foo", "bar") - - -class TestRename(TestCache): - def setUp(self): - super().setUp() - self.cache.add(make_relation("DBT", "schema", "foo")) - self.assert_relations_exist("DBT", "schema", "foo") - self.assertEqual(self.cache.schemas, {("dbt", "schema")}) - - def test_no_source_error(self): - # dest should be created anyway (it's probably a temp table) - self.cache.rename( - make_relation("DBT", "schema", "bar"), make_relation("DBT", "schema", "baz") - ) - - self.assertEqual(len(self.cache.relations), 2) - self.assert_relations_exist("DBT", "schema", "foo", "baz") - - def test_dest_exists_error(self): - foo = make_relation("DBT", "schema", "foo") - bar = make_relation("DBT", "schema", "bar") - self.cache.add(bar) - self.assert_relations_exist("DBT", "schema", "foo", "bar") - - with self.assertRaises(dbt.exceptions.DbtInternalError): - self.cache.rename(foo, bar) - - self.assert_relations_exist("DBT", "schema", "foo", "bar") - - def test_dest_different_db(self): - self.cache.rename( - 
make_relation("DBT", "schema", "foo"), make_relation("DBT_2", "schema", "foo") - ) - self.assert_relations_exist("DBT_2", "schema", "foo") - self.assert_relations_do_not_exist("DBT", "schema", "foo") - # we know about both schemas: dbt has nothing, dbt_2 has something. - self.assertEqual(self.cache.schemas, {("dbt_2", "schema"), ("dbt", "schema")}) - self.assertEqual(len(self.cache.relations), 1) - - def test_rename_identifier(self): - self.cache.rename( - make_relation("DBT", "schema", "foo"), make_relation("DBT", "schema", "bar") - ) - - self.assert_relations_exist("DBT", "schema", "bar") - self.assert_relations_do_not_exist("DBT", "schema", "foo") - self.assertEqual(self.cache.schemas, {("dbt", "schema")}) - - relation = self.cache.relations[("dbt", "schema", "bar")] - self.assertEqual(relation.inner.schema, "schema") - self.assertEqual(relation.inner.identifier, "bar") - self.assertEqual(relation.schema, "schema") - self.assertEqual(relation.identifier, "bar") - - def test_rename_db(self): - self.cache.rename( - make_relation("DBT", "schema", "foo"), make_relation("DBT_2", "schema", "foo") - ) - - self.assertEqual(len(self.cache.get_relations("DBT", "schema")), 0) - self.assertEqual(len(self.cache.get_relations("DBT_2", "schema")), 1) - self.assert_relations_exist("DBT_2", "schema", "foo") - self.assert_relations_do_not_exist("DBT", "schema", "foo") - # we know about both schemas: dbt has nothing, dbt_2 has something. - self.assertEqual(self.cache.schemas, {("dbt_2", "schema"), ("dbt", "schema")}) - - relation = self.cache.relations[("dbt_2", "schema", "foo")] - self.assertEqual(relation.inner.database, "DBT_2") - self.assertEqual(relation.inner.schema, "schema") - self.assertEqual(relation.inner.identifier, "foo") - self.assertEqual(relation.database, "dbt_2") - self.assertEqual(relation.schema, "schema") - self.assertEqual(relation.identifier, "foo") - - def test_rename_schema(self): - self.cache.rename( - make_relation("DBT", "schema", "foo"), make_relation("DBT", "schema_2", "foo") - ) - - self.assertEqual(len(self.cache.get_relations("DBT", "schema")), 0) - self.assertEqual(len(self.cache.get_relations("DBT", "schema_2")), 1) - self.assert_relations_exist("DBT", "schema_2", "foo") - self.assert_relations_do_not_exist("DBT", "schema", "foo") - # we know about both schemas: schema has nothing, schema_2 has something. 
- self.assertEqual(self.cache.schemas, {("dbt", "schema_2"), ("dbt", "schema")}) - - relation = self.cache.relations[("dbt", "schema_2", "foo")] - self.assertEqual(relation.inner.database, "DBT") - self.assertEqual(relation.inner.schema, "schema_2") - self.assertEqual(relation.inner.identifier, "foo") - self.assertEqual(relation.database, "dbt") - self.assertEqual(relation.schema, "schema_2") - self.assertEqual(relation.identifier, "foo") - - -class TestGetRelations(TestCache): - def setUp(self): - super().setUp() - self.relation = make_relation("dbt", "foo", "bar") - self.cache.add(self.relation) - - def test_get_by_name(self): - relations = self.cache.get_relations("dbt", "foo") - self.assertEqual(len(relations), 1) - self.assertIs(relations[0], self.relation) - - def test_get_by_uppercase_schema(self): - relations = self.cache.get_relations("dbt", "FOO") - self.assertEqual(len(relations), 1) - self.assertIs(relations[0], self.relation) - - def test_get_by_uppercase_db(self): - relations = self.cache.get_relations("DBT", "foo") - self.assertEqual(len(relations), 1) - self.assertIs(relations[0], self.relation) - - def test_get_by_uppercase_schema_and_db(self): - relations = self.cache.get_relations("DBT", "FOO") - self.assertEqual(len(relations), 1) - self.assertIs(relations[0], self.relation) - - def test_get_by_wrong_db(self): - relations = self.cache.get_relations("dbt_2", "foo") - self.assertEqual(len(relations), 0) - - def test_get_by_wrong_schema(self): - relations = self.cache.get_relations("dbt", "foo_2") - self.assertEqual(len(relations), 0) - - -class TestAdd(TestCache): - def setUp(self): - super().setUp() - self.relation = make_relation("dbt", "foo", "bar") - self.cache.add(self.relation) - - def test_add(self): - relations = self.cache.get_relations("dbt", "foo") - self.assertEqual(len(relations), 1) - self.assertEqual(len(self.cache.relations), 1) - self.assertIs(relations[0], self.relation) - - def test_add_twice(self): - # add a new relation with same name - self.cache.add(make_relation("dbt", "foo", "bar")) - self.assertEqual(len(self.cache.relations), 1) - self.assertEqual(self.cache.schemas, {("dbt", "foo")}) - self.assert_relations_exist("dbt", "foo", "bar") - - def add_uppercase_schema(self): - self.cache.add(make_relation("dbt", "FOO", "baz")) - - self.assertEqual(len(self.cache.relations), 2) - relations = self.cache.get_relations("dbt", "foo") - self.assertEqual(len(relations), 2) - self.assertEqual(self.cache.schemas, {("dbt", "foo")}) - self.assertIsNot(self.cache.relations[("dbt", "foo", "bar")].inner, None) - self.assertIsNot(self.cache.relations[("dbt", "foo", "baz")].inner, None) - - def add_different_db(self): - self.cache.add(make_relation("dbt_2", "foo", "bar")) - - self.assertEqual(len(self.cache.relations), 2) - self.assertEqual(len(self.cache.get_relations("dbt_2", "foo")), 1) - self.assertEqual(len(self.cache.get_relations("dbt", "foo")), 1) - self.assertEqual(self.cache.schemas, {("dbt", "foo"), ("dbt_2", "foo")}) - self.assertIsNot(self.cache.relations[("dbt", "foo", "bar")].inner, None) - self.assertIsNot(self.cache.relations[("dbt_2", "foo", "bar")].inner, None) - - -class TestLikeDbt(TestCase): - def setUp(self): - self.cache = RelationsCache() - self._sleep = True - - # add a bunch of cache entries - for ident in "abcdef": - self.cache.add(make_relation("dbt", "schema", ident)) - # 'b' references 'a' - self.cache.add_link( - make_relation("dbt", "schema", "a"), make_relation("dbt", "schema", "b") - ) - # and 'c' references 'b' - 
self.cache.add_link( - make_relation("dbt", "schema", "b"), make_relation("dbt", "schema", "c") - ) - # and 'd' references 'b' - self.cache.add_link( - make_relation("dbt", "schema", "b"), make_relation("dbt", "schema", "d") - ) - # and 'e' references 'a' - self.cache.add_link( - make_relation("dbt", "schema", "a"), make_relation("dbt", "schema", "e") - ) - # and 'f' references 'd' - self.cache.add_link( - make_relation("dbt", "schema", "d"), make_relation("dbt", "schema", "f") - ) - # so drop propagation goes (a -> (b -> (c (d -> f))) e) - - def assert_has_relations(self, expected): - current = set(r.identifier for r in self.cache.get_relations("dbt", "schema")) - self.assertEqual(current, expected) - - def test_drop_inner(self): - self.assert_has_relations(set("abcdef")) - self.cache.drop(make_relation("dbt", "schema", "b")) - self.assert_has_relations({"a", "e"}) - - def test_rename_and_drop(self): - self.assert_has_relations(set("abcdef")) - # drop the backup/tmp - self.cache.drop(make_relation("dbt", "schema", "b__backup")) - self.cache.drop(make_relation("dbt", "schema", "b__tmp")) - self.assert_has_relations(set("abcdef")) - # create a new b__tmp - self.cache.add( - make_relation( - "dbt", - "schema", - "b__tmp", - ) - ) - self.assert_has_relations(set("abcdef") | {"b__tmp"}) - # rename b -> b__backup - self.cache.rename( - make_relation("dbt", "schema", "b"), make_relation("dbt", "schema", "b__backup") - ) - self.assert_has_relations(set("acdef") | {"b__tmp", "b__backup"}) - # rename temp to b - self.cache.rename( - make_relation("dbt", "schema", "b__tmp"), make_relation("dbt", "schema", "b") - ) - self.assert_has_relations(set("abcdef") | {"b__backup"}) - - # drop backup, everything that used to depend on b should be gone, but - # b itself should still exist - self.cache.drop(make_relation("dbt", "schema", "b__backup")) - self.assert_has_relations(set("abe")) - relation = self.cache.relations[("dbt", "schema", "a")] - self.assertEqual(len(relation.referenced_by), 1) - - def _rand_sleep(self): - if not self._sleep: - return - time.sleep(random.random() * 0.1) - - def _target(self, ident): - self._rand_sleep() - self.cache.rename( - make_relation("dbt", "schema", ident), - make_relation("dbt", "schema", ident + "__backup"), - ) - self._rand_sleep() - self.cache.add(make_relation("dbt", "schema", ident + "__tmp")) - self._rand_sleep() - self.cache.rename( - make_relation("dbt", "schema", ident + "__tmp"), make_relation("dbt", "schema", ident) - ) - self._rand_sleep() - self.cache.drop(make_relation("dbt", "schema", ident + "__backup")) - return ident, self.cache.get_relations("dbt", "schema") - - def test_threaded(self): - # add three more short subchains for threads to test on - for ident in "ghijklmno": - make_mock_relationship("test_db", "schema", ident) - self.cache.add(make_relation("dbt", "schema", ident)) - - self.cache.add_link( - make_relation("dbt", "schema", "a"), make_relation("dbt", "schema", "g") - ) - self.cache.add_link( - make_relation("dbt", "schema", "g"), make_relation("dbt", "schema", "h") - ) - self.cache.add_link( - make_relation("dbt", "schema", "h"), make_relation("dbt", "schema", "i") - ) - - self.cache.add_link( - make_relation("dbt", "schema", "a"), make_relation("dbt", "schema", "j") - ) - self.cache.add_link( - make_relation("dbt", "schema", "j"), make_relation("dbt", "schema", "k") - ) - self.cache.add_link( - make_relation("dbt", "schema", "k"), make_relation("dbt", "schema", "l") - ) - - self.cache.add_link( - make_relation("dbt", "schema", "a"), 
make_relation("dbt", "schema", "m") - ) - self.cache.add_link( - make_relation("dbt", "schema", "m"), make_relation("dbt", "schema", "n") - ) - self.cache.add_link( - make_relation("dbt", "schema", "n"), make_relation("dbt", "schema", "o") - ) - - pool = ThreadPool(4) - results = list(pool.imap_unordered(self._target, ("b", "g", "j", "m"))) - pool.close() - pool.join() - # at a minimum, we expect each table to "see" itself, its parent ('a'), - # and the unrelated table ('a') - min_expect = { - "b": {"a", "b", "e"}, - "g": {"a", "g", "e"}, - "j": {"a", "j", "e"}, - "m": {"a", "m", "e"}, - } - - for ident, relations in results: - seen = set(r.identifier for r in relations) - self.assertTrue(min_expect[ident].issubset(seen)) - - self.assert_has_relations(set("abgjme")) - - def test_threaded_repeated(self): - for _ in range(10): - self.setUp() - self._sleep = False - self.test_threaded() - - -class TestComplexCache(TestCase): - def setUp(self): - self.cache = RelationsCache() - inputs = [ - ("dbt", "foo", "table1"), - ("dbt", "foo", "table3"), - ("dbt", "foo", "table4"), - ("dbt", "bar", "table2"), - ("dbt", "bar", "table3"), - ("dbt_2", "foo", "table1"), - ("dbt_2", "foo", "table2"), - ] - self.inputs = [make_relation(d, s, i) for d, s, i in inputs] - for relation in self.inputs: - self.cache.add(relation) - - # dbt.foo.table3 references dbt.foo.table1 - # (create view dbt.foo.table3 as (select * from dbt.foo.table1...)) - self.cache.add_link( - make_relation("dbt", "foo", "table1"), make_relation("dbt", "foo", "table3") - ) - # dbt.bar.table3 references dbt.foo.table3 - # (create view dbt.bar.table5 as (select * from dbt.foo.table3...)) - self.cache.add_link( - make_relation("dbt", "foo", "table3"), make_relation("dbt", "bar", "table3") - ) - - # dbt.foo.table4 also references dbt.foo.table1 - self.cache.add_link( - make_relation("dbt", "foo", "table1"), make_relation("dbt", "foo", "table4") - ) - - # and dbt_2.foo.table1 references dbt.foo.table1 - self.cache.add_link( - make_relation("dbt", "foo", "table1"), - make_relation("dbt_2", "foo", "table1"), - ) - - def test_get_relations(self): - self.assertEqual(len(self.cache.get_relations("dbt", "foo")), 3) - self.assertEqual(len(self.cache.get_relations("dbt", "bar")), 2) - self.assertEqual(len(self.cache.get_relations("dbt_2", "foo")), 2) - self.assertEqual(len(self.cache.relations), 7) - - def test_drop_one(self): - # dropping dbt.bar.table2 should only drop itself - self.cache.drop(make_relation("dbt", "bar", "table2")) - self.assertEqual(len(self.cache.get_relations("dbt", "foo")), 3) - self.assertEqual(len(self.cache.get_relations("dbt", "bar")), 1) - self.assertEqual(len(self.cache.get_relations("dbt_2", "foo")), 2) - self.assertEqual(len(self.cache.relations), 6) - - def test_drop_many(self): - # dropping dbt.foo.table1 should drop everything but dbt.bar.table2 and - # dbt_2.foo.table2 - self.cache.drop(make_relation("dbt", "foo", "table1")) - self.assertEqual(len(self.cache.get_relations("dbt", "foo")), 0) - self.assertEqual(len(self.cache.get_relations("dbt", "bar")), 1) - self.assertEqual(len(self.cache.get_relations("dbt_2", "foo")), 1) - self.assertEqual(len(self.cache.relations), 2) - - def test_rename_root(self): - self.cache.rename( - make_relation("dbt", "foo", "table1"), make_relation("dbt", "bar", "table1") - ) - retrieved = self.cache.relations[("dbt", "bar", "table1")].inner - self.assertEqual(retrieved.schema, "bar") - self.assertEqual(retrieved.identifier, "table1") - self.assertEqual(len(self.cache.get_relations("dbt", 
"foo")), 2) - self.assertEqual(len(self.cache.get_relations("dbt", "bar")), 3) - self.assertEqual(len(self.cache.get_relations("dbt_2", "foo")), 2) - self.assertEqual(len(self.cache.relations), 7) - - # make sure drops still cascade from the renamed table - self.cache.drop(make_relation("dbt", "bar", "table1")) - self.assertEqual(len(self.cache.get_relations("dbt", "foo")), 0) - self.assertEqual(len(self.cache.get_relations("dbt", "bar")), 1) - self.assertEqual(len(self.cache.get_relations("dbt_2", "foo")), 1) - self.assertEqual(len(self.cache.relations), 2) - - def test_rename_branch(self): - self.cache.rename( - make_relation("dbt", "foo", "table3"), make_relation("dbt", "foo", "table2") - ) - self.assertEqual(len(self.cache.get_relations("dbt", "foo")), 3) - self.assertEqual(len(self.cache.get_relations("dbt", "bar")), 2) - self.assertEqual(len(self.cache.get_relations("dbt_2", "foo")), 2) - - # make sure drops still cascade through the renamed table - self.cache.drop(make_relation("dbt", "foo", "table1")) - self.assertEqual(len(self.cache.get_relations("dbt", "foo")), 0) - self.assertEqual(len(self.cache.get_relations("dbt", "bar")), 1) - self.assertEqual(len(self.cache.get_relations("dbt_2", "foo")), 1) - self.assertEqual(len(self.cache.relations), 2) diff --git a/tests/unit/test_compilation.py b/tests/unit/test_compilation.py new file mode 100644 index 00000000000..0d5d4b2ea3c --- /dev/null +++ b/tests/unit/test_compilation.py @@ -0,0 +1,195 @@ +import os +import tempfile +from queue import Empty +from unittest import mock + +import pytest + +from dbt.compilation import Graph, Linker +from dbt.graph.cli import parse_difference +from dbt.graph.queue import GraphQueue +from dbt.graph.selector import NodeSelector + + +def _mock_manifest(nodes): + config = mock.MagicMock(enabled=True) + manifest = mock.MagicMock( + nodes={ + n: mock.MagicMock( + unique_id=n, + package_name="pkg", + name=n, + empty=False, + config=config, + fqn=["pkg", n], + is_versioned=False, + ) + for n in nodes + } + ) + manifest.expect.side_effect = lambda n: mock.MagicMock(unique_id=n) + return manifest + + +class TestLinker: + @pytest.fixture + def linker(self) -> Linker: + return Linker() + + def test_linker_add_node(self, linker: Linker) -> None: + expected_nodes = ["A", "B", "C"] + for node in expected_nodes: + linker.add_node(node) + + actual_nodes = linker.nodes() + for node in expected_nodes: + assert node in actual_nodes + + assert len(actual_nodes) == len(expected_nodes) + + def test_linker_write_graph(self, linker: Linker) -> None: + expected_nodes = ["A", "B", "C"] + for node in expected_nodes: + linker.add_node(node) + + manifest = _mock_manifest("ABC") + (fd, fname) = tempfile.mkstemp() + os.close(fd) + try: + linker.write_graph(fname, manifest) + assert os.path.exists(fname) + finally: + os.unlink(fname) + + def assert_would_join(self, queue: GraphQueue) -> None: + """test join() without timeout risk""" + assert queue.inner.unfinished_tasks == 0 + + def _get_graph_queue( + self, + manifest, + linker: Linker, + include=None, + exclude=None, + ) -> GraphQueue: + graph = Graph(linker.graph) + selector = NodeSelector(graph, manifest) + # TODO: The "eager" string below needs to be replaced with programatic access + # to the default value for the indirect selection parameter in + # dbt.cli.params.indirect_selection + # + # Doing that is actually a little tricky, so I'm punting it to a new ticket GH #6397 + spec = parse_difference(include, exclude) + return selector.get_graph_queue(spec) + + def 
test_linker_add_dependency(self, linker: Linker) -> None: + actual_deps = [("A", "B"), ("A", "C"), ("B", "C")] + + for l, r in actual_deps: + linker.dependency(l, r) + + queue = self._get_graph_queue(_mock_manifest("ABC"), linker) + + got = queue.get(block=False) + assert got.unique_id == "C" + with pytest.raises(Empty): + queue.get(block=False) + assert not queue.empty() + queue.mark_done("C") + assert not queue.empty() + + got = queue.get(block=False) + assert got.unique_id == "B" + with pytest.raises(Empty): + queue.get(block=False) + assert not queue.empty() + queue.mark_done("B") + assert not queue.empty() + + got = queue.get(block=False) + assert got.unique_id == "A" + with pytest.raises(Empty): + queue.get(block=False) + assert queue.empty() + queue.mark_done("A") + self.assert_would_join(queue) + assert queue.empty() + + def test_linker_add_disjoint_dependencies(self, linker: Linker) -> None: + actual_deps = [("A", "B")] + additional_node = "Z" + + for l, r in actual_deps: + linker.dependency(l, r) + linker.add_node(additional_node) + + queue = self._get_graph_queue(_mock_manifest("ABCZ"), linker) + # the first one we get must be B, it has the longest dep chain + first = queue.get(block=False) + assert first.unique_id == "B" + assert not queue.empty() + queue.mark_done("B") + assert not queue.empty() + + second = queue.get(block=False) + assert second.unique_id in {"A", "Z"} + assert not queue.empty() + queue.mark_done(second.unique_id) + assert not queue.empty() + + third = queue.get(block=False) + assert third.unique_id in {"A", "Z"} + with pytest.raises(Empty): + queue.get(block=False) + assert second.unique_id != third.unique_id + assert queue.empty() + queue.mark_done(third.unique_id) + self.assert_would_join(queue) + assert queue.empty() + + def test_linker_dependencies_limited_to_some_nodes(self, linker: Linker) -> None: + actual_deps = [("A", "B"), ("B", "C"), ("C", "D")] + + for l, r in actual_deps: + linker.dependency(l, r) + + queue = self._get_graph_queue(_mock_manifest("ABCD"), linker, ["B"]) + got = queue.get(block=False) + assert got.unique_id == "B" + assert queue.empty() + queue.mark_done("B") + self.assert_would_join(queue) + + queue_2 = queue = self._get_graph_queue(_mock_manifest("ABCD"), linker, ["A", "B"]) + got = queue_2.get(block=False) + assert got.unique_id == "B" + assert not queue_2.empty() + with pytest.raises(Empty): + queue_2.get(block=False) + queue_2.mark_done("B") + assert not queue_2.empty() + + got = queue_2.get(block=False) + assert got.unique_id == "A" + assert queue_2.empty() + with pytest.raises(Empty): + queue_2.get(block=False) + assert queue_2.empty() + queue_2.mark_done("A") + self.assert_would_join(queue_2) + + def test__find_cycles__cycles(self, linker: Linker) -> None: + actual_deps = [("A", "B"), ("B", "C"), ("C", "A")] + + for l, r in actual_deps: + linker.dependency(l, r) + + assert linker.find_cycles() is not None + + def test__find_cycles__no_cycles(self, linker: Linker) -> None: + actual_deps = [("A", "B"), ("B", "C"), ("C", "D")] + + for l, r in actual_deps: + linker.dependency(l, r) + + assert linker.find_cycles() is None diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py deleted file mode 100644 index 2ebe5766986..00000000000 --- a/tests/unit/test_config.py +++ /dev/null @@ -1,1382 +0,0 @@ -from copy import deepcopy -from contextlib import contextmanager -import json -import os -import shutil -import tempfile -import unittest -import pytest -from argparse import Namespace - -from unittest import mock -import 
yaml - -import dbt.config -from dbt.constants import DEPENDENCIES_FILE_NAME, PACKAGES_FILE_NAME -import dbt.exceptions -import dbt.tracking -from dbt import flags -from dbt.adapters.factory import load_plugin -from dbt.adapters.postgres import PostgresCredentials -from dbt.contracts.connection import QueryComment, DEFAULT_QUERY_COMMENT -from dbt.contracts.project import PackageConfig, LocalPackage, GitPackage -from dbt.node_types import NodeType -from dbt.semver import VersionSpecifier -from dbt.task.base import ConfiguredTask - -from dbt.flags import set_from_args - -from .utils import normalize - -INITIAL_ROOT = os.getcwd() - - -@contextmanager -def temp_cd(path): - current_path = os.getcwd() - os.chdir(path) - try: - yield - finally: - os.chdir(current_path) - - -@contextmanager -def raises_nothing(): - yield - - -def empty_profile_renderer(): - return dbt.config.renderer.ProfileRenderer({}) - - -def empty_project_renderer(): - return dbt.config.renderer.DbtProjectYamlRenderer() - - -model_config = { - "my_package_name": { - "enabled": True, - "adwords": { - "adwords_ads": {"materialized": "table", "enabled": True, "schema": "analytics"} - }, - "snowplow": { - "snowplow_sessions": { - "sort": "timestamp", - "materialized": "incremental", - "dist": "user_id", - "unique_key": "id", - }, - "base": { - "snowplow_events": { - "sort": ["timestamp", "userid"], - "materialized": "table", - "sort_type": "interleaved", - "dist": "userid", - } - }, - }, - } -} - -model_fqns = frozenset( - ( - ("my_package_name", "snowplow", "snowplow_sessions"), - ("my_package_name", "snowplow", "base", "snowplow_events"), - ("my_package_name", "adwords", "adwords_ads"), - ) -) - - -class Args: - def __init__( - self, - profiles_dir=None, - threads=None, - profile=None, - cli_vars=None, - version_check=None, - project_dir=None, - target=None, - ): - self.profile = profile - self.threads = threads - self.target = target - if profiles_dir is not None: - self.profiles_dir = profiles_dir - flags.PROFILES_DIR = profiles_dir - if cli_vars is not None: - self.vars = cli_vars - if version_check is not None: - self.version_check = version_check - if project_dir is not None: - self.project_dir = project_dir - - -class BaseConfigTest(unittest.TestCase): - """Subclass this, and before calling the superclass setUp, set - self.profiles_dir and self.project_dir. 
- """ - - def setUp(self): - self.default_project_data = { - "version": "0.0.1", - "name": "my_test_project", - "profile": "default", - "config-version": 2, - } - self.default_profile_data = { - "default": { - "outputs": { - "postgres": { - "type": "postgres", - "host": "postgres-db-hostname", - "port": 5555, - "user": "db_user", - "pass": "db_pass", - "dbname": "postgres-db-name", - "schema": "postgres-schema", - "threads": 7, - }, - "with-vars": { - "type": "{{ env_var('env_value_type') }}", - "host": "{{ env_var('env_value_host') }}", - "port": "{{ env_var('env_value_port') | as_number }}", - "user": "{{ env_var('env_value_user') }}", - "pass": "{{ env_var('env_value_pass') }}", - "dbname": "{{ env_var('env_value_dbname') }}", - "schema": "{{ env_var('env_value_schema') }}", - }, - "cli-and-env-vars": { - "type": "{{ env_var('env_value_type') }}", - "host": "{{ var('cli_value_host') }}", - "port": "{{ env_var('env_value_port') | as_number }}", - "user": "{{ env_var('env_value_user') }}", - "pass": "{{ env_var('env_value_pass') }}", - "dbname": "{{ env_var('env_value_dbname') }}", - "schema": "{{ env_var('env_value_schema') }}", - }, - }, - "target": "postgres", - }, - "other": { - "outputs": { - "other-postgres": { - "type": "postgres", - "host": "other-postgres-db-hostname", - "port": 4444, - "user": "other_db_user", - "pass": "other_db_pass", - "dbname": "other-postgres-db-name", - "schema": "other-postgres-schema", - "threads": 2, - } - }, - "target": "other-postgres", - }, - "empty_profile_data": {}, - } - self.args = Namespace( - profiles_dir=self.profiles_dir, - cli_vars={}, - version_check=True, - project_dir=self.project_dir, - target=None, - threads=None, - profile=None, - ) - set_from_args(self.args, None) - self.env_override = { - "env_value_type": "postgres", - "env_value_host": "env-postgres-host", - "env_value_port": "6543", - "env_value_user": "env-postgres-user", - "env_value_pass": "env-postgres-pass", - "env_value_dbname": "env-postgres-dbname", - "env_value_schema": "env-postgres-schema", - "env_value_profile": "default", - } - - def assertRaisesOrReturns(self, exc): - if exc is None: - return raises_nothing() - else: - return self.assertRaises(exc) - - -class BaseFileTest(BaseConfigTest): - def setUp(self): - self.project_dir = normalize(tempfile.mkdtemp()) - self.profiles_dir = normalize(tempfile.mkdtemp()) - super().setUp() - - def tearDown(self): - try: - shutil.rmtree(self.project_dir) - except EnvironmentError: - pass - try: - shutil.rmtree(self.profiles_dir) - except EnvironmentError: - pass - - def project_path(self, name): - return os.path.join(self.project_dir, name) - - def profile_path(self, name): - return os.path.join(self.profiles_dir, name) - - def write_project(self, project_data=None): - if project_data is None: - project_data = self.project_data - with open(self.project_path("dbt_project.yml"), "w") as fp: - yaml.dump(project_data, fp) - - def write_packages(self, package_data): - with open(self.project_path("packages.yml"), "w") as fp: - yaml.dump(package_data, fp) - - def write_profile(self, profile_data=None): - if profile_data is None: - profile_data = self.profile_data - with open(self.profile_path("profiles.yml"), "w") as fp: - yaml.dump(profile_data, fp) - - def write_empty_profile(self): - with open(self.profile_path("profiles.yml"), "w") as fp: - yaml.dump("", fp) - - -class TestProfile(BaseConfigTest): - def setUp(self): - self.profiles_dir = "/invalid-path" - self.project_dir = "/invalid-project-path" - super().setUp() - - def 
from_raw_profiles(self): - renderer = empty_profile_renderer() - return dbt.config.Profile.from_raw_profiles(self.default_profile_data, "default", renderer) - - def test_from_raw_profiles(self): - profile = self.from_raw_profiles() - self.assertEqual(profile.profile_name, "default") - self.assertEqual(profile.target_name, "postgres") - self.assertEqual(profile.threads, 7) - self.assertTrue(profile.user_config.send_anonymous_usage_stats) - self.assertIsNone(profile.user_config.use_colors) - self.assertTrue(isinstance(profile.credentials, PostgresCredentials)) - self.assertEqual(profile.credentials.type, "postgres") - self.assertEqual(profile.credentials.host, "postgres-db-hostname") - self.assertEqual(profile.credentials.port, 5555) - self.assertEqual(profile.credentials.user, "db_user") - self.assertEqual(profile.credentials.password, "db_pass") - self.assertEqual(profile.credentials.schema, "postgres-schema") - self.assertEqual(profile.credentials.database, "postgres-db-name") - - def test_config_override(self): - self.default_profile_data["config"] = { - "send_anonymous_usage_stats": False, - "use_colors": False, - } - profile = self.from_raw_profiles() - self.assertEqual(profile.profile_name, "default") - self.assertEqual(profile.target_name, "postgres") - self.assertFalse(profile.user_config.send_anonymous_usage_stats) - self.assertFalse(profile.user_config.use_colors) - - def test_partial_config_override(self): - self.default_profile_data["config"] = { - "send_anonymous_usage_stats": False, - "printer_width": 60, - } - profile = self.from_raw_profiles() - self.assertEqual(profile.profile_name, "default") - self.assertEqual(profile.target_name, "postgres") - self.assertFalse(profile.user_config.send_anonymous_usage_stats) - self.assertIsNone(profile.user_config.use_colors) - self.assertEqual(profile.user_config.printer_width, 60) - - def test_missing_type(self): - del self.default_profile_data["default"]["outputs"]["postgres"]["type"] - with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: - self.from_raw_profiles() - self.assertIn("type", str(exc.exception)) - self.assertIn("postgres", str(exc.exception)) - self.assertIn("default", str(exc.exception)) - - def test_bad_type(self): - self.default_profile_data["default"]["outputs"]["postgres"]["type"] = "invalid" - with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: - self.from_raw_profiles() - self.assertIn("Credentials", str(exc.exception)) - self.assertIn("postgres", str(exc.exception)) - self.assertIn("default", str(exc.exception)) - - def test_invalid_credentials(self): - del self.default_profile_data["default"]["outputs"]["postgres"]["host"] - with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: - self.from_raw_profiles() - self.assertIn("Credentials", str(exc.exception)) - self.assertIn("postgres", str(exc.exception)) - self.assertIn("default", str(exc.exception)) - - def test_missing_target(self): - profile = self.default_profile_data["default"] - del profile["target"] - profile["outputs"]["default"] = profile["outputs"]["postgres"] - profile = self.from_raw_profiles() - self.assertEqual(profile.profile_name, "default") - self.assertEqual(profile.target_name, "default") - self.assertEqual(profile.credentials.type, "postgres") - - def test_extra_path(self): - self.default_project_data.update( - { - "model-paths": ["models"], - "source-paths": ["other-models"], - } - ) - with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: - project_from_config_norender(self.default_project_data) - - 
self.assertIn("source-paths and model-paths", str(exc.exception)) - self.assertIn("cannot both be defined.", str(exc.exception)) - - def test_profile_invalid_project(self): - renderer = empty_profile_renderer() - with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: - dbt.config.Profile.from_raw_profiles( - self.default_profile_data, "invalid-profile", renderer - ) - - self.assertEqual(exc.exception.result_type, "invalid_project") - self.assertIn("Could not find", str(exc.exception)) - self.assertIn("invalid-profile", str(exc.exception)) - - def test_profile_invalid_target(self): - renderer = empty_profile_renderer() - with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: - dbt.config.Profile.from_raw_profiles( - self.default_profile_data, "default", renderer, target_override="nope" - ) - - self.assertIn("nope", str(exc.exception)) - self.assertIn("- postgres", str(exc.exception)) - self.assertIn("- with-vars", str(exc.exception)) - - def test_no_outputs(self): - renderer = empty_profile_renderer() - - with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: - dbt.config.Profile.from_raw_profiles( - {"some-profile": {"target": "blah"}}, "some-profile", renderer - ) - self.assertIn("outputs not specified", str(exc.exception)) - self.assertIn("some-profile", str(exc.exception)) - - def test_neq(self): - profile = self.from_raw_profiles() - self.assertNotEqual(profile, object()) - - def test_eq(self): - renderer = empty_profile_renderer() - profile = dbt.config.Profile.from_raw_profiles( - deepcopy(self.default_profile_data), "default", renderer - ) - - other = dbt.config.Profile.from_raw_profiles( - deepcopy(self.default_profile_data), "default", renderer - ) - self.assertEqual(profile, other) - - def test_invalid_env_vars(self): - self.env_override["env_value_port"] = "hello" - renderer = empty_profile_renderer() - with mock.patch.dict(os.environ, self.env_override): - with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: - dbt.config.Profile.from_raw_profile_info( - self.default_profile_data["default"], - "default", - renderer, - target_override="with-vars", - ) - self.assertIn("Could not convert value 'hello' into type 'number'", str(exc.exception)) - - -class TestProfileFile(BaseFileTest): - def setUp(self): - super().setUp() - self.write_profile(self.default_profile_data) - - def from_raw_profile_info(self, raw_profile=None, profile_name="default", **kwargs): - if raw_profile is None: - raw_profile = self.default_profile_data["default"] - renderer = empty_profile_renderer() - kw = { - "raw_profile": raw_profile, - "profile_name": profile_name, - "renderer": renderer, - } - kw.update(kwargs) - return dbt.config.Profile.from_raw_profile_info(**kw) - - def from_args(self, project_profile_name="default", **kwargs): - kw = { - "project_profile_name": project_profile_name, - "renderer": empty_profile_renderer(), - "threads_override": self.args.threads, - "target_override": self.args.target, - "profile_name_override": self.args.profile, - } - kw.update(kwargs) - return dbt.config.Profile.render(**kw) - - def test_profile_simple(self): - profile = self.from_args() - from_raw = self.from_raw_profile_info() - - self.assertEqual(profile.profile_name, "default") - self.assertEqual(profile.target_name, "postgres") - self.assertEqual(profile.threads, 7) - self.assertTrue(profile.user_config.send_anonymous_usage_stats) - self.assertIsNone(profile.user_config.use_colors) - self.assertTrue(isinstance(profile.credentials, PostgresCredentials)) - 
self.assertEqual(profile.credentials.type, "postgres") - self.assertEqual(profile.credentials.host, "postgres-db-hostname") - self.assertEqual(profile.credentials.port, 5555) - self.assertEqual(profile.credentials.user, "db_user") - self.assertEqual(profile.credentials.password, "db_pass") - self.assertEqual(profile.credentials.schema, "postgres-schema") - self.assertEqual(profile.credentials.database, "postgres-db-name") - self.assertEqual(profile, from_raw) - - def test_profile_override(self): - self.args.profile = "other" - self.args.threads = 3 - set_from_args(self.args, None) - profile = self.from_args() - from_raw = self.from_raw_profile_info( - self.default_profile_data["other"], - "other", - threads_override=3, - ) - - self.assertEqual(profile.profile_name, "other") - self.assertEqual(profile.target_name, "other-postgres") - self.assertEqual(profile.threads, 3) - self.assertTrue(profile.user_config.send_anonymous_usage_stats) - self.assertIsNone(profile.user_config.use_colors) - self.assertTrue(isinstance(profile.credentials, PostgresCredentials)) - self.assertEqual(profile.credentials.type, "postgres") - self.assertEqual(profile.credentials.host, "other-postgres-db-hostname") - self.assertEqual(profile.credentials.port, 4444) - self.assertEqual(profile.credentials.user, "other_db_user") - self.assertEqual(profile.credentials.password, "other_db_pass") - self.assertEqual(profile.credentials.schema, "other-postgres-schema") - self.assertEqual(profile.credentials.database, "other-postgres-db-name") - self.assertEqual(profile, from_raw) - - def test_env_vars(self): - self.args.target = "with-vars" - with mock.patch.dict(os.environ, self.env_override): - profile = self.from_args() - from_raw = self.from_raw_profile_info(target_override="with-vars") - - self.assertEqual(profile.profile_name, "default") - self.assertEqual(profile.target_name, "with-vars") - self.assertEqual(profile.threads, 1) - self.assertTrue(profile.user_config.send_anonymous_usage_stats) - self.assertIsNone(profile.user_config.use_colors) - self.assertEqual(profile.credentials.type, "postgres") - self.assertEqual(profile.credentials.host, "env-postgres-host") - self.assertEqual(profile.credentials.port, 6543) - self.assertEqual(profile.credentials.user, "env-postgres-user") - self.assertEqual(profile.credentials.password, "env-postgres-pass") - self.assertEqual(profile, from_raw) - - def test_env_vars_env_target(self): - self.default_profile_data["default"]["target"] = "{{ env_var('env_value_target') }}" - self.write_profile(self.default_profile_data) - self.env_override["env_value_target"] = "with-vars" - with mock.patch.dict(os.environ, self.env_override): - profile = self.from_args() - from_raw = self.from_raw_profile_info(target_override="with-vars") - - self.assertEqual(profile.profile_name, "default") - self.assertEqual(profile.target_name, "with-vars") - self.assertEqual(profile.threads, 1) - self.assertTrue(profile.user_config.send_anonymous_usage_stats) - self.assertIsNone(profile.user_config.use_colors) - self.assertEqual(profile.credentials.type, "postgres") - self.assertEqual(profile.credentials.host, "env-postgres-host") - self.assertEqual(profile.credentials.port, 6543) - self.assertEqual(profile.credentials.user, "env-postgres-user") - self.assertEqual(profile.credentials.password, "env-postgres-pass") - self.assertEqual(profile, from_raw) - - def test_invalid_env_vars(self): - self.env_override["env_value_port"] = "hello" - self.args.target = "with-vars" - with mock.patch.dict(os.environ, 
self.env_override): - with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: - self.from_args() - - self.assertIn("Could not convert value 'hello' into type 'number'", str(exc.exception)) - - def test_cli_and_env_vars(self): - self.args.target = "cli-and-env-vars" - self.args.vars = {"cli_value_host": "cli-postgres-host"} - renderer = dbt.config.renderer.ProfileRenderer({"cli_value_host": "cli-postgres-host"}) - with mock.patch.dict(os.environ, self.env_override): - profile = self.from_args(renderer=renderer) - from_raw = self.from_raw_profile_info( - target_override="cli-and-env-vars", - renderer=renderer, - ) - - self.assertEqual(profile.profile_name, "default") - self.assertEqual(profile.target_name, "cli-and-env-vars") - self.assertEqual(profile.threads, 1) - self.assertTrue(profile.user_config.send_anonymous_usage_stats) - self.assertIsNone(profile.user_config.use_colors) - self.assertEqual(profile.credentials.type, "postgres") - self.assertEqual(profile.credentials.host, "cli-postgres-host") - self.assertEqual(profile.credentials.port, 6543) - self.assertEqual(profile.credentials.user, "env-postgres-user") - self.assertEqual(profile.credentials.password, "env-postgres-pass") - self.assertEqual(profile, from_raw) - - def test_no_profile(self): - with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: - self.from_args(project_profile_name=None) - self.assertIn("no profile was specified", str(exc.exception)) - - def test_empty_profile(self): - self.write_empty_profile() - with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: - self.from_args() - self.assertIn("profiles.yml is empty", str(exc.exception)) - - def test_profile_with_empty_profile_data(self): - renderer = empty_profile_renderer() - with self.assertRaises(dbt.exceptions.DbtProfileError) as exc: - dbt.config.Profile.from_raw_profiles( - self.default_profile_data, "empty_profile_data", renderer - ) - self.assertIn("Profile empty_profile_data in profiles.yml is empty", str(exc.exception)) - - -def project_from_config_norender( - cfg, packages=None, path="/invalid-root-path", verify_version=False -): - if packages is None: - packages = {} - partial = dbt.config.project.PartialProject.from_dicts( - path, - project_dict=cfg, - packages_dict=packages, - selectors_dict={}, - verify_version=verify_version, - ) - # no rendering - rendered = dbt.config.project.RenderComponents( - project_dict=partial.project_dict, - packages_dict=partial.packages_dict, - selectors_dict=partial.selectors_dict, - ) - return partial.create_project(rendered) - - -def project_from_config_rendered( - cfg, - packages=None, - path="/invalid-root-path", - verify_version=False, - packages_specified_path=PACKAGES_FILE_NAME, -): - if packages is None: - packages = {} - partial = dbt.config.project.PartialProject.from_dicts( - path, - project_dict=cfg, - packages_dict=packages, - selectors_dict={}, - verify_version=verify_version, - packages_specified_path=packages_specified_path, - ) - return partial.render(empty_project_renderer()) - - -class TestProject(BaseConfigTest): - def setUp(self): - self.profiles_dir = "/invalid-profiles-path" - self.project_dir = "/invalid-root-path" - super().setUp() - self.default_project_data["project-root"] = self.project_dir - - def test_defaults(self): - project = project_from_config_norender(self.default_project_data) - self.assertEqual(project.project_name, "my_test_project") - self.assertEqual(project.version, "0.0.1") - self.assertEqual(project.profile_name, "default") - 
self.assertEqual(project.project_root, "/invalid-root-path") - self.assertEqual(project.model_paths, ["models"]) - self.assertEqual(project.macro_paths, ["macros"]) - self.assertEqual(project.seed_paths, ["seeds"]) - self.assertEqual(project.test_paths, ["tests"]) - self.assertEqual(project.analysis_paths, ["analyses"]) - self.assertEqual( - set(project.docs_paths), set(["models", "seeds", "snapshots", "analyses", "macros"]) - ) - self.assertEqual(project.asset_paths, []) - self.assertEqual(project.target_path, "target") - self.assertEqual(project.clean_targets, ["target"]) - self.assertEqual(project.log_path, "logs") - self.assertEqual(project.packages_install_path, "dbt_packages") - self.assertEqual(project.quoting, {}) - self.assertEqual(project.models, {}) - self.assertEqual(project.on_run_start, []) - self.assertEqual(project.on_run_end, []) - self.assertEqual(project.seeds, {}) - self.assertEqual(project.dbt_version, [VersionSpecifier.from_version_string(">=0.0.0")]) - self.assertEqual(project.packages, PackageConfig(packages=[])) - # just make sure str() doesn't crash anything, that's always - # embarrassing - str(project) - - def test_eq(self): - project = project_from_config_norender(self.default_project_data) - other = project_from_config_norender(self.default_project_data) - self.assertEqual(project, other) - - def test_neq(self): - project = project_from_config_norender(self.default_project_data) - self.assertNotEqual(project, object()) - - def test_implicit_overrides(self): - self.default_project_data.update( - { - "model-paths": ["other-models"], - "target-path": "other-target", - } - ) - project = project_from_config_norender(self.default_project_data) - self.assertEqual( - set(project.docs_paths), - set(["other-models", "seeds", "snapshots", "analyses", "macros"]), - ) - self.assertEqual(project.clean_targets, ["other-target"]) - - def test_hashed_name(self): - project = project_from_config_norender(self.default_project_data) - self.assertEqual(project.hashed_name(), "754cd47eac1d6f50a5f7cd399ec43da4") - - def test_all_overrides(self): - # log-path is not tested because it is set exclusively from flags, not cfg - self.default_project_data.update( - { - "model-paths": ["other-models"], - "macro-paths": ["other-macros"], - "seed-paths": ["other-seeds"], - "test-paths": ["other-tests"], - "analysis-paths": ["other-analyses"], - "docs-paths": ["docs"], - "asset-paths": ["other-assets"], - "target-path": "other-target", - "clean-targets": ["another-target"], - "packages-install-path": "other-dbt_packages", - "quoting": {"identifier": False}, - "models": { - "pre-hook": ["{{ logging.log_model_start_event() }}"], - "post-hook": ["{{ logging.log_model_end_event() }}"], - "my_test_project": { - "first": { - "enabled": False, - "sub": { - "enabled": True, - }, - }, - "second": { - "materialized": "table", - }, - }, - "third_party": { - "third": { - "materialized": "view", - }, - }, - }, - "on-run-start": [ - "{{ logging.log_run_start_event() }}", - ], - "on-run-end": [ - "{{ logging.log_run_end_event() }}", - ], - "seeds": { - "my_test_project": { - "enabled": True, - "schema": "seed_data", - "post-hook": "grant select on {{ this }} to bi_user", - }, - }, - "tests": {"my_test_project": {"fail_calc": "sum(failures)"}}, - "require-dbt-version": ">=0.1.0", - } - ) - packages = { - "packages": [ - { - "local": "foo", - }, - {"git": "git@example.com:dbt-labs/dbt-utils.git", "revision": "test-rev"}, - ], - } - project = project_from_config_norender(self.default_project_data, 
packages=packages) - self.assertEqual(project.project_name, "my_test_project") - self.assertEqual(project.version, "0.0.1") - self.assertEqual(project.profile_name, "default") - self.assertEqual(project.project_root, "/invalid-root-path") - self.assertEqual(project.model_paths, ["other-models"]) - self.assertEqual(project.macro_paths, ["other-macros"]) - self.assertEqual(project.seed_paths, ["other-seeds"]) - self.assertEqual(project.test_paths, ["other-tests"]) - self.assertEqual(project.analysis_paths, ["other-analyses"]) - self.assertEqual(project.docs_paths, ["docs"]) - self.assertEqual(project.asset_paths, ["other-assets"]) - self.assertEqual(project.target_path, "other-target") - self.assertEqual(project.clean_targets, ["another-target"]) - self.assertEqual(project.packages_install_path, "other-dbt_packages") - self.assertEqual(project.quoting, {"identifier": False}) - self.assertEqual( - project.models, - { - "pre-hook": ["{{ logging.log_model_start_event() }}"], - "post-hook": ["{{ logging.log_model_end_event() }}"], - "my_test_project": { - "first": { - "enabled": False, - "sub": { - "enabled": True, - }, - }, - "second": { - "materialized": "table", - }, - }, - "third_party": { - "third": { - "materialized": "view", - }, - }, - }, - ) - self.assertEqual(project.on_run_start, ["{{ logging.log_run_start_event() }}"]) - self.assertEqual(project.on_run_end, ["{{ logging.log_run_end_event() }}"]) - self.assertEqual( - project.seeds, - { - "my_test_project": { - "enabled": True, - "schema": "seed_data", - "post-hook": "grant select on {{ this }} to bi_user", - }, - }, - ) - self.assertEqual( - project.tests, - { - "my_test_project": {"fail_calc": "sum(failures)"}, - }, - ) - self.assertEqual(project.dbt_version, [VersionSpecifier.from_version_string(">=0.1.0")]) - self.assertEqual( - project.packages, - PackageConfig( - packages=[ - LocalPackage(local="foo"), - GitPackage(git="git@example.com:dbt-labs/dbt-utils.git", revision="test-rev"), - ] - ), - ) - str(project) # this does the equivalent of project.to_project_config(with_packages=True) - json.dumps(project.to_project_config()) - - def test_string_run_hooks(self): - self.default_project_data.update( - { - "on-run-start": "{{ logging.log_run_start_event() }}", - "on-run-end": "{{ logging.log_run_end_event() }}", - } - ) - project = project_from_config_rendered(self.default_project_data) - self.assertEqual(project.on_run_start, ["{{ logging.log_run_start_event() }}"]) - self.assertEqual(project.on_run_end, ["{{ logging.log_run_end_event() }}"]) - - def test_invalid_project_name(self): - self.default_project_data["name"] = "invalid-project-name" - with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: - project_from_config_norender(self.default_project_data) - - self.assertIn("invalid-project-name", str(exc.exception)) - - def test_no_project(self): - renderer = empty_project_renderer() - with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: - dbt.config.Project.from_project_root(self.project_dir, renderer) - - self.assertIn("No dbt_project.yml", str(exc.exception)) - - def test_invalid_version(self): - self.default_project_data["require-dbt-version"] = "hello!" 
- with self.assertRaises(dbt.exceptions.DbtProjectError): - project_from_config_norender(self.default_project_data) - - def test_unsupported_version(self): - self.default_project_data["require-dbt-version"] = ">99999.0.0" - # allowed, because the RuntimeConfig checks, not the Project itself - project_from_config_norender(self.default_project_data) - - def test_none_values(self): - self.default_project_data.update( - { - "models": None, - "seeds": None, - "on-run-end": None, - "on-run-start": None, - } - ) - project = project_from_config_rendered(self.default_project_data) - self.assertEqual(project.models, {}) - self.assertEqual(project.on_run_start, []) - self.assertEqual(project.on_run_end, []) - self.assertEqual(project.seeds, {}) - - def test_nested_none_values(self): - self.default_project_data.update( - { - "models": {"vars": None, "pre-hook": None, "post-hook": None}, - "seeds": {"vars": None, "pre-hook": None, "post-hook": None, "column_types": None}, - } - ) - project = project_from_config_rendered(self.default_project_data) - self.assertEqual(project.models, {"vars": {}, "pre-hook": [], "post-hook": []}) - self.assertEqual( - project.seeds, {"vars": {}, "pre-hook": [], "post-hook": [], "column_types": {}} - ) - - @pytest.mark.skipif(os.name == "nt", reason="crashes CI for Windows") - def test_cycle(self): - models = {} - models["models"] = models - self.default_project_data.update( - { - "models": models, - } - ) - with self.assertRaises(dbt.exceptions.DbtProjectError) as exc: - project_from_config_rendered(self.default_project_data) - - assert "Cycle detected" in str(exc.exception) - - def test_query_comment_disabled(self): - self.default_project_data.update( - { - "query-comment": None, - } - ) - project = project_from_config_norender(self.default_project_data) - self.assertEqual(project.query_comment.comment, "") - self.assertEqual(project.query_comment.append, False) - - self.default_project_data.update( - { - "query-comment": "", - } - ) - project = project_from_config_norender(self.default_project_data) - self.assertEqual(project.query_comment.comment, "") - self.assertEqual(project.query_comment.append, False) - - def test_default_query_comment(self): - project = project_from_config_norender(self.default_project_data) - self.assertEqual(project.query_comment, QueryComment()) - - def test_default_query_comment_append(self): - self.default_project_data.update( - { - "query-comment": {"append": True}, - } - ) - project = project_from_config_norender(self.default_project_data) - self.assertEqual(project.query_comment.comment, DEFAULT_QUERY_COMMENT) - self.assertEqual(project.query_comment.append, True) - - def test_custom_query_comment_append(self): - self.default_project_data.update( - { - "query-comment": {"comment": "run by user test", "append": True}, - } - ) - project = project_from_config_norender(self.default_project_data) - self.assertEqual(project.query_comment.comment, "run by user test") - self.assertEqual(project.query_comment.append, True) - - def test_packages_from_dependencies(self): - packages = { - "packages": [ - { - "git": "{{ env_var('some_package') }}", - "warn-unpinned": True, - } - ], - } - - project = project_from_config_rendered( - self.default_project_data, packages, packages_specified_path=DEPENDENCIES_FILE_NAME - ) - git_package = project.packages.packages[0] - # packages did not render because packages_specified_path=DEPENDENCIES_FILE_NAME - assert git_package.git == "{{ env_var('some_package') }}" - - -class TestProjectFile(BaseFileTest): - def 
setUp(self): - super().setUp() - self.write_project(self.default_project_data) - # and after the fact, add the project root - self.default_project_data["project-root"] = self.project_dir - - def test_from_project_root(self): - renderer = empty_project_renderer() - project = dbt.config.Project.from_project_root(self.project_dir, renderer) - from_config = project_from_config_norender(self.default_project_data) - self.assertEqual(project, from_config) - self.assertEqual(project.version, "0.0.1") - self.assertEqual(project.project_name, "my_test_project") - - def test_with_invalid_package(self): - renderer = empty_project_renderer() - self.write_packages({"invalid": ["not a package of any kind"]}) - with self.assertRaises(dbt.exceptions.DbtProjectError): - dbt.config.Project.from_project_root(self.project_dir, renderer) - - -class InheritsFromConfiguredTask(ConfiguredTask): - def run(self): - pass - - -class TestConfiguredTask(BaseFileTest): - def setUp(self): - super().setUp() - self.write_project(self.default_project_data) - self.write_profile(self.default_profile_data) - - def tearDown(self): - super().tearDown() - # These tests will change the directory to the project path, - # so it's necessary to change it back at the end. - os.chdir(INITIAL_ROOT) - - def test_configured_task_dir_change(self): - self.assertEqual(os.getcwd(), INITIAL_ROOT) - self.assertNotEqual(INITIAL_ROOT, self.project_dir) - InheritsFromConfiguredTask.from_args(self.args) - self.assertEqual(os.path.realpath(os.getcwd()), os.path.realpath(self.project_dir)) - - def test_configured_task_dir_change_with_bad_path(self): - self.args.project_dir = "bad_path" - with self.assertRaises(dbt.exceptions.DbtRuntimeError): - InheritsFromConfiguredTask.from_args(self.args) - - -class TestVariableProjectFile(BaseFileTest): - def setUp(self): - super().setUp() - self.default_project_data["version"] = "{{ var('cli_version') }}" - self.default_project_data["name"] = "blah" - self.default_project_data["profile"] = "{{ env_var('env_value_profile') }}" - self.write_project(self.default_project_data) - # and after the fact, add the project root - self.default_project_data["project-root"] = self.project_dir - - def test_cli_and_env_vars(self): - renderer = dbt.config.renderer.DbtProjectYamlRenderer(None, {"cli_version": "0.1.2"}) - with mock.patch.dict(os.environ, self.env_override): - project = dbt.config.Project.from_project_root( - self.project_dir, - renderer, - ) - - self.assertEqual(renderer.ctx_obj.env_vars, {"env_value_profile": "default"}) - self.assertEqual(project.version, "0.1.2") - self.assertEqual(project.project_name, "blah") - self.assertEqual(project.profile_name, "default") - - -class TestRuntimeConfig(BaseConfigTest): - def setUp(self): - self.profiles_dir = "/invalid-profiles-path" - self.project_dir = "/invalid-root-path" - super().setUp() - self.default_project_data["project-root"] = self.project_dir - - def get_project(self): - return project_from_config_norender( - self.default_project_data, verify_version=self.args.version_check - ) - - def get_profile(self): - renderer = empty_profile_renderer() - return dbt.config.Profile.from_raw_profiles( - self.default_profile_data, self.default_project_data["profile"], renderer - ) - - def from_parts(self, exc=None): - with self.assertRaisesOrReturns(exc) as err: - project = self.get_project() - profile = self.get_profile() - - result = dbt.config.RuntimeConfig.from_parts(project, profile, self.args) - - if exc is None: - return result - else: - return err - - def 
test_from_parts(self): - project = self.get_project() - profile = self.get_profile() - config = dbt.config.RuntimeConfig.from_parts(project, profile, self.args) - - self.assertEqual(config.cli_vars, {}) - self.assertEqual(config.to_profile_info(), profile.to_profile_info()) - # we should have the default quoting set in the full config, but not in - # the project - # TODO(jeb): Adapters must assert that quoting is populated? - expected_project = project.to_project_config() - self.assertEqual(expected_project["quoting"], {}) - - expected_project["quoting"] = { - "database": True, - "identifier": True, - "schema": True, - } - self.assertEqual(config.to_project_config(), expected_project) - - def test_str(self): - project = self.get_project() - profile = self.get_profile() - config = dbt.config.RuntimeConfig.from_parts(project, profile, {}) - - # to make sure nothing terrible happens - str(config) - - def test_validate_fails(self): - project = self.get_project() - profile = self.get_profile() - # invalid - must be boolean - profile.user_config.use_colors = 100 - with self.assertRaises(dbt.exceptions.DbtProjectError): - dbt.config.RuntimeConfig.from_parts(project, profile, {}) - - def test_supported_version(self): - self.default_project_data["require-dbt-version"] = ">0.0.0" - conf = self.from_parts() - self.assertEqual(set(x.to_version_string() for x in conf.dbt_version), {">0.0.0"}) - - def test_unsupported_version(self): - self.default_project_data["require-dbt-version"] = ">99999.0.0" - raised = self.from_parts(dbt.exceptions.DbtProjectError) - self.assertIn("This version of dbt is not supported", str(raised.exception)) - - def test_unsupported_version_no_check(self): - self.default_project_data["require-dbt-version"] = ">99999.0.0" - self.args.version_check = False - set_from_args(self.args, None) - conf = self.from_parts() - self.assertEqual(set(x.to_version_string() for x in conf.dbt_version), {">99999.0.0"}) - - def test_supported_version_range(self): - self.default_project_data["require-dbt-version"] = [">0.0.0", "<=99999.0.0"] - conf = self.from_parts() - self.assertEqual( - set(x.to_version_string() for x in conf.dbt_version), {">0.0.0", "<=99999.0.0"} - ) - - def test_unsupported_version_range(self): - self.default_project_data["require-dbt-version"] = [">0.0.0", "<=0.0.1"] - raised = self.from_parts(dbt.exceptions.DbtProjectError) - self.assertIn("This version of dbt is not supported", str(raised.exception)) - - def test_unsupported_version_range_bad_config(self): - self.default_project_data["require-dbt-version"] = [">0.0.0", "<=0.0.1"] - self.default_project_data["some-extra-field-not-allowed"] = True - raised = self.from_parts(dbt.exceptions.DbtProjectError) - self.assertIn("This version of dbt is not supported", str(raised.exception)) - - def test_unsupported_version_range_no_check(self): - self.default_project_data["require-dbt-version"] = [">0.0.0", "<=0.0.1"] - self.args.version_check = False - set_from_args(self.args, None) - conf = self.from_parts() - self.assertEqual( - set(x.to_version_string() for x in conf.dbt_version), {">0.0.0", "<=0.0.1"} - ) - - def test_impossible_version_range(self): - self.default_project_data["require-dbt-version"] = [">99999.0.0", "<=0.0.1"] - raised = self.from_parts(dbt.exceptions.DbtProjectError) - self.assertIn( - "The package version requirement can never be satisfied", str(raised.exception) - ) - - def test_unsupported_version_extra_config(self): - self.default_project_data["some-extra-field-not-allowed"] = True - raised = 
self.from_parts(dbt.exceptions.DbtProjectError) - self.assertIn("Additional properties are not allowed", str(raised.exception)) - - def test_archive_not_allowed(self): - self.default_project_data["archive"] = [ - { - "source_schema": "a", - "target_schema": "b", - "tables": [ - { - "source_table": "seed", - "target_table": "archive_actual", - "updated_at": "updated_at", - "unique_key": """id || '-' || first_name""", - }, - ], - } - ] - with self.assertRaises(dbt.exceptions.DbtProjectError): - self.get_project() - - def test__warn_for_unused_resource_config_paths_empty(self): - project = self.from_parts() - dbt.flags.WARN_ERROR = True - try: - project.warn_for_unused_resource_config_paths( - { - "models": frozenset( - ( - ("my_test_project", "foo", "bar"), - ("my_test_project", "foo", "baz"), - ) - ) - }, - [], - ) - finally: - dbt.flags.WARN_ERROR = False - - -class TestRuntimeConfigWithConfigs(BaseConfigTest): - def setUp(self): - self.profiles_dir = "/invalid-profiles-path" - self.project_dir = "/invalid-root-path" - super().setUp() - self.default_project_data["project-root"] = self.project_dir - self.default_project_data["models"] = { - "enabled": True, - "my_test_project": { - "foo": { - "materialized": "view", - "bar": { - "materialized": "table", - }, - }, - "baz": { - "materialized": "table", - }, - }, - } - self.used = { - "models": frozenset( - ( - ("my_test_project", "foo", "bar"), - ("my_test_project", "foo", "baz"), - ) - ) - } - - def get_project(self): - return project_from_config_norender(self.default_project_data, verify_version=True) - - def get_profile(self): - renderer = empty_profile_renderer() - return dbt.config.Profile.from_raw_profiles( - self.default_profile_data, self.default_project_data["profile"], renderer - ) - - def from_parts(self, exc=None): - with self.assertRaisesOrReturns(exc) as err: - project = self.get_project() - profile = self.get_profile() - - result = dbt.config.RuntimeConfig.from_parts(project, profile, self.args) - - if exc is None: - return result - else: - return err - - def test__warn_for_unused_resource_config_paths(self): - project = self.from_parts() - with mock.patch("dbt.config.runtime.warn_or_error") as warn_or_error_patch: - project.warn_for_unused_resource_config_paths(self.used, []) - warn_or_error_patch.assert_called_once() - event = warn_or_error_patch.call_args[0][0] - assert type(event).__name__ == "UnusedResourceConfigPath" - msg = event.message() - expected_msg = "- models.my_test_project.baz" - assert expected_msg in msg - - -class TestRuntimeConfigFiles(BaseFileTest): - def setUp(self): - super().setUp() - self.write_profile(self.default_profile_data) - self.write_project(self.default_project_data) - # and after the fact, add the project root - self.default_project_data["project-root"] = self.project_dir - - def test_from_args(self): - with temp_cd(self.project_dir): - config = dbt.config.RuntimeConfig.from_args(self.args) - self.assertEqual(config.version, "0.0.1") - self.assertEqual(config.profile_name, "default") - # on osx, for example, these are not necessarily equal due to /private - self.assertTrue(os.path.samefile(config.project_root, self.project_dir)) - self.assertEqual(config.model_paths, ["models"]) - self.assertEqual(config.macro_paths, ["macros"]) - self.assertEqual(config.seed_paths, ["seeds"]) - self.assertEqual(config.test_paths, ["tests"]) - self.assertEqual(config.analysis_paths, ["analyses"]) - self.assertEqual( - set(config.docs_paths), set(["models", "seeds", "snapshots", "analyses", "macros"]) - ) - 
self.assertEqual(config.asset_paths, []) - self.assertEqual(config.target_path, "target") - self.assertEqual(config.clean_targets, ["target"]) - self.assertEqual(config.log_path, "logs") - self.assertEqual(config.packages_install_path, "dbt_packages") - self.assertEqual(config.quoting, {"database": True, "identifier": True, "schema": True}) - self.assertEqual(config.models, {}) - self.assertEqual(config.on_run_start, []) - self.assertEqual(config.on_run_end, []) - self.assertEqual(config.seeds, {}) - self.assertEqual(config.packages, PackageConfig(packages=[])) - self.assertEqual(config.project_name, "my_test_project") - - -class TestVariableRuntimeConfigFiles(BaseFileTest): - def setUp(self): - super().setUp() - self.default_project_data.update( - { - "version": "{{ var('cli_version') }}", - "name": "blah", - "profile": "{{ env_var('env_value_profile') }}", - "on-run-end": [ - "{{ env_var('env_value_profile') }}", - ], - "models": { - "foo": { - "post-hook": "{{ env_var('env_value_profile') }}", - }, - "bar": { - # just gibberish, make sure it gets interpreted - "materialized": "{{ env_var('env_value_profile') }}", - }, - }, - "seeds": { - "foo": { - "post-hook": "{{ env_var('env_value_profile') }}", - }, - "bar": { - # just gibberish, make sure it gets interpreted - "materialized": "{{ env_var('env_value_profile') }}", - }, - }, - } - ) - self.write_project(self.default_project_data) - self.write_profile(self.default_profile_data) - # and after the fact, add the project root - self.default_project_data["project-root"] = self.project_dir - - def test_cli_and_env_vars(self): - self.args.target = "cli-and-env-vars" - self.args.vars = {"cli_value_host": "cli-postgres-host", "cli_version": "0.1.2"} - self.args.project_dir = self.project_dir - set_from_args(self.args, None) - with mock.patch.dict(os.environ, self.env_override): - config = dbt.config.RuntimeConfig.from_args(self.args) - - self.assertEqual(config.version, "0.1.2") - self.assertEqual(config.project_name, "blah") - self.assertEqual(config.profile_name, "default") - self.assertEqual(config.credentials.host, "cli-postgres-host") - self.assertEqual(config.credentials.user, "env-postgres-user") - # make sure hooks are not interpreted - self.assertEqual(config.on_run_end, ["{{ env_var('env_value_profile') }}"]) - self.assertEqual(config.models["foo"]["post-hook"], "{{ env_var('env_value_profile') }}") - self.assertEqual(config.models["bar"]["materialized"], "default") # rendered! - self.assertEqual(config.seeds["foo"]["post-hook"], "{{ env_var('env_value_profile') }}") - self.assertEqual(config.seeds["bar"]["materialized"], "default") # rendered! 
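# A minimal, illustrative sketch of the selective rendering the test above
# asserts: hook entries ("on-run-end", pre-/post-hooks) are kept as raw Jinja
# for later rendering at execution time, while other string values are rendered
# immediately via env_var(). This uses plain jinja2, not dbt's
# DbtProjectYamlRenderer; HOOK_KEYS and render_config are placeholder names.
import os
from jinja2 import Environment

HOOK_KEYS = {"pre-hook", "post-hook", "on-run-start", "on-run-end"}

def render_config(config: dict, env: Environment) -> dict:
    """Recursively render string values, leaving hook entries untouched."""
    rendered = {}
    for key, value in config.items():
        if key in HOOK_KEYS:
            rendered[key] = value  # deferred: rendered when the hook runs
        elif isinstance(value, dict):
            rendered[key] = render_config(value, env)
        elif isinstance(value, str):
            rendered[key] = env.from_string(value).render()
        else:
            rendered[key] = value
    return rendered

env = Environment()
env.globals["env_var"] = lambda name, default=None: os.environ.get(name, default)
os.environ["env_value_profile"] = "default"
cfg = {
    "on-run-end": ["{{ env_var('env_value_profile') }}"],
    "models": {"bar": {"materialized": "{{ env_var('env_value_profile') }}"}},
}
out = render_config(cfg, env)
assert out["on-run-end"] == ["{{ env_var('env_value_profile') }}"]  # left as-is
assert out["models"]["bar"]["materialized"] == "default"  # rendered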
- - -class TestVarLookups(unittest.TestCase): - def setUp(self): - self.initial_src_vars = { - # globals - "foo": 123, - "bar": "hello", - # project-scoped - "my_project": { - "bar": "goodbye", - "baz": True, - }, - "other_project": { - "foo": 456, - }, - } - self.src_vars = deepcopy(self.initial_src_vars) - self.dst = {"vars": deepcopy(self.initial_src_vars)} - - self.projects = ["my_project", "other_project", "third_project"] - load_plugin("postgres") - self.local_var_search = mock.MagicMock( - fqn=["my_project", "my_model"], resource_type=NodeType.Model, package_name="my_project" - ) - self.other_var_search = mock.MagicMock( - fqn=["other_project", "model"], - resource_type=NodeType.Model, - package_name="other_project", - ) - self.third_var_search = mock.MagicMock( - fqn=["third_project", "third_model"], - resource_type=NodeType.Model, - package_name="third_project", - ) - - def test_lookups(self): - vars_provider = dbt.config.project.VarProvider(self.initial_src_vars) - - expected = [ - (self.local_var_search, "foo", 123), - (self.other_var_search, "foo", 456), - (self.third_var_search, "foo", 123), - (self.local_var_search, "bar", "goodbye"), - (self.other_var_search, "bar", "hello"), - (self.third_var_search, "bar", "hello"), - (self.local_var_search, "baz", True), - (self.other_var_search, "baz", None), - (self.third_var_search, "baz", None), - ] - for node, key, expected_value in expected: - value = vars_provider.vars_for(node, "postgres").get(key) - assert value == expected_value diff --git a/tests/unit/test_connection_retries.py b/tests/unit/test_connection_retries.py deleted file mode 100644 index 9076adb7ef9..00000000000 --- a/tests/unit/test_connection_retries.py +++ /dev/null @@ -1,59 +0,0 @@ -import functools -import pytest -from requests.exceptions import RequestException -from dbt.exceptions import ConnectionError -from dbt.utils import _connection_exception_retry - - -def no_retry_fn(): - return "success" - - -class TestNoRetries: - def test_no_retry(self): - fn_to_retry = functools.partial(no_retry_fn) - result = _connection_exception_retry(fn_to_retry, 3) - - expected = "success" - - assert result == expected - - -def no_success_fn(): - raise RequestException("You'll never pass") - return "failure" - - -class TestMaxRetries: - def test_no_retry(self): - fn_to_retry = functools.partial(no_success_fn) - - with pytest.raises(ConnectionError): - _connection_exception_retry(fn_to_retry, 3) - - -def single_retry_fn(): - global counter - if counter == 0: - counter += 1 - raise RequestException("You won't pass this one time") - elif counter == 1: - counter += 1 - return "success on 2" - - return "How did we get here?" - - -class TestSingleRetry: - def test_no_retry(self): - global counter - counter = 0 - - fn_to_retry = functools.partial(single_retry_fn) - result = _connection_exception_retry(fn_to_retry, 3) - expected = "success on 2" - - # We need to test the return value here, not just that it did not throw an error. 
- # If the value is not being passed it causes cryptic errors - assert result == expected - assert counter == 2 diff --git a/tests/unit/test_constraint_parsing.py b/tests/unit/test_constraint_parsing.py deleted file mode 100644 index f613a358e7c..00000000000 --- a/tests/unit/test_constraint_parsing.py +++ /dev/null @@ -1,14 +0,0 @@ -import pytest - -from dbt.contracts.graph.unparsed import UnparsedColumn, HasColumnTests -from dbt.exceptions import ParsingError -from dbt.parser.schemas import ParserRef - - -def test_column_parse(): - unparsed_col = HasColumnTests( - columns=[UnparsedColumn(name="TestCol", constraints=[{"type": "!INVALID!"}])] - ) - - with pytest.raises(ParsingError): - ParserRef.from_target(unparsed_col) diff --git a/tests/unit/test_contracts_graph_node_args.py b/tests/unit/test_contracts_graph_node_args.py deleted file mode 100644 index 1f632491a21..00000000000 --- a/tests/unit/test_contracts_graph_node_args.py +++ /dev/null @@ -1,19 +0,0 @@ -from dbt.contracts.graph.node_args import ModelNodeArgs - - -class TestModelNodeArgs: - def test_model_node_args_unique_id(self) -> None: - model_node_args = ModelNodeArgs( - name="name", package_name="package", identifier="identifier", schema="schema" - ) - assert model_node_args.unique_id == "model.package.name" - - def test_model_node_args_unique_id_with_version(self) -> None: - model_node_args = ModelNodeArgs( - name="name", - package_name="package", - identifier="identifier", - schema="schema", - version="1", - ) - assert model_node_args.unique_id == "model.package.name.v1" diff --git a/tests/unit/test_core_dbt_utils.py b/tests/unit/test_core_dbt_utils.py deleted file mode 100644 index 455b2fee01f..00000000000 --- a/tests/unit/test_core_dbt_utils.py +++ /dev/null @@ -1,73 +0,0 @@ -import requests -import tarfile -import unittest - -from dbt.exceptions import ConnectionError -from dbt.utils import _connection_exception_retry as connection_exception_retry - - -class TestCoreDbtUtils(unittest.TestCase): - def test_connection_exception_retry_none(self): - Counter._reset() - connection_exception_retry(lambda: Counter._add(), 5) - self.assertEqual(1, counter) - - def test_connection_exception_retry_success_requests_exception(self): - Counter._reset() - connection_exception_retry(lambda: Counter._add_with_requests_exception(), 5) - self.assertEqual(2, counter) # 2 = original attempt returned None, plus 1 retry - - def test_connection_exception_retry_max(self): - Counter._reset() - with self.assertRaises(ConnectionError): - connection_exception_retry(lambda: Counter._add_with_exception(), 5) - self.assertEqual(6, counter) # 6 = original attempt plus 5 retries - - def test_connection_exception_retry_success_failed_untar(self): - Counter._reset() - connection_exception_retry(lambda: Counter._add_with_untar_exception(), 5) - self.assertEqual(2, counter) # 2 = original attempt returned ReadError, plus 1 retry - - def test_connection_exception_retry_success_failed_eofexception(self): - Counter._reset() - connection_exception_retry(lambda: Counter._add_with_eof_exception(), 5) - self.assertEqual(2, counter) # 2 = original attempt returned EOFError, plus 1 retry - - -counter: int = 0 - - -class Counter: - def _add(): - global counter - counter += 1 - - # All exceptions that Requests explicitly raises inherit from - # requests.exceptions.RequestException so we want to make sure that raises plus one exception - # that inherit from it for sanity - def _add_with_requests_exception(): - global counter - counter += 1 - if counter < 2: - raise 
requests.exceptions.RequestException - - def _add_with_exception(): - global counter - counter += 1 - raise requests.exceptions.ConnectionError - - def _add_with_untar_exception(): - global counter - counter += 1 - if counter < 2: - raise tarfile.ReadError - - def _add_with_eof_exception(): - global counter - counter += 1 - if counter < 2: - raise EOFError - - def _reset(): - global counter - counter = 0 diff --git a/tests/unit/test_deprecations.py b/tests/unit/test_deprecations.py index ca8b8006cbc..5ac7cb003fa 100644 --- a/tests/unit/test_deprecations.py +++ b/tests/unit/test_deprecations.py @@ -1,15 +1,38 @@ -from dbt.internal_deprecations import deprecated -from dbt.flags import set_from_args -from argparse import Namespace +import pytest +import dbt.deprecations as deprecations -@deprecated(reason="just because", version="1.23.0", suggested_action="Make some updates") -def to_be_decorated(): - return 5 +@pytest.fixture(scope="function") +def active_deprecations(): + deprecations.reset_deprecations() + assert not deprecations.active_deprecations -# simple test that the return value is not modified -def test_deprecated_func(): - set_from_args(Namespace(WARN_ERROR=False), None) - assert hasattr(to_be_decorated, "__wrapped__") - assert to_be_decorated() == 5 + yield deprecations.active_deprecations + + deprecations.reset_deprecations() + + +@pytest.fixture(scope="function") +def buffered_deprecations(): + deprecations.buffered_deprecations.clear() + assert not deprecations.buffered_deprecations + + yield deprecations.buffered_deprecations + + deprecations.buffered_deprecations.clear() + + +def test_buffer_deprecation(active_deprecations, buffered_deprecations): + deprecations.buffer("project-flags-moved") + + assert active_deprecations == set() + assert len(buffered_deprecations) == 1 + + +def test_fire_buffered_deprecations(active_deprecations, buffered_deprecations): + deprecations.buffer("project-flags-moved") + deprecations.fire_buffered_deprecations() + + assert active_deprecations == set(["project-flags-moved"]) + assert len(buffered_deprecations) == 0 diff --git a/tests/unit/test_events.py b/tests/unit/test_events.py index 8c3cd240b3d..17e2e2d90f8 100644 --- a/tests/unit/test_events.py +++ b/tests/unit/test_events.py @@ -1,25 +1,31 @@ -import pytest +import logging import re from typing import TypeVar -from dbt.contracts.results import TimingInfo -from dbt.events import AdapterLogger, types +import pytest + +from dbt.adapters.events import types as adapter_types +from dbt.adapters.events.logging import AdapterLogger +from dbt.artifacts.schemas.results import RunStatus, TimingInfo +from dbt.artifacts.schemas.run import RunResult +from dbt.events import types as core_types from dbt.events.base_types import ( - BaseEvent, + CoreBaseEvent, DebugLevel, DynamicLevel, ErrorLevel, InfoLevel, TestLevel, WarnLevel, - msg_from_base_event, ) -from dbt.events.functions import msg_to_dict, msg_to_json -from dbt.events.helpers import get_json_string_utcnow -from dbt.flags import set_from_args -from argparse import Namespace - -set_from_args(Namespace(WARN_ERROR=False), None) +from dbt.events.types import RunResultError +from dbt.task.printer import print_run_result_error +from dbt_common.events import types +from dbt_common.events.base_types import msg_from_base_event +from dbt_common.events.event_manager import EventManager, TestEventManager +from dbt_common.events.event_manager_client import ctx_set_event_manager +from dbt_common.events.functions import msg_to_dict, msg_to_json +from 
dbt_common.events.helpers import get_json_string_utcnow # takes in a class and finds any subclasses for it @@ -51,14 +57,16 @@ def test_formatting(self): logger.debug("hello {}", "world") # enters lower in the call stack to test that it formats correctly - event = types.AdapterEventDebug(name="dbt_tests", base_msg="hello {}", args=["world"]) + event = adapter_types.AdapterEventDebug( + name="dbt_tests", base_msg="hello {}", args=["world"] + ) assert "hello world" in event.message() # tests that it doesn't throw logger.debug("1 2 {}", "3") # enters lower in the call stack to test that it formats correctly - event = types.AdapterEventDebug(name="dbt_tests", base_msg="1 2 {}", args=[3]) + event = adapter_types.AdapterEventDebug(name="dbt_tests", base_msg="1 2 {}", args=[3]) assert "1 2 3" in event.message() # tests that it doesn't throw @@ -67,25 +75,31 @@ def test_formatting(self): # enters lower in the call stack to test that it formats correctly # in this case it's that we didn't attempt to replace anything since there # were no args passed after the initial message - event = types.AdapterEventDebug(name="dbt_tests", base_msg="boop{x}boop", args=[]) + event = adapter_types.AdapterEventDebug(name="dbt_tests", base_msg="boop{x}boop", args=[]) assert "boop{x}boop" in event.message() # ensure AdapterLogger and subclasses makes all base_msg members # of type string; when someone writes logger.debug(a) where a is # any non-string object - event = types.AdapterEventDebug(name="dbt_tests", base_msg=[1, 2, 3], args=[3]) + event = adapter_types.AdapterEventDebug(name="dbt_tests", base_msg=[1, 2, 3], args=[3]) assert isinstance(event.base_msg, str) - event = types.JinjaLogDebug(msg=[1, 2, 3]) + event = core_types.JinjaLogDebug(msg=[1, 2, 3]) assert isinstance(event.msg, str) + def test_set_adapter_dependency_log_level(self): + logger = AdapterLogger("dbt_tests") + package_log = logging.getLogger("test_package_log") + logger.set_adapter_dependency_log_level("test_package_log", "DEBUG") + package_log.debug("debug message") + class TestEventCodes: # checks to see if event codes are duplicated to keep codes singluar and clear. # also checks that event codes follow correct namming convention ex. E001 def test_event_codes(self): - all_concrete = get_all_subclasses(BaseEvent) + all_concrete = get_all_subclasses(CoreBaseEvent) all_codes = set() for event_cls in all_concrete: @@ -103,101 +117,110 @@ def test_event_codes(self): # N.B. Events instantiated here include the module prefix in order to # avoid having the entire list twice in the code. 
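# A small sketch of the message-formatting behavior the AdapterLogger tests
# above assert: positional args are substituted str.format-style, unmatched
# placeholders are left alone when no args are passed, and non-string messages
# are coerced to str. Illustrative only; format_adapter_msg is a placeholder
# name, not the AdapterEventDebug implementation.
def format_adapter_msg(base_msg, args):
    msg = base_msg if isinstance(base_msg, str) else str(base_msg)
    if not args:
        return msg
    try:
        return msg.format(*args)
    except (IndexError, KeyError):
        return msg

assert format_adapter_msg("hello {}", ["world"]) == "hello world"
assert format_adapter_msg("1 2 {}", [3]) == "1 2 3"
assert format_adapter_msg("boop{x}boop", []) == "boop{x}boop"
assert format_adapter_msg([1, 2, 3], []) == "[1, 2, 3]"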
# A - pre-project loading - types.MainReportVersion(version=""), - types.MainReportArgs(args={}), - types.MainTrackingUserState(user_state=""), - types.MergedFromState(num_merged=0, sample=[]), - types.MissingProfileTarget(profile_name="", target_name=""), - types.InvalidOptionYAML(option_name="vars"), - types.LogDbtProjectError(), - types.LogDbtProfileError(), - types.StarterProjectPath(dir=""), - types.ConfigFolderDirectory(dir=""), - types.NoSampleProfileFound(adapter=""), - types.ProfileWrittenWithSample(name="", path=""), - types.ProfileWrittenWithTargetTemplateYAML(name="", path=""), - types.ProfileWrittenWithProjectTemplateYAML(name="", path=""), - types.SettingUpProfile(), - types.InvalidProfileTemplateYAML(), - types.ProjectNameAlreadyExists(name=""), - types.ProjectCreated(project_name=""), + core_types.MainReportVersion(version=""), + core_types.MainReportArgs(args={}), + core_types.MainTrackingUserState(user_state=""), + core_types.MissingProfileTarget(profile_name="", target_name=""), + core_types.InvalidOptionYAML(option_name="vars"), + core_types.LogDbtProjectError(), + core_types.LogDbtProfileError(), + core_types.StarterProjectPath(dir=""), + core_types.ConfigFolderDirectory(dir=""), + core_types.NoSampleProfileFound(adapter=""), + core_types.ProfileWrittenWithSample(name="", path=""), + core_types.ProfileWrittenWithTargetTemplateYAML(name="", path=""), + core_types.ProfileWrittenWithProjectTemplateYAML(name="", path=""), + core_types.SettingUpProfile(), + core_types.InvalidProfileTemplateYAML(), + core_types.ProjectNameAlreadyExists(name=""), + core_types.ProjectCreated(project_name=""), # D - Deprecations ====================== - types.PackageRedirectDeprecation(old_name="", new_name=""), - types.PackageInstallPathDeprecation(), - types.ConfigSourcePathDeprecation(deprecated_path="", exp_path=""), - types.ConfigDataPathDeprecation(deprecated_path="", exp_path=""), - types.AdapterDeprecationWarning(old_name="", new_name=""), - types.MetricAttributesRenamed(metric_name=""), - types.ExposureNameDeprecation(exposure=""), - types.InternalDeprecation(name="", reason="", suggested_action="", version=""), - types.EnvironmentVariableRenamed(old_name="", new_name=""), - types.ConfigLogPathDeprecation(deprecated_path=""), - types.ConfigTargetPathDeprecation(deprecated_path=""), - types.CollectFreshnessReturnSignature(), + core_types.PackageRedirectDeprecation(old_name="", new_name=""), + core_types.PackageInstallPathDeprecation(), + core_types.ConfigSourcePathDeprecation(deprecated_path="", exp_path=""), + core_types.ConfigDataPathDeprecation(deprecated_path="", exp_path=""), + adapter_types.AdapterDeprecationWarning(old_name="", new_name=""), + core_types.MetricAttributesRenamed(metric_name=""), + core_types.ExposureNameDeprecation(exposure=""), + core_types.InternalDeprecation(name="", reason="", suggested_action="", version=""), + core_types.EnvironmentVariableRenamed(old_name="", new_name=""), + core_types.ConfigLogPathDeprecation(deprecated_path=""), + core_types.ConfigTargetPathDeprecation(deprecated_path=""), + adapter_types.CollectFreshnessReturnSignature(), + core_types.TestsConfigDeprecation(deprecated_path="", exp_path=""), + core_types.ProjectFlagsMovedDeprecation(), + core_types.SpacesInResourceNameDeprecation(unique_id="", level=""), + core_types.ResourceNamesWithSpacesDeprecation( + count_invalid_names=1, show_debug_hint=True, level="" + ), + core_types.PackageMaterializationOverrideDeprecation( + package_name="my_package", materialization_name="view" + ), + 
core_types.SourceFreshnessProjectHooksNotRun(), # E - DB Adapter ====================== - types.AdapterEventDebug(), - types.AdapterEventInfo(), - types.AdapterEventWarning(), - types.AdapterEventError(), - types.AdapterRegistered(adapter_name="dbt-awesome", adapter_version="1.2.3"), - types.NewConnection(conn_type="", conn_name=""), - types.ConnectionReused(conn_name=""), - types.ConnectionLeftOpenInCleanup(conn_name=""), - types.ConnectionClosedInCleanup(conn_name=""), - types.RollbackFailed(conn_name=""), - types.ConnectionClosed(conn_name=""), - types.ConnectionLeftOpen(conn_name=""), - types.Rollback(conn_name=""), - types.CacheMiss(conn_name="", database="", schema=""), - types.ListRelations(database="", schema=""), - types.ConnectionUsed(conn_type="", conn_name=""), - types.SQLQuery(conn_name="", sql=""), - types.SQLQueryStatus(status="", elapsed=0.1), - types.SQLCommit(conn_name=""), - types.ColTypeChange( + adapter_types.AdapterEventDebug(), + adapter_types.AdapterEventInfo(), + adapter_types.AdapterEventWarning(), + adapter_types.AdapterEventError(), + adapter_types.AdapterRegistered(adapter_name="dbt-awesome", adapter_version="1.2.3"), + adapter_types.NewConnection(conn_type="", conn_name=""), + adapter_types.ConnectionReused(conn_name=""), + adapter_types.ConnectionLeftOpenInCleanup(conn_name=""), + adapter_types.ConnectionClosedInCleanup(conn_name=""), + adapter_types.RollbackFailed(conn_name=""), + adapter_types.ConnectionClosed(conn_name=""), + adapter_types.ConnectionLeftOpen(conn_name=""), + adapter_types.Rollback(conn_name=""), + adapter_types.CacheMiss(conn_name="", database="", schema=""), + adapter_types.ListRelations(database="", schema=""), + adapter_types.ConnectionUsed(conn_type="", conn_name=""), + adapter_types.SQLQuery(conn_name="", sql=""), + adapter_types.SQLQueryStatus(status="", elapsed=0.1), + adapter_types.SQLCommit(conn_name=""), + adapter_types.ColTypeChange( orig_type="", new_type="", table={"database": "", "schema": "", "identifier": ""}, ), - types.SchemaCreation(relation={"database": "", "schema": "", "identifier": ""}), - types.SchemaDrop(relation={"database": "", "schema": "", "identifier": ""}), - types.CacheAction( + adapter_types.SchemaCreation(relation={"database": "", "schema": "", "identifier": ""}), + adapter_types.SchemaDrop(relation={"database": "", "schema": "", "identifier": ""}), + adapter_types.CacheAction( action="adding_relation", ref_key={"database": "", "schema": "", "identifier": ""}, ref_key_2={"database": "", "schema": "", "identifier": ""}, ), - types.CacheDumpGraph(before_after="before", action="rename", dump=dict()), - types.AdapterImportError(exc=""), - types.PluginLoadError(exc_info=""), - types.NewConnectionOpening(connection_state=""), - types.CodeExecution(conn_name="", code_content=""), - types.CodeExecutionStatus(status="", elapsed=0.1), - types.CatalogGenerationError(exc=""), - types.WriteCatalogFailure(num_exceptions=0), - types.CatalogWritten(path=""), - types.CannotGenerateDocs(), - types.BuildingCatalog(), - types.DatabaseErrorRunningHook(hook_type=""), - types.HooksRunning(num_hooks=0, hook_type=""), - types.FinishedRunningStats(stat_line="", execution="", execution_time=0), - types.ConstraintNotEnforced(constraint="", adapter=""), - types.ConstraintNotSupported(constraint="", adapter=""), + adapter_types.CacheDumpGraph(before_after="before", action="rename", dump=dict()), + adapter_types.AdapterImportError(exc=""), + adapter_types.PluginLoadError(exc_info=""), + 
adapter_types.NewConnectionOpening(connection_state=""), + adapter_types.CodeExecution(conn_name="", code_content=""), + adapter_types.CodeExecutionStatus(status="", elapsed=0.1), + adapter_types.CatalogGenerationError(exc=""), + adapter_types.WriteCatalogFailure(num_exceptions=0), + adapter_types.CatalogWritten(path=""), + adapter_types.CannotGenerateDocs(), + adapter_types.BuildingCatalog(), + adapter_types.DatabaseErrorRunningHook(hook_type=""), + adapter_types.HooksRunning(num_hooks=0, hook_type=""), + adapter_types.FinishedRunningStats(stat_line="", execution="", execution_time=0), + adapter_types.ConstraintNotEnforced(constraint="", adapter=""), + adapter_types.ConstraintNotSupported(constraint="", adapter=""), # I - Project parsing ====================== - types.InputFileDiffError(category="testing", file_id="my_file"), - types.InvalidValueForField(field_name="test", field_value="test"), - types.ValidationWarning(resource_type="model", field_name="access", node_name="my_macro"), - types.ParsePerfInfoPath(path=""), - types.PartialParsingErrorProcessingFile(file=""), - types.PartialParsingFile(file_id=""), - types.PartialParsingError(exc_info={}), - types.PartialParsingSkipParsing(), - types.UnableToPartialParse(reason="something went wrong"), - types.StateCheckVarsHash(vars="testing", target="testing", profile="testing"), - types.PartialParsingNotEnabled(), - types.ParsedFileLoadFailed(path="", exc="", exc_info=""), - types.PartialParsingEnabled(deleted=0, added=0, changed=0), - types.PartialParsingFile(file_id=""), - types.InvalidDisabledTargetInTestNode( + core_types.InputFileDiffError(category="testing", file_id="my_file"), + core_types.InvalidValueForField(field_name="test", field_value="test"), + core_types.ValidationWarning(resource_type="model", field_name="access", node_name="my_macro"), + core_types.ParsePerfInfoPath(path=""), + core_types.PartialParsingErrorProcessingFile(file=""), + core_types.PartialParsingFile(file_id=""), + core_types.PartialParsingError(exc_info={}), + core_types.PartialParsingSkipParsing(), + core_types.UnableToPartialParse(reason="something went wrong"), + core_types.StateCheckVarsHash(vars="testing", target="testing", profile="testing"), + core_types.PartialParsingNotEnabled(), + core_types.ParsedFileLoadFailed(path="", exc="", exc_info=""), + core_types.PartialParsingEnabled(deleted=0, added=0, changed=0), + core_types.PartialParsingFile(file_id=""), + core_types.InvalidDisabledTargetInTestNode( resource_type_title="", unique_id="", original_file_path="", @@ -205,18 +228,18 @@ def test_event_codes(self): target_name="", target_package="", ), - types.UnusedResourceConfigPath(unused_config_paths=[]), - types.SeedIncreased(package_name="", name=""), - types.SeedExceedsLimitSamePath(package_name="", name=""), - types.SeedExceedsLimitAndPathChanged(package_name="", name=""), - types.SeedExceedsLimitChecksumChanged(package_name="", name="", checksum_name=""), - types.UnusedTables(unused_tables=[]), - types.WrongResourceSchemaFile( + core_types.UnusedResourceConfigPath(unused_config_paths=[]), + core_types.SeedIncreased(package_name="", name=""), + core_types.SeedExceedsLimitSamePath(package_name="", name=""), + core_types.SeedExceedsLimitAndPathChanged(package_name="", name=""), + core_types.SeedExceedsLimitChecksumChanged(package_name="", name="", checksum_name=""), + core_types.UnusedTables(unused_tables=[]), + core_types.WrongResourceSchemaFile( patch_name="", resource_type="", file_path="", plural_resource_type="" ), - 
types.NoNodeForYamlKey(patch_name="", yaml_key="", file_path=""), - types.MacroNotFoundForPatch(patch_name=""), - types.NodeNotFoundOrDisabled( + core_types.NoNodeForYamlKey(patch_name="", yaml_key="", file_path=""), + core_types.MacroNotFoundForPatch(patch_name=""), + core_types.NodeNotFoundOrDisabled( original_file_path="", unique_id="", resource_type_title="", @@ -225,79 +248,99 @@ def test_event_codes(self): target_package="", disabled="", ), - types.JinjaLogWarning(), - types.JinjaLogInfo(msg=""), - types.JinjaLogDebug(msg=""), - types.UnpinnedRefNewVersionAvailable( + core_types.JinjaLogWarning(), + core_types.JinjaLogInfo(msg=""), + core_types.JinjaLogDebug(msg=""), + core_types.UnpinnedRefNewVersionAvailable( ref_node_name="", ref_node_package="", ref_node_version="", ref_max_version="" ), - types.DeprecatedModel(model_name="", model_version="", deprecation_date=""), - types.DeprecatedReference( + core_types.DeprecatedModel(model_name="", model_version="", deprecation_date=""), + core_types.DeprecatedReference( model_name="", ref_model_name="", ref_model_package="", ref_model_deprecation_date="", ref_model_latest_version="", ), - types.UpcomingReferenceDeprecation( + core_types.UpcomingReferenceDeprecation( model_name="", ref_model_name="", ref_model_package="", ref_model_deprecation_date="", ref_model_latest_version="", ), - types.UnsupportedConstraintMaterialization(materialized=""), - types.ParseInlineNodeError(exc=""), + core_types.UnsupportedConstraintMaterialization(materialized=""), + core_types.ParseInlineNodeError(exc=""), + core_types.SemanticValidationFailure(msg=""), + core_types.UnversionedBreakingChange( + breaking_changes=[], + model_name="", + model_file_path="", + contract_enforced_disabled=True, + columns_removed=[], + column_type_changes=[], + enforced_column_constraint_removed=[], + enforced_model_constraint_removed=[], + materialization_changed=[], + ), + core_types.WarnStateTargetEqual(state_path=""), + core_types.FreshnessConfigProblem(msg=""), + core_types.SemanticValidationFailure(msg=""), # M - Deps generation ====================== - types.GitSparseCheckoutSubdirectory(subdir=""), - types.GitProgressCheckoutRevision(revision=""), - types.GitProgressUpdatingExistingDependency(dir=""), - types.GitProgressPullingNewDependency(dir=""), - types.GitNothingToDo(sha=""), - types.GitProgressUpdatedCheckoutRange(start_sha="", end_sha=""), - types.GitProgressCheckedOutAt(end_sha=""), - types.RegistryProgressGETRequest(url=""), - types.RegistryProgressGETResponse(url="", resp_code=1234), - types.SelectorReportInvalidSelector(valid_selectors="", spec_method="", raw_spec=""), - types.DepsNoPackagesFound(), - types.DepsStartPackageInstall(package_name=""), - types.DepsInstallInfo(version_name=""), - types.DepsUpdateAvailable(version_latest=""), - types.DepsUpToDate(), - types.DepsListSubdirectory(subdirectory=""), - types.DepsNotifyUpdatesAvailable(packages=["my_pkg", "other_pkg"]), + core_types.GitSparseCheckoutSubdirectory(subdir=""), + core_types.GitProgressCheckoutRevision(revision=""), + core_types.GitProgressUpdatingExistingDependency(dir=""), + core_types.GitProgressPullingNewDependency(dir=""), + core_types.GitNothingToDo(sha=""), + core_types.GitProgressUpdatedCheckoutRange(start_sha="", end_sha=""), + core_types.GitProgressCheckedOutAt(end_sha=""), + core_types.RegistryProgressGETRequest(url=""), + core_types.RegistryProgressGETResponse(url="", resp_code=1234), + core_types.SelectorReportInvalidSelector(valid_selectors="", spec_method="", raw_spec=""), + 
core_types.DepsNoPackagesFound(), + core_types.DepsStartPackageInstall(package_name=""), + core_types.DepsInstallInfo(version_name=""), + core_types.DepsUpdateAvailable(version_latest=""), + core_types.DepsUpToDate(), + core_types.DepsListSubdirectory(subdirectory=""), + core_types.DepsNotifyUpdatesAvailable(packages=["my_pkg", "other_pkg"]), types.RetryExternalCall(attempt=0, max=0), types.RecordRetryException(exc=""), - types.RegistryIndexProgressGETRequest(url=""), - types.RegistryIndexProgressGETResponse(url="", resp_code=1234), - types.RegistryResponseUnexpectedType(response=""), - types.RegistryResponseMissingTopKeys(response=""), - types.RegistryResponseMissingNestedKeys(response=""), - types.RegistryResponseExtraNestedKeys(response=""), - types.DepsSetDownloadDirectory(path=""), - types.SemanticValidationFailure(msg=""), + core_types.RegistryIndexProgressGETRequest(url=""), + core_types.RegistryIndexProgressGETResponse(url="", resp_code=1234), + core_types.RegistryResponseUnexpectedType(response=""), + core_types.RegistryResponseMissingTopKeys(response=""), + core_types.RegistryResponseMissingNestedKeys(response=""), + core_types.RegistryResponseExtraNestedKeys(response=""), + core_types.DepsSetDownloadDirectory(path=""), + core_types.DepsLockUpdating(lock_filepath=""), + core_types.DepsAddPackage(package_name="", version="", packages_filepath=""), + core_types.DepsFoundDuplicatePackage(removed_package={}), + core_types.DepsScrubbedPackageName(package_name=""), + core_types.DepsUnpinned(revision="", git=""), + core_types.NoNodesForSelectionCriteria(spec_raw=""), # Q - Node execution ====================== - types.RunningOperationCaughtError(exc=""), - types.CompileComplete(), - types.FreshnessCheckComplete(), - types.SeedHeader(header=""), - types.SQLRunnerException(exc=""), - types.LogTestResult( + core_types.RunningOperationCaughtError(exc=""), + core_types.CompileComplete(), + core_types.FreshnessCheckComplete(), + core_types.SeedHeader(header=""), + core_types.SQLRunnerException(exc=""), + core_types.LogTestResult( name="", index=0, num_models=0, execution_time=0, num_failures=0, ), - types.LogStartLine(description="", index=0, total=0), - types.LogModelResult( + core_types.LogStartLine(description="", index=0, total=0), + core_types.LogModelResult( description="", status="", index=0, total=0, execution_time=0, ), - types.LogSnapshotResult( + core_types.LogSnapshotResult( status="", description="", cfg={}, @@ -305,7 +348,7 @@ def test_event_codes(self): total=0, execution_time=0, ), - types.LogSeedResult( + core_types.LogSeedResult( status="", index=0, total=0, @@ -313,100 +356,113 @@ def test_event_codes(self): schema="", relation="", ), - types.LogFreshnessResult( + core_types.LogFreshnessResult( source_name="", table_name="", index=0, total=0, execution_time=0, ), - types.LogCancelLine(conn_name=""), - types.DefaultSelector(name=""), - types.NodeStart(), - types.NodeFinished(), - types.QueryCancelationUnsupported(type=""), - types.ConcurrencyLine(num_threads=0, target_name=""), - types.WritingInjectedSQLForNode(), - types.NodeCompiling(), - types.NodeExecuting(), - types.LogHookStartLine( + core_types.LogNodeNoOpResult( + description="", + status="", + index=0, + total=0, + execution_time=0, + ), + core_types.LogCancelLine(conn_name=""), + core_types.DefaultSelector(name=""), + core_types.NodeStart(), + core_types.NodeFinished(), + core_types.QueryCancelationUnsupported(type=""), + core_types.ConcurrencyLine(num_threads=0, target_name=""), + 
core_types.WritingInjectedSQLForNode(), + core_types.NodeCompiling(), + core_types.NodeExecuting(), + core_types.LogHookStartLine( statement="", index=0, total=0, ), - types.LogHookEndLine( + core_types.LogHookEndLine( statement="", status="", index=0, total=0, execution_time=0, ), - types.SkippingDetails( + core_types.SkippingDetails( resource_type="", schema="", node_name="", index=0, total=0, ), - types.NothingToDo(), - types.RunningOperationUncaughtError(exc=""), - types.EndRunResult(), - types.NoNodesSelected(), - types.DepsUnpinned(revision="", git=""), - types.NoNodesForSelectionCriteria(spec_raw=""), - types.CommandCompleted( - command="", success=True, elapsed=0.1, completed_at=get_json_string_utcnow() + core_types.NothingToDo(), + core_types.RunningOperationUncaughtError(exc=""), + core_types.EndRunResult(), + core_types.NoNodesSelected(), + core_types.CommandCompleted( + command="", + success=True, + elapsed=0.1, + completed_at=get_json_string_utcnow(), + ), + core_types.ShowNode(node_name="", preview="", is_inline=True, unique_id="model.test.my_model"), + core_types.CompiledNode( + node_name="", compiled="", is_inline=True, unique_id="model.test.my_model" + ), + core_types.SnapshotTimestampWarning( + snapshot_time_data_type="DATETIME", updated_at_data_type="DATETIMEZ" ), - types.ShowNode(node_name="", preview="", is_inline=True, unique_id="model.test.my_model"), - types.CompiledNode(node_name="", compiled="", is_inline=True, unique_id="model.test.my_model"), # W - Node testing ====================== - types.CatchableExceptionOnRun(exc=""), - types.InternalErrorOnRun(build_path="", exc=""), - types.GenericExceptionOnRun(build_path="", unique_id="", exc=""), - types.NodeConnectionReleaseError(node_name="", exc=""), - types.FoundStats(stat_line=""), + core_types.CatchableExceptionOnRun(exc=""), + core_types.InternalErrorOnRun(build_path="", exc=""), + core_types.GenericExceptionOnRun(build_path="", unique_id="", exc=""), + core_types.NodeConnectionReleaseError(node_name="", exc=""), + core_types.FoundStats(stat_line=""), # Z - misc ====================== - types.MainKeyboardInterrupt(), - types.MainEncounteredError(exc=""), - types.MainStackTrace(stack_trace=""), + core_types.MainKeyboardInterrupt(), + core_types.MainEncounteredError(exc=""), + core_types.MainStackTrace(stack_trace=""), types.SystemCouldNotWrite(path="", reason="", exc=""), types.SystemExecutingCmd(cmd=[""]), types.SystemStdOut(bmsg=str(b"")), types.SystemStdErr(bmsg=str(b"")), types.SystemReportReturnCode(returncode=0), - types.TimingInfoCollected(), - types.LogDebugStackTrace(), - types.CheckCleanPath(path=""), - types.ConfirmCleanPath(path=""), - types.ProtectedCleanPath(path=""), - types.FinishedCleanPaths(), - types.OpenCommand(open_cmd="", profiles_dir=""), - types.RunResultWarning(resource_type="", node_name="", path=""), - types.RunResultFailure(resource_type="", node_name="", path=""), - types.StatsLine(stats={"error": 0, "skip": 0, "pass": 0, "warn": 0, "total": 0}), - types.RunResultError(msg=""), - types.RunResultErrorNoMessage(status=""), - types.SQLCompiledPath(path=""), - types.CheckNodeTestFailure(relation_name=""), - types.FirstRunResultError(msg=""), - types.AfterFirstRunResultError(msg=""), - types.EndOfRunSummary(num_errors=0, num_warnings=0, keyboard_interrupt=False), - types.LogSkipBecauseError(schema="", relation="", index=0, total=0), - types.EnsureGitInstalled(), - types.DepsCreatingLocalSymlink(), - types.DepsSymlinkNotAvailable(), - types.DisableTracking(), - 
types.SendingEvent(kwargs=""), - types.SendEventFailure(), - types.FlushEvents(), - types.FlushEventsFailure(), + core_types.TimingInfoCollected(), + core_types.LogDebugStackTrace(), + core_types.CheckCleanPath(path=""), + core_types.ConfirmCleanPath(path=""), + core_types.ProtectedCleanPath(path=""), + core_types.FinishedCleanPaths(), + core_types.OpenCommand(open_cmd="", profiles_dir=""), + core_types.RunResultWarning(resource_type="", node_name="", path=""), + core_types.RunResultFailure(resource_type="", node_name="", path=""), + core_types.StatsLine(stats={"error": 0, "skip": 0, "pass": 0, "warn": 0, "total": 0}), + core_types.RunResultError(msg=""), + core_types.RunResultErrorNoMessage(status=""), + core_types.SQLCompiledPath(path=""), + core_types.CheckNodeTestFailure(relation_name=""), + core_types.EndOfRunSummary(num_errors=0, num_warnings=0, keyboard_interrupt=False), + core_types.MarkSkippedChildren(unique_id="", status="skipped"), + core_types.LogSkipBecauseError(schema="", relation="", index=0, total=0), + core_types.EnsureGitInstalled(), + core_types.DepsCreatingLocalSymlink(), + core_types.DepsSymlinkNotAvailable(), + core_types.DisableTracking(), + core_types.SendingEvent(kwargs=""), + core_types.SendEventFailure(), + core_types.FlushEvents(), + core_types.FlushEventsFailure(), types.Formatting(), - types.TrackingInitializeFailure(), - types.RunResultWarningMessage(), - types.DebugCmdOut(), - types.DebugCmdResult(), - types.ListCmdOut(), + core_types.TrackingInitializeFailure(), + core_types.RunResultWarningMessage(), + core_types.DebugCmdOut(), + core_types.DebugCmdResult(), + core_types.ListCmdOut(), types.Note(msg="This is a note."), + core_types.ResourceReport(), ] @@ -417,7 +473,7 @@ class TestEventJSONSerialization: # just fine and others won't. def test_all_serializable(self): all_non_abstract_events = set( - get_all_subclasses(BaseEvent), + get_all_subclasses(CoreBaseEvent), ) all_event_values_list = list(map(lambda x: x.__class__, sample_values)) diff = all_non_abstract_events.difference(set(all_event_values_list)) @@ -485,3 +541,34 @@ def test_bad_serialization(): str(excinfo.value) == "[Note]: Unable to parse dict {'param_event_doesnt_have': 'This should break'}" ) + + +def test_single_run_error(): + + try: + # Add a recording event manager to the context, so we can test events. + event_mgr = TestEventManager() + ctx_set_event_manager(event_mgr) + + error_result = RunResult( + status=RunStatus.Error, + timing=[], + thread_id="", + execution_time=0.0, + node=None, + adapter_response=dict(), + message="oh no!", + failures=[], + ) + + print_run_result_error(error_result) + events = [e for e in event_mgr.event_history if isinstance(e[0], RunResultError)] + + assert len(events) == 1 + assert events[0][0].msg == "oh no!" + + finally: + # Set an empty event manager unconditionally on exit. This is an early + # attempt at unit testing events, and we need to think about how it + # could be done in a thread safe way in the long run. 
+ ctx_set_event_manager(EventManager()) diff --git a/tests/unit/test_flags.py b/tests/unit/test_flags.py deleted file mode 100644 index 69d8913b675..00000000000 --- a/tests/unit/test_flags.py +++ /dev/null @@ -1,340 +0,0 @@ -import os -from unittest import TestCase -from argparse import Namespace -import pytest - -from dbt import flags -from dbt.contracts.project import UserConfig -from dbt.graph.selector_spec import IndirectSelection -from dbt.helper_types import WarnErrorOptions - -# Skip due to interface for flag updated -pytestmark = pytest.mark.skip - - -class TestFlags(TestCase): - def setUp(self): - self.args = Namespace() - self.user_config = UserConfig() - - def test__flags(self): - - # use_experimental_parser - self.user_config.use_experimental_parser = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.USE_EXPERIMENTAL_PARSER, True) - os.environ["DBT_USE_EXPERIMENTAL_PARSER"] = "false" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.USE_EXPERIMENTAL_PARSER, False) - setattr(self.args, "use_experimental_parser", True) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.USE_EXPERIMENTAL_PARSER, True) - # cleanup - os.environ.pop("DBT_USE_EXPERIMENTAL_PARSER") - delattr(self.args, "use_experimental_parser") - flags.USE_EXPERIMENTAL_PARSER = False - self.user_config.use_experimental_parser = None - - # static_parser - self.user_config.static_parser = False - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.STATIC_PARSER, False) - os.environ["DBT_STATIC_PARSER"] = "true" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.STATIC_PARSER, True) - setattr(self.args, "static_parser", False) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.STATIC_PARSER, False) - # cleanup - os.environ.pop("DBT_STATIC_PARSER") - delattr(self.args, "static_parser") - flags.STATIC_PARSER = True - self.user_config.static_parser = None - - # warn_error - self.user_config.warn_error = False - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.WARN_ERROR, False) - os.environ["DBT_WARN_ERROR"] = "true" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.WARN_ERROR, True) - setattr(self.args, "warn_error", False) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.WARN_ERROR, False) - # cleanup - os.environ.pop("DBT_WARN_ERROR") - delattr(self.args, "warn_error") - flags.WARN_ERROR = False - self.user_config.warn_error = None - - # warn_error_options - self.user_config.warn_error_options = '{"include": "all"}' - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.WARN_ERROR_OPTIONS, WarnErrorOptions(include="all")) - os.environ["DBT_WARN_ERROR_OPTIONS"] = '{"include": []}' - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.WARN_ERROR_OPTIONS, WarnErrorOptions(include=[])) - setattr(self.args, "warn_error_options", '{"include": "all"}') - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.WARN_ERROR_OPTIONS, WarnErrorOptions(include="all")) - # cleanup - os.environ.pop("DBT_WARN_ERROR_OPTIONS") - delattr(self.args, "warn_error_options") - self.user_config.warn_error_options = None - - # write_json - self.user_config.write_json = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.WRITE_JSON, True) - os.environ["DBT_WRITE_JSON"] = "false" - flags.set_from_args(self.args, self.user_config) - 
self.assertEqual(flags.WRITE_JSON, False) - setattr(self.args, "write_json", True) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.WRITE_JSON, True) - # cleanup - os.environ.pop("DBT_WRITE_JSON") - delattr(self.args, "write_json") - - # partial_parse - self.user_config.partial_parse = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.PARTIAL_PARSE, True) - os.environ["DBT_PARTIAL_PARSE"] = "false" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.PARTIAL_PARSE, False) - setattr(self.args, "partial_parse", True) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.PARTIAL_PARSE, True) - # cleanup - os.environ.pop("DBT_PARTIAL_PARSE") - delattr(self.args, "partial_parse") - self.user_config.partial_parse = False - - # use_colors - self.user_config.use_colors = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.USE_COLORS, True) - os.environ["DBT_USE_COLORS"] = "false" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.USE_COLORS, False) - setattr(self.args, "use_colors", True) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.USE_COLORS, True) - # cleanup - os.environ.pop("DBT_USE_COLORS") - delattr(self.args, "use_colors") - - # debug - self.user_config.debug = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.DEBUG, True) - os.environ["DBT_DEBUG"] = "True" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.DEBUG, True) - os.environ["DBT_DEBUG"] = "False" - setattr(self.args, "debug", True) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.DEBUG, True) - # cleanup - os.environ.pop("DBT_DEBUG") - delattr(self.args, "debug") - self.user_config.debug = None - - # log_format -- text, json, default - self.user_config.log_format = "text" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.LOG_FORMAT, "text") - os.environ["DBT_LOG_FORMAT"] = "json" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.LOG_FORMAT, "json") - setattr(self.args, "log_format", "text") - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.LOG_FORMAT, "text") - # cleanup - os.environ.pop("DBT_LOG_FORMAT") - delattr(self.args, "log_format") - self.user_config.log_format = None - - # version_check - self.user_config.version_check = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.VERSION_CHECK, True) - os.environ["DBT_VERSION_CHECK"] = "false" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.VERSION_CHECK, False) - setattr(self.args, "version_check", True) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.VERSION_CHECK, True) - # cleanup - os.environ.pop("DBT_VERSION_CHECK") - delattr(self.args, "version_check") - - # fail_fast - self.user_config.fail_fast = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.FAIL_FAST, True) - os.environ["DBT_FAIL_FAST"] = "false" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.FAIL_FAST, False) - setattr(self.args, "fail_fast", True) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.FAIL_FAST, True) - # cleanup - os.environ.pop("DBT_FAIL_FAST") - delattr(self.args, "fail_fast") - self.user_config.fail_fast = False - - # send_anonymous_usage_stats - 
self.user_config.send_anonymous_usage_stats = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.SEND_ANONYMOUS_USAGE_STATS, True) - os.environ["DBT_SEND_ANONYMOUS_USAGE_STATS"] = "false" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.SEND_ANONYMOUS_USAGE_STATS, False) - setattr(self.args, "send_anonymous_usage_stats", True) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.SEND_ANONYMOUS_USAGE_STATS, True) - os.environ["DO_NOT_TRACK"] = "1" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.SEND_ANONYMOUS_USAGE_STATS, False) - # cleanup - os.environ.pop("DBT_SEND_ANONYMOUS_USAGE_STATS") - os.environ.pop("DO_NOT_TRACK") - delattr(self.args, "send_anonymous_usage_stats") - - # printer_width - self.user_config.printer_width = 100 - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.PRINTER_WIDTH, 100) - os.environ["DBT_PRINTER_WIDTH"] = "80" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.PRINTER_WIDTH, 80) - setattr(self.args, "printer_width", "120") - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.PRINTER_WIDTH, 120) - # cleanup - os.environ.pop("DBT_PRINTER_WIDTH") - delattr(self.args, "printer_width") - self.user_config.printer_width = None - - # indirect_selection - self.user_config.indirect_selection = "eager" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.INDIRECT_SELECTION, IndirectSelection.Eager) - self.user_config.indirect_selection = "cautious" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.INDIRECT_SELECTION, IndirectSelection.Cautious) - self.user_config.indirect_selection = "buildable" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.INDIRECT_SELECTION, IndirectSelection.Buildable) - self.user_config.indirect_selection = None - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.INDIRECT_SELECTION, IndirectSelection.Eager) - os.environ["DBT_INDIRECT_SELECTION"] = "cautious" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.INDIRECT_SELECTION, IndirectSelection.Cautious) - setattr(self.args, "indirect_selection", "cautious") - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.INDIRECT_SELECTION, IndirectSelection.Cautious) - # cleanup - os.environ.pop("DBT_INDIRECT_SELECTION") - delattr(self.args, "indirect_selection") - self.user_config.indirect_selection = None - - # quiet - self.user_config.quiet = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.QUIET, True) - # cleanup - self.user_config.quiet = None - - # no_print - self.user_config.no_print = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.NO_PRINT, True) - # cleanup - self.user_config.no_print = None - - # cache_selected_only - self.user_config.cache_selected_only = True - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.CACHE_SELECTED_ONLY, True) - os.environ["DBT_CACHE_SELECTED_ONLY"] = "false" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.CACHE_SELECTED_ONLY, False) - setattr(self.args, "cache_selected_only", True) - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.CACHE_SELECTED_ONLY, True) - # cleanup - os.environ.pop("DBT_CACHE_SELECTED_ONLY") - delattr(self.args, "cache_selected_only") - self.user_config.cache_selected_only = False 
- - # target_path/log_path - flags.set_from_args(self.args, self.user_config) - self.assertIsNone(flags.LOG_PATH) - os.environ["DBT_LOG_PATH"] = "a/b/c" - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.LOG_PATH, "a/b/c") - setattr(self.args, "log_path", "d/e/f") - flags.set_from_args(self.args, self.user_config) - self.assertEqual(flags.LOG_PATH, "d/e/f") - # cleanup - os.environ.pop("DBT_LOG_PATH") - delattr(self.args, "log_path") - - def test__flags_are_mutually_exclusive(self): - # options from user config - self.user_config.warn_error = False - self.user_config.warn_error_options = '{"include":"all"}' - with pytest.raises(ValueError): - flags.set_from_args(self.args, self.user_config) - # cleanup - self.user_config.warn_error = None - self.user_config.warn_error_options = None - - # options from args - setattr(self.args, "warn_error", False) - setattr(self.args, "warn_error_options", '{"include":"all"}') - with pytest.raises(ValueError): - flags.set_from_args(self.args, self.user_config) - # cleanup - delattr(self.args, "warn_error") - delattr(self.args, "warn_error_options") - - # options from environment - os.environ["DBT_WARN_ERROR"] = "false" - os.environ["DBT_WARN_ERROR_OPTIONS"] = '{"include": []}' - with pytest.raises(ValueError): - flags.set_from_args(self.args, self.user_config) - # cleanup - os.environ.pop("DBT_WARN_ERROR") - os.environ.pop("DBT_WARN_ERROR_OPTIONS") - - # options from user config + args - self.user_config.warn_error = False - setattr(self.args, "warn_error_options", '{"include":"all"}') - with pytest.raises(ValueError): - flags.set_from_args(self.args, self.user_config) - # cleanup - self.user_config.warn_error = None - delattr(self.args, "warn_error_options") - - # options from user config + environ - self.user_config.warn_error = False - os.environ["DBT_WARN_ERROR_OPTIONS"] = '{"include": []}' - with pytest.raises(ValueError): - flags.set_from_args(self.args, self.user_config) - # cleanup - self.user_config.warn_error = None - os.environ.pop("DBT_WARN_ERROR_OPTIONS") - - # options from args + environ - setattr(self.args, "warn_error", False) - os.environ["DBT_WARN_ERROR_OPTIONS"] = '{"include": []}' - with pytest.raises(ValueError): - flags.set_from_args(self.args, self.user_config) - # cleanup - delattr(self.args, "warn_error") - os.environ.pop("DBT_WARN_ERROR_OPTIONS") diff --git a/tests/unit/test_functions.py b/tests/unit/test_functions.py index 30112d8507a..7d118dd7033 100644 --- a/tests/unit/test_functions.py +++ b/tests/unit/test_functions.py @@ -1,10 +1,13 @@ from argparse import Namespace + import pytest import dbt.flags as flags -from dbt.events.functions import msg_to_dict, warn_or_error, setup_event_logger -from dbt.events.types import InfoLevel, NoNodesForSelectionCriteria -from dbt.exceptions import EventCompilationError +from dbt.adapters.events.types import AdapterDeprecationWarning +from dbt.events.types import NoNodesForSelectionCriteria +from dbt_common.events.functions import msg_to_dict, warn_or_error +from dbt_common.events.types import InfoLevel, RetryExternalCall +from dbt_common.exceptions import EventCompilationError @pytest.mark.parametrize( @@ -28,6 +31,25 @@ def test_warn_or_error_warn_error_options(warn_error_options, expect_compilation warn_or_error(NoNodesForSelectionCriteria()) +@pytest.mark.parametrize( + "error_cls", + [ + NoNodesForSelectionCriteria, # core event + AdapterDeprecationWarning, # adapter event + RetryExternalCall, # common event + ], +) +def 
test_warn_error_options_captures_all_events(error_cls): + args = Namespace(warn_error_options={"include": [error_cls.__name__]}) + flags.set_from_args(args, {}) + with pytest.raises(EventCompilationError): + warn_or_error(error_cls()) + + args = Namespace(warn_error_options={"include": "*", "exclude": [error_cls.__name__]}) + flags.set_from_args(args, {}) + warn_or_error(error_cls()) + + @pytest.mark.parametrize( "warn_error,expect_compilation_exception", [ @@ -59,13 +81,3 @@ def __init__(self): assert ( False ), f"We expect `msg_to_dict` to gracefully handle exceptions, but it raised {exc}" - - -def test_setup_event_logger_specify_max_bytes(mocker): - patched_file_handler = mocker.patch("dbt.events.eventmgr.RotatingFileHandler") - args = Namespace(log_file_max_bytes=1234567) - flags.set_from_args(args, {}) - setup_event_logger(flags.get_flags()) - patched_file_handler.assert_called_once_with( - filename="logs/dbt.log", encoding="utf8", maxBytes=1234567, backupCount=5 - ) diff --git a/tests/unit/test_graph.py b/tests/unit/test_graph.py deleted file mode 100644 index fe1dc7e868a..00000000000 --- a/tests/unit/test_graph.py +++ /dev/null @@ -1,379 +0,0 @@ -import os - -from argparse import Namespace -import unittest -from unittest.mock import MagicMock, patch - -from dbt.adapters.postgres import Plugin as PostgresPlugin -from dbt.adapters.factory import reset_adapters, register_adapter -import dbt.clients.system -import dbt.compilation -import dbt.exceptions -import dbt.flags -import dbt.parser -import dbt.config -import dbt.utils -import dbt.parser.manifest -from dbt import tracking -from dbt.contracts.files import SourceFile, FileHash, FilePath -from dbt.contracts.graph.manifest import MacroManifest, ManifestStateCheck -from dbt.graph import NodeSelector, parse_difference -from dbt.events.functions import setup_event_logger - -try: - from queue import Empty -except ImportError: - from Queue import Empty - -from .utils import config_from_parts_or_dicts, generate_name_macros, inject_plugin - - -class GraphTest(unittest.TestCase): - def tearDown(self): - self.mock_filesystem_search.stop() - self.mock_hook_constructor.stop() - self.load_state_check.stop() - self.load_source_file_patcher.stop() - reset_adapters() - - def setUp(self): - # create various attributes - self.graph_result = None - tracking.do_not_track() - self.profile = { - "outputs": { - "test": { - "type": "postgres", - "threads": 4, - "host": "thishostshouldnotexist", - "port": 5432, - "user": "root", - "pass": "password", - "dbname": "dbt", - "schema": "dbt_test", - } - }, - "target": "test", - } - self.macro_manifest = MacroManifest( - {n.unique_id: n for n in generate_name_macros("test_models_compile")} - ) - self.mock_models = [] # used by filesystem_searcher - - # Create file filesystem searcher - self.filesystem_search = patch("dbt.parser.read_files.filesystem_search") - - def mock_filesystem_search(project, relative_dirs, extension, ignore_spec): - if "sql" not in extension: - return [] - if "models" not in relative_dirs: - return [] - return [model.path for model in self.mock_models] - - self.mock_filesystem_search = self.filesystem_search.start() - self.mock_filesystem_search.side_effect = mock_filesystem_search - - # Create HookParser patcher - self.hook_patcher = patch.object(dbt.parser.hooks.HookParser, "__new__") - - def create_hook_patcher(cls, project, manifest, root_project): - result = MagicMock(project=project, manifest=manifest, root_project=root_project) - result.__iter__.side_effect = lambda: iter([]) - return 
result - - self.mock_hook_constructor = self.hook_patcher.start() - self.mock_hook_constructor.side_effect = create_hook_patcher - - # Create the Manifest.state_check patcher - @patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") - def _mock_state_check(self): - all_projects = self.all_projects - return ManifestStateCheck( - project_env_vars_hash=FileHash.from_contents(""), - profile_env_vars_hash=FileHash.from_contents(""), - vars_hash=FileHash.from_contents("vars"), - project_hashes={name: FileHash.from_contents(name) for name in all_projects}, - profile_hash=FileHash.from_contents("profile"), - ) - - self.load_state_check = patch( - "dbt.parser.manifest.ManifestLoader.build_manifest_state_check" - ) - self.mock_state_check = self.load_state_check.start() - self.mock_state_check.side_effect = _mock_state_check - - # Create the source file patcher - self.load_source_file_patcher = patch("dbt.parser.read_files.load_source_file") - self.mock_source_file = self.load_source_file_patcher.start() - - def mock_load_source_file(path, parse_file_type, project_name, saved_files): - for sf in self.mock_models: - if sf.path == path: - source_file = sf - source_file.project_name = project_name - source_file.parse_file_type = parse_file_type - return source_file - - self.mock_source_file.side_effect = mock_load_source_file - - @patch("dbt.parser.hooks.HookParser.get_path") - def _mock_hook_path(self): - path = FilePath( - searched_path=".", - project_root=os.path.normcase(os.getcwd()), - relative_path="dbt_project.yml", - modification_time=0.0, - ) - return path - - def get_config(self, extra_cfg=None): - if extra_cfg is None: - extra_cfg = {} - - cfg = { - "name": "test_models_compile", - "version": "0.1", - "profile": "test", - "project-root": os.path.abspath("."), - "config-version": 2, - } - cfg.update(extra_cfg) - - config = config_from_parts_or_dicts(project=cfg, profile=self.profile) - dbt.flags.set_from_args(Namespace(), config) - setup_event_logger(dbt.flags.get_flags()) - object.__setattr__(dbt.flags.get_flags(), "PARTIAL_PARSE", False) - return config - - def get_compiler(self, project): - return dbt.compilation.Compiler(project) - - def use_models(self, models): - for k, v in models.items(): - path = FilePath( - searched_path="models", - project_root=os.path.normcase(os.getcwd()), - relative_path="{}.sql".format(k), - modification_time=0.0, - ) - # FileHash can't be empty or 'search_key' will be None - source_file = SourceFile(path=path, checksum=FileHash.from_contents("abc")) - source_file.contents = v - self.mock_models.append(source_file) - - def load_manifest(self, config): - inject_plugin(PostgresPlugin) - register_adapter(config) - loader = dbt.parser.manifest.ManifestLoader(config, {config.project_name: config}) - loader.manifest.macros = self.macro_manifest.macros - loader.load() - return loader.manifest - - def test__single_model(self): - self.use_models( - { - "model_one": "select * from events", - } - ) - - config = self.get_config() - manifest = self.load_manifest(config) - - compiler = self.get_compiler(config) - linker = compiler.compile(manifest) - - self.assertEqual(list(linker.nodes()), ["model.test_models_compile.model_one"]) - - self.assertEqual(list(linker.edges()), []) - - def test__two_models_simple_ref(self): - self.use_models( - { - "model_one": "select * from events", - "model_two": "select * from {{ref('model_one')}}", - } - ) - - config = self.get_config() - manifest = self.load_manifest(config) - compiler = self.get_compiler(config) - linker = 
compiler.compile(manifest) - - self.assertCountEqual( - linker.nodes(), - [ - "model.test_models_compile.model_one", - "model.test_models_compile.model_two", - ], - ) - - self.assertCountEqual( - linker.edges(), - [ - ( - "model.test_models_compile.model_one", - "model.test_models_compile.model_two", - ) - ], - ) - - def test__two_models_package_ref(self): - self.use_models( - { - "model_one": "select * from events", - "model_two": "select * from {{ref('test_models_compile', 'model_one')}}", - } - ) - - config = self.get_config() - manifest = self.load_manifest(config) - compiler = self.get_compiler(config) - linker = compiler.compile(manifest) - - self.assertCountEqual( - linker.nodes(), - [ - "model.test_models_compile.model_one", - "model.test_models_compile.model_two", - ], - ) - - self.assertCountEqual( - linker.edges(), - [ - ( - "model.test_models_compile.model_one", - "model.test_models_compile.model_two", - ) - ], - ) - - def test__model_materializations(self): - self.use_models( - { - "model_one": "select * from events", - "model_two": "select * from {{ref('model_one')}}", - "model_three": "select * from events", - "model_four": "select * from events", - } - ) - - cfg = { - "models": { - "materialized": "table", - "test_models_compile": { - "model_one": {"materialized": "table"}, - "model_two": {"materialized": "view"}, - "model_three": {"materialized": "ephemeral"}, - }, - } - } - - config = self.get_config(cfg) - manifest = self.load_manifest(config) - - expected_materialization = { - "model_one": "table", - "model_two": "view", - "model_three": "ephemeral", - "model_four": "table", - } - - for model, expected in expected_materialization.items(): - key = "model.test_models_compile.{}".format(model) - actual = manifest.nodes[key].config.materialized - self.assertEqual(actual, expected) - - def test__model_incremental(self): - self.use_models({"model_one": "select * from events"}) - - cfg = { - "models": { - "test_models_compile": { - "model_one": {"materialized": "incremental", "unique_key": "id"}, - } - } - } - - config = self.get_config(cfg) - manifest = self.load_manifest(config) - compiler = self.get_compiler(config) - linker = compiler.compile(manifest) - - node = "model.test_models_compile.model_one" - - self.assertEqual(list(linker.nodes()), [node]) - self.assertEqual(list(linker.edges()), []) - - self.assertEqual(manifest.nodes[node].config.materialized, "incremental") - - def test__dependency_list(self): - self.use_models( - { - "model_1": "select * from events", - "model_2": 'select * from {{ ref("model_1") }}', - "model_3": """ - select * from {{ ref("model_1") }} - union all - select * from {{ ref("model_2") }} - """, - "model_4": 'select * from {{ ref("model_3") }}', - } - ) - - config = self.get_config() - manifest = self.load_manifest(config) - compiler = self.get_compiler(config) - graph = compiler.compile(manifest) - - models = ("model_1", "model_2", "model_3", "model_4") - model_ids = ["model.test_models_compile.{}".format(m) for m in models] - - manifest = MagicMock( - nodes={ - n: MagicMock( - unique_id=n, - name=n.split(".")[-1], - package_name="test_models_compile", - fqn=["test_models_compile", n], - empty=False, - config=MagicMock(enabled=True), - ) - for n in model_ids - } - ) - manifest.expect.side_effect = lambda n: MagicMock(unique_id=n) - selector = NodeSelector(graph, manifest) - # TODO: The "eager" string below needs to be replaced with programatic access - # to the default value for the indirect selection parameter in - # 
dbt.cli.params.indirect_selection - # - # Doing that is actually a little tricky, so I'm punting it to a new ticket GH #6397 - queue = selector.get_graph_queue(parse_difference(None, None, "eager")) - - for model_id in model_ids: - self.assertFalse(queue.empty()) - got = queue.get(block=False) - self.assertEqual(got.unique_id, model_id) - with self.assertRaises(Empty): - queue.get(block=False) - queue.mark_done(got.unique_id) - self.assertTrue(queue.empty()) - - def test__partial_parse(self): - config = self.get_config() - - manifest = self.load_manifest(config) - - # we need a loader to compare the two manifests - loader = dbt.parser.manifest.ManifestLoader(config, {config.project_name: config}) - loader.manifest = manifest.deepcopy() - - is_partial_parsable, _ = loader.is_partial_parsable(manifest) - self.assertTrue(is_partial_parsable) - manifest.metadata.dbt_version = "0.0.1a1" - is_partial_parsable, _ = loader.is_partial_parsable(manifest) - self.assertFalse(is_partial_parsable) - manifest.metadata.dbt_version = "99999.99.99" - is_partial_parsable, _ = loader.is_partial_parsable(manifest) - self.assertFalse(is_partial_parsable) diff --git a/tests/unit/test_graph_selection.py b/tests/unit/test_graph_selection.py index 572c8fed10d..5d5cbf7469d 100644 --- a/tests/unit/test_graph_selection.py +++ b/tests/unit/test_graph_selection.py @@ -1,22 +1,14 @@ +import string from unittest import mock +import networkx as nx import pytest -import string -import dbt.exceptions -import dbt.graph.selector as graph_selector import dbt.graph.cli as graph_cli +import dbt.graph.selector as graph_selector +import dbt_common.exceptions from dbt.node_types import NodeType -import networkx as nx - -from dbt import flags - -from argparse import Namespace -from dbt.contracts.project import UserConfig - -flags.set_from_args(Namespace(), UserConfig()) - def _get_graph(): integer_graph = nx.balanced_tree(2, 2, nx.DiGraph()) @@ -58,7 +50,7 @@ def _get_manifest(graph): @pytest.fixture def graph(): - return graph_selector.Graph(_get_graph()) + return _get_graph() @pytest.fixture @@ -122,16 +114,9 @@ def id_macro(arg): @pytest.mark.parametrize("include,exclude,expected", run_specs, ids=id_macro) -def test_run_specs(include, exclude, expected): - graph = _get_graph() - manifest = _get_manifest(graph) +def test_run_specs(include, exclude, expected, graph, manifest): selector = graph_selector.NodeSelector(graph, manifest) - # TODO: The "eager" string below needs to be replaced with programatic access - # to the default value for the indirect selection parameter in - # dbt.cli.params.indirect_selection - # - # Doing that is actually a little tricky, so I'm punting it to a new ticket GH #6397 - spec = graph_cli.parse_difference(include, exclude, "eager") + spec = graph_cli.parse_difference(include, exclude) selected, _ = selector.select_nodes(spec) assert selected == expected @@ -214,5 +199,5 @@ def test_parse_specs( @pytest.mark.parametrize("invalid", invalid_specs, ids=lambda k: str(k)) def test_invalid_specs(invalid): - with pytest.raises(dbt.exceptions.DbtRuntimeError): + with pytest.raises(dbt_common.exceptions.DbtRuntimeError): graph_selector.SelectionCriteria.from_single_spec(invalid) diff --git a/tests/unit/test_helper_types.py b/tests/unit/test_helper_types.py deleted file mode 100644 index f0aa077b46e..00000000000 --- a/tests/unit/test_helper_types.py +++ /dev/null @@ -1,45 +0,0 @@ -import pytest - -from dbt.helper_types import IncludeExclude, WarnErrorOptions -from dbt.dataclass_schema import ValidationError - - 
-class TestIncludeExclude: - def test_init_invalid(self): - with pytest.raises(ValidationError): - IncludeExclude(include="invalid") - - with pytest.raises(ValidationError): - IncludeExclude(include=["ItemA"], exclude=["ItemB"]) - - @pytest.mark.parametrize( - "include,exclude,expected_includes", - [ - ("all", [], True), - ("*", [], True), - ("*", ["ItemA"], False), - (["ItemA"], [], True), - (["ItemA", "ItemB"], [], True), - ], - ) - def test_includes(self, include, exclude, expected_includes): - include_exclude = IncludeExclude(include=include, exclude=exclude) - - assert include_exclude.includes("ItemA") == expected_includes - - -class TestWarnErrorOptions: - def test_init(self): - with pytest.raises(ValidationError): - WarnErrorOptions(include=["InvalidError"]) - - with pytest.raises(ValidationError): - WarnErrorOptions(include="*", exclude=["InvalidError"]) - - warn_error_options = WarnErrorOptions(include=["NoNodesForSelectionCriteria"]) - assert warn_error_options.include == ["NoNodesForSelectionCriteria"] - assert warn_error_options.exclude == [] - - warn_error_options = WarnErrorOptions(include="*", exclude=["NoNodesForSelectionCriteria"]) - assert warn_error_options.include == "*" - assert warn_error_options.exclude == ["NoNodesForSelectionCriteria"] diff --git a/tests/unit/test_inject_ctes.py b/tests/unit/test_inject_ctes.py deleted file mode 100644 index 8b459f92e7c..00000000000 --- a/tests/unit/test_inject_ctes.py +++ /dev/null @@ -1,197 +0,0 @@ -from dbt.compilation import inject_ctes_into_sql -from dbt.contracts.graph.nodes import InjectedCTE -import re - - -def norm_whitespace(string): - _RE_COMBINE_WHITESPACE = re.compile(r"\s+") - string = _RE_COMBINE_WHITESPACE.sub(" ", string).strip() - return string - - -def test_inject_ctes_simple1(): - starting_sql = "select * from __dbt__cte__base" - ctes = [ - InjectedCTE( - id="model.test.base", - sql=" __dbt__cte__base as (\n\n\nselect * from test16873767336887004702_test_ephemeral.seed\n)", - ) - ] - expected_sql = """with __dbt__cte__base as ( - select * from test16873767336887004702_test_ephemeral.seed - ) select * from __dbt__cte__base""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_simple2(): - starting_sql = "select * from __dbt__cte__ephemeral_level_two" - ctes = [ - InjectedCTE( - id="model.test.ephemeral_level_two", - sql=' __dbt__cte__ephemeral_level_two as (\n\nselect * from "dbt"."test16873757769710148165_test_ephemeral"."source_table"\n)', - ) - ] - expected_sql = """with __dbt__cte__ephemeral_level_two as ( - select * from "dbt"."test16873757769710148165_test_ephemeral"."source_table" - ) select * from __dbt__cte__ephemeral_level_two""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_multiple_ctes(): - - starting_sql = "select * from __dbt__cte__ephemeral" - ctes = [ - InjectedCTE( - id="model.test.ephemeral_level_two", - sql=' __dbt__cte__ephemeral_level_two as (\n\nselect * from "dbt"."test16873735573223965828_test_ephemeral"."source_table"\n)', - ), - InjectedCTE( - id="model.test.ephemeral", - sql=" __dbt__cte__ephemeral as (\n\nselect * from __dbt__cte__ephemeral_level_two\n)", - ), - ] - expected_sql = """with __dbt__cte__ephemeral_level_two as ( - select * from "dbt"."test16873735573223965828_test_ephemeral"."source_table" - ), __dbt__cte__ephemeral as ( - select * from 
__dbt__cte__ephemeral_level_two - ) select * from __dbt__cte__ephemeral""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_multiple_ctes_more_complex(): - starting_sql = """select * from __dbt__cte__female_only - union all - select * from "dbt"."test16873757723266827902_test_ephemeral"."double_dependent" where gender = 'Male'""" - ctes = [ - InjectedCTE( - id="model.test.base", - sql=" __dbt__cte__base as (\n\n\nselect * from test16873757723266827902_test_ephemeral.seed\n)", - ), - InjectedCTE( - id="model.test.base_copy", - sql=" __dbt__cte__base_copy as (\n\n\nselect * from __dbt__cte__base\n)", - ), - InjectedCTE( - id="model.test.female_only", - sql=" __dbt__cte__female_only as (\n\n\nselect * from __dbt__cte__base_copy where gender = 'Female'\n)", - ), - ] - expected_sql = """with __dbt__cte__base as ( - select * from test16873757723266827902_test_ephemeral.seed - ), __dbt__cte__base_copy as ( - select * from __dbt__cte__base - ), __dbt__cte__female_only as ( - select * from __dbt__cte__base_copy where gender = 'Female' - ) select * from __dbt__cte__female_only - union all - select * from "dbt"."test16873757723266827902_test_ephemeral"."double_dependent" where gender = 'Male'""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_starting_with1(): - starting_sql = """ - with internal_cte as (select * from sessions) - select * from internal_cte - """ - ctes = [ - InjectedCTE( - id="cte_id_1", - sql="__dbt__cte__ephemeral as (select * from table)", - ), - InjectedCTE( - id="cte_id_2", - sql="__dbt__cte__events as (select id, type from events)", - ), - ] - expected_sql = """with __dbt__cte__ephemeral as (select * from table), - __dbt__cte__events as (select id, type from events), - internal_cte as (select * from sessions) - select * from internal_cte""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_starting_with2(): - starting_sql = """with my_other_cool_cte as ( - select id, name from __dbt__cte__ephemeral - where id > 1000 - ) - select name, id from my_other_cool_cte""" - ctes = [ - InjectedCTE( - id="model.singular_tests_ephemeral.ephemeral", - sql=' __dbt__cte__ephemeral as (\n\n\nwith my_cool_cte as (\n select name, id from "dbt"."test16873917221900185954_test_singular_tests_ephemeral"."base"\n)\nselect id, name from my_cool_cte where id is not null\n)', - ) - ] - expected_sql = """with __dbt__cte__ephemeral as ( - with my_cool_cte as ( - select name, id from "dbt"."test16873917221900185954_test_singular_tests_ephemeral"."base" - ) - select id, name from my_cool_cte where id is not null - ), my_other_cool_cte as ( - select id, name from __dbt__cte__ephemeral - where id > 1000 - ) - select name, id from my_other_cool_cte""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_comment_with(): - # Test injection with a comment containing "with" - starting_sql = """ - --- This is sql with a comment - select * from __dbt__cte__base - """ - ctes = [ - InjectedCTE( - id="model.test.base", - sql=" __dbt__cte__base as (\n\n\nselect * from test16873767336887004702_test_ephemeral.seed\n)", - ) - ] - expected_sql = """with __dbt__cte__base as ( - select * from 
test16873767336887004702_test_ephemeral.seed - ) --- This is sql with a comment - select * from __dbt__cte__base""" - - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) - - -def test_inject_ctes_with_recursive(): - # Test injection with "recursive" keyword - starting_sql = """ - with recursive t(n) as ( - select * from __dbt__cte__first_ephemeral_model - union all - select n+1 from t where n < 100 - ) - select sum(n) from t - """ - ctes = [ - InjectedCTE( - id="model.test.first_ephemeral_model", - sql=" __dbt__cte__first_ephemeral_model as (\n\nselect 1 as fun\n)", - ) - ] - expected_sql = """with recursive __dbt__cte__first_ephemeral_model as ( - select 1 as fun - ), t(n) as ( - select * from __dbt__cte__first_ephemeral_model - union all - select n+1 from t where n < 100 - ) - select sum(n) from t - """ - generated_sql = inject_ctes_into_sql(starting_sql, ctes) - assert norm_whitespace(generated_sql) == norm_whitespace(expected_sql) diff --git a/tests/unit/test_internal_deprecations.py b/tests/unit/test_internal_deprecations.py new file mode 100644 index 00000000000..69d30132ef4 --- /dev/null +++ b/tests/unit/test_internal_deprecations.py @@ -0,0 +1,12 @@ +from dbt.internal_deprecations import deprecated + + +@deprecated(reason="just because", version="1.23.0", suggested_action="Make some updates") +def to_be_decorated(): + return 5 + + +# simple test that the return value is not modified +def test_deprecated_func(): + assert hasattr(to_be_decorated, "__wrapped__") + assert to_be_decorated() == 5 diff --git a/tests/unit/test_jinja.py b/tests/unit/test_jinja.py deleted file mode 100644 index cdf7f808f0c..00000000000 --- a/tests/unit/test_jinja.py +++ /dev/null @@ -1,838 +0,0 @@ -from contextlib import contextmanager -import pytest -import unittest -import yaml - -from dbt.clients.jinja import get_rendered -from dbt.clients.jinja import get_template -from dbt.clients.jinja import extract_toplevel_blocks -from dbt.exceptions import CompilationError, JinjaRenderingError - - -@contextmanager -def returns(value): - yield value - - -@contextmanager -def raises(value): - with pytest.raises(value) as exc: - yield exc - - -def expected_id(arg): - if isinstance(arg, list): - return "_".join(arg) - - -jinja_tests = [ - # strings - ( - """foo: bar""", - returns("bar"), - returns("bar"), - ), - ( - '''foo: "bar"''', - returns("bar"), - returns("bar"), - ), - ( - '''foo: "'bar'"''', - returns("'bar'"), - returns("'bar'"), - ), - ( - """foo: '"bar"'""", - returns('"bar"'), - returns('"bar"'), - ), - ( - '''foo: "{{ 'bar' | as_text }}"''', - returns("bar"), - returns("bar"), - ), - ( - '''foo: "{{ 'bar' | as_bool }}"''', - returns("bar"), - raises(JinjaRenderingError), - ), - ( - '''foo: "{{ 'bar' | as_number }}"''', - returns("bar"), - raises(JinjaRenderingError), - ), - ( - '''foo: "{{ 'bar' | as_native }}"''', - returns("bar"), - returns("bar"), - ), - # ints - ( - """foo: 1""", - returns("1"), - returns("1"), - ), - ( - '''foo: "1"''', - returns("1"), - returns("1"), - ), - ( - '''foo: "'1'"''', - returns("'1'"), - returns("'1'"), - ), - ( - """foo: '"1"'""", - returns('"1"'), - returns('"1"'), - ), - ( - '''foo: "{{ 1 }}"''', - returns("1"), - returns("1"), - ), - ( - '''foo: "{{ '1' }}"''', - returns("1"), - returns("1"), - ), - ( - '''foo: "'{{ 1 }}'"''', - returns("'1'"), - returns("'1'"), - ), - ( - '''foo: "'{{ '1' }}'"''', - returns("'1'"), - returns("'1'"), - ), - ( - '''foo: "{{ 1 | as_text }}"''', - 
returns("1"), - returns("1"), - ), - ( - '''foo: "{{ 1 | as_bool }}"''', - returns("1"), - raises(JinjaRenderingError), - ), - ( - '''foo: "{{ 1 | as_number }}"''', - returns("1"), - returns(1), - ), - ( - '''foo: "{{ 1 | as_native }}"''', - returns("1"), - returns(1), - ), - ( - '''foo: "{{ '1' | as_text }}"''', - returns("1"), - returns("1"), - ), - ( - '''foo: "{{ '1' | as_bool }}"''', - returns("1"), - raises(JinjaRenderingError), - ), - ( - '''foo: "{{ '1' | as_number }}"''', - returns("1"), - returns(1), - ), - ( - '''foo: "{{ '1' | as_native }}"''', - returns("1"), - returns(1), - ), - # booleans. - # Note the discrepancy with true vs True: `true` is recognized by jinja but - # not literal_eval, but `True` is recognized by ast.literal_eval. - # For extra fun, yaml recognizes both. - # unquoted true - ( - '''foo: "{{ True }}"''', - returns("True"), - returns("True"), - ), - ( - '''foo: "{{ True | as_text }}"''', - returns("True"), - returns("True"), - ), - ( - '''foo: "{{ True | as_bool }}"''', - returns("True"), - returns(True), - ), - ( - '''foo: "{{ True | as_number }}"''', - returns("True"), - raises(JinjaRenderingError), - ), - ( - '''foo: "{{ True | as_native }}"''', - returns("True"), - returns(True), - ), - # unquoted true - ( - '''foo: "{{ true }}"''', - returns("True"), - returns("True"), - ), - ( - '''foo: "{{ true | as_text }}"''', - returns("True"), - returns("True"), - ), - ( - '''foo: "{{ true | as_bool }}"''', - returns("True"), - returns(True), - ), - ( - '''foo: "{{ true | as_number }}"''', - returns("True"), - raises(JinjaRenderingError), - ), - ( - '''foo: "{{ true | as_native }}"''', - returns("True"), - returns(True), - ), - ( - '''foo: "{{ 'true' | as_text }}"''', - returns("true"), - returns("true"), - ), - # quoted 'true' - ( - '''foo: "'{{ true }}'"''', - returns("'True'"), - returns("'True'"), - ), # jinja true -> python True -> str(True) -> "True" -> quoted - ( - '''foo: "'{{ true | as_text }}'"''', - returns("'True'"), - returns("'True'"), - ), - ( - '''foo: "'{{ true | as_bool }}'"''', - returns("'True'"), - returns("'True'"), - ), - ( - '''foo: "'{{ true | as_number }}'"''', - returns("'True'"), - returns("'True'"), - ), - ( - '''foo: "'{{ true | as_native }}'"''', - returns("'True'"), - returns("'True'"), - ), - # unquoted True - ( - '''foo: "{{ True }}"''', - returns("True"), - returns("True"), - ), - ( - '''foo: "{{ True | as_text }}"''', - returns("True"), - returns("True"), - ), # True -> string 'True' -> text -> str('True') -> 'True' - ( - '''foo: "{{ True | as_bool }}"''', - returns("True"), - returns(True), - ), - ( - '''foo: "{{ True | as_number }}"''', - returns("True"), - raises(JinjaRenderingError), - ), - ( - '''foo: "{{ True | as_native }}"''', - returns("True"), - returns(True), - ), - # quoted 'True' within rendering - ( - '''foo: "{{ 'True' | as_text }}"''', - returns("True"), - returns("True"), - ), - # 'True' -> string 'True' -> text -> str('True') -> 'True' - ( - '''foo: "{{ 'True' | as_bool }}"''', - returns("True"), - returns(True), - ), - # quoted 'True' outside rendering - ( - '''foo: "'{{ True }}'"''', - returns("'True'"), - returns("'True'"), - ), - ( - '''foo: "'{{ True | as_bool }}'"''', - returns("'True'"), - returns("'True'"), - ), - # yaml turns 'yes' into a boolean true - ( - """foo: yes""", - returns("True"), - returns("True"), - ), - ( - '''foo: "yes"''', - returns("yes"), - returns("yes"), - ), - # concatenation - ( - '''foo: "{{ (a_int + 100) | as_native }}"''', - returns("200"), - returns(200), - ), - ( - '''foo: "{{ 
(a_str ~ 100) | as_native }}"''', - returns("100100"), - returns(100100), - ), - ( - '''foo: "{{( a_int ~ 100) | as_native }}"''', - returns("100100"), - returns(100100), - ), - # multiple nodes -> always str - ( - '''foo: "{{ a_str | as_native }}{{ a_str | as_native }}"''', - returns("100100"), - returns("100100"), - ), - ( - '''foo: "{{ a_int | as_native }}{{ a_int | as_native }}"''', - returns("100100"), - returns("100100"), - ), - ( - '''foo: "'{{ a_int | as_native }}{{ a_int | as_native }}'"''', - returns("'100100'"), - returns("'100100'"), - ), - ( - """foo:""", - returns("None"), - returns("None"), - ), - ( - """foo: null""", - returns("None"), - returns("None"), - ), - ( - '''foo: ""''', - returns(""), - returns(""), - ), - ( - '''foo: "{{ '' | as_native }}"''', - returns(""), - returns(""), - ), - # very annoying, but jinja 'none' is yaml 'null'. - ( - '''foo: "{{ none | as_native }}"''', - returns("None"), - returns(None), - ), - # make sure we don't include comments in the output (see #2707) - ( - '''foo: "{# #}hello"''', - returns("hello"), - returns("hello"), - ), - ( - '''foo: "{% if false %}{% endif %}hello"''', - returns("hello"), - returns("hello"), - ), -] - - -@pytest.mark.parametrize("value,text_expectation,native_expectation", jinja_tests, ids=expected_id) -def test_jinja_rendering(value, text_expectation, native_expectation): - foo_value = yaml.safe_load(value)["foo"] - ctx = {"a_str": "100", "a_int": 100, "b_str": "hello"} - with text_expectation as text_result: - assert text_result == get_rendered(foo_value, ctx, native=False) - - with native_expectation as native_result: - assert native_result == get_rendered(foo_value, ctx, native=True) - - -class TestJinja(unittest.TestCase): - def test_do(self): - s = "{% set my_dict = {} %}\n{% do my_dict.update(a=1) %}" - - template = get_template(s, {}) - mod = template.make_module() - self.assertEqual(mod.my_dict, {"a": 1}) - - def test_regular_render(self): - s = '{{ "some_value" | as_native }}' - value = get_rendered(s, {}, native=False) - assert value == "some_value" - s = "{{ 1991 | as_native }}" - value = get_rendered(s, {}, native=False) - assert value == "1991" - - s = '{{ "some_value" | as_text }}' - value = get_rendered(s, {}, native=False) - assert value == "some_value" - s = "{{ 1991 | as_text }}" - value = get_rendered(s, {}, native=False) - assert value == "1991" - - def test_native_render(self): - s = '{{ "some_value" | as_native }}' - value = get_rendered(s, {}, native=True) - assert value == "some_value" - s = "{{ 1991 | as_native }}" - value = get_rendered(s, {}, native=True) - assert value == 1991 - - s = '{{ "some_value" | as_text }}' - value = get_rendered(s, {}, native=True) - assert value == "some_value" - s = "{{ 1991 | as_text }}" - value = get_rendered(s, {}, native=True) - assert value == "1991" - - -class TestBlockLexer(unittest.TestCase): - def test_basic(self): - body = '{{ config(foo="bar") }}\r\nselect * from this.that\r\n' - block_data = " \n\r\t{%- mytype foo %}" + body + "{%endmytype -%}" - blocks = extract_toplevel_blocks( - block_data, allowed_blocks={"mytype"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "mytype") - self.assertEqual(blocks[0].block_name, "foo") - self.assertEqual(blocks[0].contents, body) - self.assertEqual(blocks[0].full_block, block_data) - - def test_multiple(self): - body_one = '{{ config(foo="bar") }}\r\nselect * from this.that\r\n' - body_two = ( - "{{ config(bar=1)}}\r\nselect * from {% if foo %} thing 
" - "{% else %} other_thing {% endif %}" - ) - - block_data = ( - " {% mytype foo %}" - + body_one - + "{% endmytype %}" - + "\r\n{% othertype bar %}" - + body_two - + "{% endothertype %}" - ) - blocks = extract_toplevel_blocks( - block_data, allowed_blocks={"mytype", "othertype"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 2) - - def test_comments(self): - body = '{{ config(foo="bar") }}\r\nselect * from this.that\r\n' - comment = "{# my comment #}" - block_data = " \n\r\t{%- mytype foo %}" + body + "{%endmytype -%}" - blocks = extract_toplevel_blocks( - comment + block_data, allowed_blocks={"mytype"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "mytype") - self.assertEqual(blocks[0].block_name, "foo") - self.assertEqual(blocks[0].contents, body) - self.assertEqual(blocks[0].full_block, block_data) - - def test_evil_comments(self): - body = '{{ config(foo="bar") }}\r\nselect * from this.that\r\n' - comment = "{# external comment {% othertype bar %} select * from thing.other_thing{% endothertype %} #}" - block_data = " \n\r\t{%- mytype foo %}" + body + "{%endmytype -%}" - blocks = extract_toplevel_blocks( - comment + block_data, allowed_blocks={"mytype"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "mytype") - self.assertEqual(blocks[0].block_name, "foo") - self.assertEqual(blocks[0].contents, body) - self.assertEqual(blocks[0].full_block, block_data) - - def test_nested_comments(self): - body = '{# my comment #} {{ config(foo="bar") }}\r\nselect * from {# my other comment embedding {% endmytype %} #} this.that\r\n' - block_data = " \n\r\t{%- mytype foo %}" + body + "{% endmytype -%}" - comment = "{# external comment {% othertype bar %} select * from thing.other_thing{% endothertype %} #}" - blocks = extract_toplevel_blocks( - comment + block_data, allowed_blocks={"mytype"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "mytype") - self.assertEqual(blocks[0].block_name, "foo") - self.assertEqual(blocks[0].contents, body) - self.assertEqual(blocks[0].full_block, block_data) - - def test_complex_file(self): - blocks = extract_toplevel_blocks( - complex_snapshot_file, allowed_blocks={"mytype", "myothertype"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 3) - self.assertEqual(blocks[0].block_type_name, "mytype") - self.assertEqual(blocks[0].block_name, "foo") - self.assertEqual(blocks[0].full_block, "{% mytype foo %} some stuff {% endmytype %}") - self.assertEqual(blocks[0].contents, " some stuff ") - self.assertEqual(blocks[1].block_type_name, "mytype") - self.assertEqual(blocks[1].block_name, "bar") - self.assertEqual(blocks[1].full_block, bar_block) - self.assertEqual(blocks[1].contents, bar_block[16:-15].rstrip()) - self.assertEqual(blocks[2].block_type_name, "myothertype") - self.assertEqual(blocks[2].block_name, "x") - self.assertEqual(blocks[2].full_block, x_block.strip()) - self.assertEqual( - blocks[2].contents, - x_block[len("\n{% myothertype x %}") : -len("{% endmyothertype %}\n")], - ) - - def test_peaceful_macro_coexistence(self): - body = "{# my macro #} {% macro foo(a, b) %} do a thing {%- endmacro %} {# my model #} {% a b %} test {% enda %}" - blocks = extract_toplevel_blocks( - body, allowed_blocks={"macro", "a"}, collect_raw_data=True - ) - self.assertEqual(len(blocks), 4) - self.assertEqual(blocks[0].full_block, "{# my macro #} ") - 
self.assertEqual(blocks[1].block_type_name, "macro") - self.assertEqual(blocks[1].block_name, "foo") - self.assertEqual(blocks[1].contents, " do a thing") - self.assertEqual(blocks[2].full_block, " {# my model #} ") - self.assertEqual(blocks[3].block_type_name, "a") - self.assertEqual(blocks[3].block_name, "b") - self.assertEqual(blocks[3].contents, " test ") - - def test_macro_with_trailing_data(self): - body = "{# my macro #} {% macro foo(a, b) %} do a thing {%- endmacro %} {# my model #} {% a b %} test {% enda %} raw data so cool" - blocks = extract_toplevel_blocks( - body, allowed_blocks={"macro", "a"}, collect_raw_data=True - ) - self.assertEqual(len(blocks), 5) - self.assertEqual(blocks[0].full_block, "{# my macro #} ") - self.assertEqual(blocks[1].block_type_name, "macro") - self.assertEqual(blocks[1].block_name, "foo") - self.assertEqual(blocks[1].contents, " do a thing") - self.assertEqual(blocks[2].full_block, " {# my model #} ") - self.assertEqual(blocks[3].block_type_name, "a") - self.assertEqual(blocks[3].block_name, "b") - self.assertEqual(blocks[3].contents, " test ") - self.assertEqual(blocks[4].full_block, " raw data so cool") - - def test_macro_with_crazy_args(self): - body = """{% macro foo(a, b=asdf("cool this is 'embedded'" * 3) + external_var, c)%}cool{# block comment with {% endmacro %} in it #} stuff here {% endmacro %}""" - blocks = extract_toplevel_blocks(body, allowed_blocks={"macro"}, collect_raw_data=False) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "macro") - self.assertEqual(blocks[0].block_name, "foo") - self.assertEqual( - blocks[0].contents, "cool{# block comment with {% endmacro %} in it #} stuff here " - ) - - def test_materialization_parse(self): - body = "{% materialization xxx, default %} ... {% endmaterialization %}" - blocks = extract_toplevel_blocks( - body, allowed_blocks={"materialization"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "materialization") - self.assertEqual(blocks[0].block_name, "xxx") - self.assertEqual(blocks[0].full_block, body) - - body = '{% materialization xxx, adapter="other" %} ... 
{% endmaterialization %}' - blocks = extract_toplevel_blocks( - body, allowed_blocks={"materialization"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "materialization") - self.assertEqual(blocks[0].block_name, "xxx") - self.assertEqual(blocks[0].full_block, body) - - def test_nested_not_ok(self): - # we don't allow nesting same blocks - body = "{% myblock a %} {% myblock b %} {% endmyblock %} {% endmyblock %}" - with self.assertRaises(CompilationError): - extract_toplevel_blocks(body, allowed_blocks={"myblock"}) - - def test_incomplete_block_failure(self): - fullbody = "{% myblock foo %} {% endmyblock %}" - for length in range(len("{% myblock foo %}"), len(fullbody) - 1): - body = fullbody[:length] - with self.assertRaises(CompilationError): - extract_toplevel_blocks(body, allowed_blocks={"myblock"}) - - def test_wrong_end_failure(self): - body = "{% myblock foo %} {% endotherblock %}" - with self.assertRaises(CompilationError): - extract_toplevel_blocks(body, allowed_blocks={"myblock", "otherblock"}) - - def test_comment_no_end_failure(self): - body = "{# " - with self.assertRaises(CompilationError): - extract_toplevel_blocks(body) - - def test_comment_only(self): - body = "{# myblock #}" - blocks = extract_toplevel_blocks(body) - self.assertEqual(len(blocks), 1) - blocks = extract_toplevel_blocks(body, collect_raw_data=False) - self.assertEqual(len(blocks), 0) - - def test_comment_block_self_closing(self): - # test the case where a comment start looks a lot like it closes itself - # (but it doesn't in jinja!) - body = "{#} {% myblock foo %} {#}" - blocks = extract_toplevel_blocks(body, collect_raw_data=False) - self.assertEqual(len(blocks), 0) - - def test_embedded_self_closing_comment_block(self): - body = "{% myblock foo %} {#}{% endmyblock %} {#}{% endmyblock %}" - blocks = extract_toplevel_blocks(body, allowed_blocks={"myblock"}, collect_raw_data=False) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].full_block, body) - self.assertEqual(blocks[0].contents, " {#}{% endmyblock %} {#}") - - def test_set_statement(self): - body = "{% set x = 1 %}{% myblock foo %}hi{% endmyblock %}" - blocks = extract_toplevel_blocks(body, allowed_blocks={"myblock"}, collect_raw_data=False) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].full_block, "{% myblock foo %}hi{% endmyblock %}") - - def test_set_block(self): - body = "{% set x %}1{% endset %}{% myblock foo %}hi{% endmyblock %}" - blocks = extract_toplevel_blocks(body, allowed_blocks={"myblock"}, collect_raw_data=False) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].full_block, "{% myblock foo %}hi{% endmyblock %}") - - def test_crazy_set_statement(self): - body = '{% set x = (thing("{% myblock foo %}")) %}{% otherblock bar %}x{% endotherblock %}{% set y = otherthing("{% myblock foo %}") %}' - blocks = extract_toplevel_blocks( - body, allowed_blocks={"otherblock"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].full_block, "{% otherblock bar %}x{% endotherblock %}") - self.assertEqual(blocks[0].block_type_name, "otherblock") - - def test_do_statement(self): - body = "{% do thing.update() %}{% myblock foo %}hi{% endmyblock %}" - blocks = extract_toplevel_blocks(body, allowed_blocks={"myblock"}, collect_raw_data=False) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].full_block, "{% myblock foo %}hi{% endmyblock %}") - - def test_deceptive_do_statement(self): - body = "{% do thing 
%}{% myblock foo %}hi{% endmyblock %}" - blocks = extract_toplevel_blocks(body, allowed_blocks={"myblock"}, collect_raw_data=False) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].full_block, "{% myblock foo %}hi{% endmyblock %}") - - def test_do_block(self): - body = "{% do %}thing.update(){% enddo %}{% myblock foo %}hi{% endmyblock %}" - blocks = extract_toplevel_blocks( - body, allowed_blocks={"do", "myblock"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 2) - self.assertEqual(blocks[0].contents, "thing.update()") - self.assertEqual(blocks[0].block_type_name, "do") - self.assertEqual(blocks[1].full_block, "{% myblock foo %}hi{% endmyblock %}") - - def test_crazy_do_statement(self): - body = '{% do (thing("{% myblock foo %}")) %}{% otherblock bar %}x{% endotherblock %}{% do otherthing("{% myblock foo %}") %}{% myblock x %}hi{% endmyblock %}' - blocks = extract_toplevel_blocks( - body, allowed_blocks={"myblock", "otherblock"}, collect_raw_data=False - ) - self.assertEqual(len(blocks), 2) - self.assertEqual(blocks[0].full_block, "{% otherblock bar %}x{% endotherblock %}") - self.assertEqual(blocks[0].block_type_name, "otherblock") - self.assertEqual(blocks[1].full_block, "{% myblock x %}hi{% endmyblock %}") - self.assertEqual(blocks[1].block_type_name, "myblock") - - def test_awful_jinja(self): - blocks = extract_toplevel_blocks( - if_you_do_this_you_are_awful, - allowed_blocks={"snapshot", "materialization"}, - collect_raw_data=False, - ) - self.assertEqual(len(blocks), 2) - self.assertEqual(len([b for b in blocks if b.block_type_name == "__dbt__data"]), 0) - self.assertEqual(blocks[0].block_type_name, "snapshot") - self.assertEqual( - blocks[0].contents, - "\n ".join( - [ - """{% set x = ("{% endsnapshot %}" + (40 * '%})')) %}""", - "{# {% endsnapshot %} #}", - "{% embedded %}", - " some block data right here", - "{% endembedded %}", - ] - ), - ) - self.assertEqual(blocks[1].block_type_name, "materialization") - self.assertEqual(blocks[1].contents, "\nhi\n") - - def test_quoted_endblock_within_block(self): - body = '{% myblock something -%} {% set x = ("{% endmyblock %}") %} {% endmyblock %}' - blocks = extract_toplevel_blocks(body, allowed_blocks={"myblock"}, collect_raw_data=False) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "myblock") - self.assertEqual(blocks[0].contents, '{% set x = ("{% endmyblock %}") %} ') - - def test_docs_block(self): - body = '{% docs __my_doc__ %} asdf {# nope {% enddocs %}} #} {% enddocs %} {% docs __my_other_doc__ %} asdf "{% enddocs %}' - blocks = extract_toplevel_blocks(body, allowed_blocks={"docs"}, collect_raw_data=False) - self.assertEqual(len(blocks), 2) - self.assertEqual(blocks[0].block_type_name, "docs") - self.assertEqual(blocks[0].contents, " asdf {# nope {% enddocs %}} #} ") - self.assertEqual(blocks[0].block_name, "__my_doc__") - self.assertEqual(blocks[1].block_type_name, "docs") - self.assertEqual(blocks[1].contents, ' asdf "') - self.assertEqual(blocks[1].block_name, "__my_other_doc__") - - def test_docs_block_expr(self): - body = '{% docs more_doc %} asdf {{ "{% enddocs %}" ~ "}}" }}{% enddocs %}' - blocks = extract_toplevel_blocks(body, allowed_blocks={"docs"}, collect_raw_data=False) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "docs") - self.assertEqual(blocks[0].contents, ' asdf {{ "{% enddocs %}" ~ "}}" }}') - self.assertEqual(blocks[0].block_name, "more_doc") - - def test_unclosed_model_quotes(self): - # test case for 
https://github.com/dbt-labs/dbt-core/issues/1533 - body = '{% model my_model -%} select * from "something"."something_else{% endmodel %}' - blocks = extract_toplevel_blocks(body, allowed_blocks={"model"}, collect_raw_data=False) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].block_type_name, "model") - self.assertEqual(blocks[0].contents, 'select * from "something"."something_else') - self.assertEqual(blocks[0].block_name, "my_model") - - def test_if(self): - # if you conditionally define your macros/models, don't - body = "{% if true %}{% macro my_macro() %} adsf {% endmacro %}{% endif %}" - with self.assertRaises(CompilationError): - extract_toplevel_blocks(body) - - def test_if_innocuous(self): - body = "{% if true %}{% something %}asdfasd{% endsomething %}{% endif %}" - blocks = extract_toplevel_blocks(body) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].full_block, body) - - def test_for(self): - # no for-loops over macros. - body = "{% for x in range(10) %}{% macro my_macro() %} adsf {% endmacro %}{% endfor %}" - with self.assertRaises(CompilationError): - extract_toplevel_blocks(body) - - def test_for_innocuous(self): - # no for-loops over macros. - body = "{% for x in range(10) %}{% something my_something %} adsf {% endsomething %}{% endfor %}" - blocks = extract_toplevel_blocks(body) - self.assertEqual(len(blocks), 1) - self.assertEqual(blocks[0].full_block, body) - - def test_endif(self): - body = "{% snapshot foo %}select * from thing{% endsnapshot%}{% endif %}" - with self.assertRaises(CompilationError) as err: - extract_toplevel_blocks(body) - self.assertIn( - "Got an unexpected control flow end tag, got endif but never saw a preceeding if (@ 1:53)", - str(err.exception), - ) - - def test_if_endfor(self): - body = "{% if x %}...{% endfor %}{% endif %}" - with self.assertRaises(CompilationError) as err: - extract_toplevel_blocks(body) - self.assertIn( - "Got an unexpected control flow end tag, got endfor but expected endif next (@ 1:13)", - str(err.exception), - ) - - def test_if_endfor_newlines(self): - body = "{% if x %}\n ...\n {% endfor %}\n{% endif %}" - with self.assertRaises(CompilationError) as err: - extract_toplevel_blocks(body) - self.assertIn( - "Got an unexpected control flow end tag, got endfor but expected endif next (@ 3:4)", - str(err.exception), - ) - - -bar_block = """{% mytype bar %} -{# a comment - that inside it has - {% mytype baz %} -{% endmyothertype %} -{% endmytype %} -{% endmytype %} - {# -{% endmytype %}#} - -some other stuff - -{%- endmytype%}""" - -x_block = """ -{% myothertype x %} -before -{##} -and after -{% endmyothertype %} -""" - -complex_snapshot_file = ( - """ -{#some stuff {% mytype foo %} #} -{% mytype foo %} some stuff {% endmytype %} - -""" - + bar_block - + x_block -) - - -if_you_do_this_you_are_awful = """ -{#} here is a comment with a block inside {% block x %} asdf {% endblock %} {#} -{% do - set('foo="bar"') -%} -{% set x = ("100" + "hello'" + '%}') %} -{% snapshot something -%} - {% set x = ("{% endsnapshot %}" + (40 * '%})')) %} - {# {% endsnapshot %} #} - {% embedded %} - some block data right here - {% endembedded %} -{%- endsnapshot %} - -{% raw %} - {% set x = SYNTAX ERROR} -{% endraw %} - - -{% materialization whatever, adapter='thing' %} -hi -{% endmaterialization %} -""" diff --git a/tests/unit/test_linker.py b/tests/unit/test_linker.py deleted file mode 100644 index 9c36ae19674..00000000000 --- a/tests/unit/test_linker.py +++ /dev/null @@ -1,191 +0,0 @@ -import os -import tempfile 
-import unittest -from unittest import mock - -from dbt import compilation - -try: - from queue import Empty -except ImportError: - from Queue import Empty - -from dbt.graph.selector import NodeSelector -from dbt.graph.cli import parse_difference - - -def _mock_manifest(nodes): - config = mock.MagicMock(enabled=True) - manifest = mock.MagicMock( - nodes={ - n: mock.MagicMock( - unique_id=n, - package_name="pkg", - name=n, - empty=False, - config=config, - fqn=["pkg", n], - is_versioned=False, - ) - for n in nodes - } - ) - manifest.expect.side_effect = lambda n: mock.MagicMock(unique_id=n) - return manifest - - -class LinkerTest(unittest.TestCase): - def setUp(self): - self.linker = compilation.Linker() - - def test_linker_add_node(self): - expected_nodes = ["A", "B", "C"] - for node in expected_nodes: - self.linker.add_node(node) - - actual_nodes = self.linker.nodes() - for node in expected_nodes: - self.assertIn(node, actual_nodes) - - self.assertEqual(len(actual_nodes), len(expected_nodes)) - - def test_linker_write_graph(self): - expected_nodes = ["A", "B", "C"] - for node in expected_nodes: - self.linker.add_node(node) - - manifest = _mock_manifest("ABC") - (fd, fname) = tempfile.mkstemp() - os.close(fd) - try: - self.linker.write_graph(fname, manifest) - assert os.path.exists(fname) - finally: - os.unlink(fname) - - def assert_would_join(self, queue): - """test join() without timeout risk""" - self.assertEqual(queue.inner.unfinished_tasks, 0) - - def _get_graph_queue(self, manifest, include=None, exclude=None): - graph = compilation.Graph(self.linker.graph) - selector = NodeSelector(graph, manifest) - # TODO: The "eager" string below needs to be replaced with programatic access - # to the default value for the indirect selection parameter in - # dbt.cli.params.indirect_selection - # - # Doing that is actually a little tricky, so I'm punting it to a new ticket GH #6397 - spec = parse_difference(include, exclude, "eager") - return selector.get_graph_queue(spec) - - def test_linker_add_dependency(self): - actual_deps = [("A", "B"), ("A", "C"), ("B", "C")] - - for (l, r) in actual_deps: - self.linker.dependency(l, r) - - queue = self._get_graph_queue(_mock_manifest("ABC")) - - got = queue.get(block=False) - self.assertEqual(got.unique_id, "C") - with self.assertRaises(Empty): - queue.get(block=False) - self.assertFalse(queue.empty()) - queue.mark_done("C") - self.assertFalse(queue.empty()) - - got = queue.get(block=False) - self.assertEqual(got.unique_id, "B") - with self.assertRaises(Empty): - queue.get(block=False) - self.assertFalse(queue.empty()) - queue.mark_done("B") - self.assertFalse(queue.empty()) - - got = queue.get(block=False) - self.assertEqual(got.unique_id, "A") - with self.assertRaises(Empty): - queue.get(block=False) - self.assertTrue(queue.empty()) - queue.mark_done("A") - self.assert_would_join(queue) - self.assertTrue(queue.empty()) - - def test_linker_add_disjoint_dependencies(self): - actual_deps = [("A", "B")] - additional_node = "Z" - - for (l, r) in actual_deps: - self.linker.dependency(l, r) - self.linker.add_node(additional_node) - - queue = self._get_graph_queue(_mock_manifest("ABCZ")) - # the first one we get must be B, it has the longest dep chain - first = queue.get(block=False) - self.assertEqual(first.unique_id, "B") - self.assertFalse(queue.empty()) - queue.mark_done("B") - self.assertFalse(queue.empty()) - - second = queue.get(block=False) - self.assertIn(second.unique_id, {"A", "Z"}) - self.assertFalse(queue.empty()) - queue.mark_done(second.unique_id) 
- self.assertFalse(queue.empty()) - - third = queue.get(block=False) - self.assertIn(third.unique_id, {"A", "Z"}) - with self.assertRaises(Empty): - queue.get(block=False) - self.assertNotEqual(second.unique_id, third.unique_id) - self.assertTrue(queue.empty()) - queue.mark_done(third.unique_id) - self.assert_would_join(queue) - self.assertTrue(queue.empty()) - - def test_linker_dependencies_limited_to_some_nodes(self): - actual_deps = [("A", "B"), ("B", "C"), ("C", "D")] - - for (l, r) in actual_deps: - self.linker.dependency(l, r) - - queue = self._get_graph_queue(_mock_manifest("ABCD"), ["B"]) - got = queue.get(block=False) - self.assertEqual(got.unique_id, "B") - self.assertTrue(queue.empty()) - queue.mark_done("B") - self.assert_would_join(queue) - - queue_2 = queue = self._get_graph_queue(_mock_manifest("ABCD"), ["A", "B"]) - got = queue_2.get(block=False) - self.assertEqual(got.unique_id, "B") - self.assertFalse(queue_2.empty()) - with self.assertRaises(Empty): - queue_2.get(block=False) - queue_2.mark_done("B") - self.assertFalse(queue_2.empty()) - - got = queue_2.get(block=False) - self.assertEqual(got.unique_id, "A") - self.assertTrue(queue_2.empty()) - with self.assertRaises(Empty): - queue_2.get(block=False) - self.assertTrue(queue_2.empty()) - queue_2.mark_done("A") - self.assert_would_join(queue_2) - - def test__find_cycles__cycles(self): - actual_deps = [("A", "B"), ("B", "C"), ("C", "A")] - - for (l, r) in actual_deps: - self.linker.dependency(l, r) - - self.assertIsNotNone(self.linker.find_cycles()) - - def test__find_cycles__no_cycles(self): - actual_deps = [("A", "B"), ("B", "C"), ("C", "D")] - - for (l, r) in actual_deps: - self.linker.dependency(l, r) - - self.assertIsNone(self.linker.find_cycles()) diff --git a/tests/unit/test_macro_calls.py b/tests/unit/test_macro_calls.py deleted file mode 100644 index d575cfb76e8..00000000000 --- a/tests/unit/test_macro_calls.py +++ /dev/null @@ -1,44 +0,0 @@ -import unittest - -from dbt.clients.jinja_static import statically_extract_macro_calls -from dbt.context.base import generate_base_context - - -class MacroCalls(unittest.TestCase): - def setUp(self): - self.macro_strings = [ - "{% macro parent_macro() %} {% do return(nested_macro()) %} {% endmacro %}", - "{% macro lr_macro() %} {{ return(load_result('relations').table) }} {% endmacro %}", - "{% macro get_snapshot_unique_id() -%} {{ return(adapter.dispatch('get_snapshot_unique_id')()) }} {%- endmacro %}", - "{% macro get_columns_in_query(select_sql) -%} {{ return(adapter.dispatch('get_columns_in_query')(select_sql)) }} {% endmacro %}", - """{% macro test_mutually_exclusive_ranges(model) %} - with base as ( - select {{ get_snapshot_unique_id() }} as dbt_unique_id, - * - from {{ model }} ) - {% endmacro %}""", - "{% macro test_my_test(model) %} select {{ current_timestamp_backcompat() }} {% endmacro %}", - "{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind4', 'foo_utils4')) }} {%- endmacro %}", - "{% macro some_test(model) -%} {{ return(adapter.dispatch('test_some_kind5', macro_namespace = 'foo_utils5')) }} {%- endmacro %}", - ] - - self.possible_macro_calls = [ - ["nested_macro"], - ["load_result"], - ["get_snapshot_unique_id"], - ["get_columns_in_query"], - ["get_snapshot_unique_id"], - ["current_timestamp_backcompat"], - ["test_some_kind4", "foo_utils4.test_some_kind4"], - ["test_some_kind5", "foo_utils5.test_some_kind5"], - ] - - def test_macro_calls(self): - cli_vars = {"local_utils_dispatch_list": ["foo_utils4"]} - ctx = 
generate_base_context(cli_vars) - - index = 0 - for macro_string in self.macro_strings: - possible_macro_calls = statically_extract_macro_calls(macro_string, ctx) - self.assertEqual(self.possible_macro_calls[index], possible_macro_calls) - index += 1 diff --git a/tests/unit/test_manifest_selectors.py b/tests/unit/test_manifest_selectors.py deleted file mode 100644 index f9b7ff279f9..00000000000 --- a/tests/unit/test_manifest_selectors.py +++ /dev/null @@ -1,199 +0,0 @@ -import textwrap -import yaml -from collections import OrderedDict -import unittest -from dbt.config.selectors import SelectorDict -from dbt.exceptions import DbtSelectorsError - - -def get_selector_dict(txt: str) -> OrderedDict: - txt = textwrap.dedent(txt) - dct = OrderedDict(yaml.safe_load(txt)) - return dct - - -class SelectorUnitTest(unittest.TestCase): - def test_compare_cli_non_cli(self): - dct = get_selector_dict( - """\ - selectors: - - name: nightly_diet_snowplow - description: "This uses more CLI-style syntax" - definition: - union: - - intersection: - - '@source:snowplow' - - 'tag:nightly' - - 'models/export' - - exclude: - - intersection: - - 'package:snowplow' - - 'config.materialized:incremental' - - export_performance_timing - - name: nightly_diet_snowplow_full - description: "This is a fuller YAML specification" - definition: - union: - - intersection: - - method: source - value: snowplow - childrens_parents: true - - method: tag - value: nightly - - method: path - value: models/export - - exclude: - - intersection: - - method: package - value: snowplow - - method: config.materialized - value: incremental - - method: fqn - value: export_performance_timing - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - with_strings = sel_dict["nightly_diet_snowplow"]["definition"] - no_strings = sel_dict["nightly_diet_snowplow_full"]["definition"] - self.assertEqual(with_strings, no_strings) - - def test_single_string_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: nightly_selector - definition: - 'tag:nightly' - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - expected = {"method": "tag", "value": "nightly"} - definition = sel_dict["nightly_selector"]["definition"] - self.assertEqual(expected, definition) - - def test_single_key_value_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: nightly_selector - definition: - tag: nightly - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - expected = {"method": "tag", "value": "nightly"} - definition = sel_dict["nightly_selector"]["definition"] - self.assertEqual(expected, definition) - - def test_parent_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: kpi_nightly_selector - definition: - '+exposure:kpi_nightly' - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - expected = {"method": "exposure", "value": "kpi_nightly", "parents": True} - definition = sel_dict["kpi_nightly_selector"]["definition"] - self.assertEqual(expected, definition) - - def test_plus_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: my_model_children_selector - definition: - 'my_model+2' - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - expected = {"method": "fqn", "value": "my_model", "children": True, "children_depth": "2"} - definition = 
sel_dict["my_model_children_selector"]["definition"] - self.assertEqual(expected, definition) - - def test_selector_definition(self): - dct = get_selector_dict( - """\ - selectors: - - name: default - definition: - union: - - intersection: - - tag: foo - - tag: bar - - name: inherited - definition: - method: selector - value: default - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list(dct["selectors"]) - assert sel_dict - definition = sel_dict["default"]["definition"] - expected = sel_dict["inherited"]["definition"] - self.assertEqual(expected, definition) - - def test_selector_definition_with_exclusion(self): - dct = get_selector_dict( - """\ - selectors: - - name: default - definition: - union: - - intersection: - - tag: foo - - tag: bar - - name: inherited - definition: - union: - - method: selector - value: default - - exclude: - - tag: bar - - name: comparison - definition: - union: - - union: - - intersection: - - tag: foo - - tag: bar - - exclude: - - tag: bar - """ - ) - - sel_dict = SelectorDict.parse_from_selectors_list((dct["selectors"])) - assert sel_dict - definition = sel_dict["inherited"]["definition"] - expected = sel_dict["comparison"]["definition"] - self.assertEqual(expected, definition) - - def test_missing_selector(self): - dct = get_selector_dict( - """\ - selectors: - - name: inherited - definition: - method: selector - value: default - """ - ) - with self.assertRaises(DbtSelectorsError) as err: - SelectorDict.parse_from_selectors_list((dct["selectors"])) - - self.assertEqual( - "Existing selector definition for default not found.", str(err.exception.msg) - ) diff --git a/tests/unit/test_model_config.py b/tests/unit/test_model_config.py deleted file mode 100644 index 24ef8afcdb6..00000000000 --- a/tests/unit/test_model_config.py +++ /dev/null @@ -1,91 +0,0 @@ -from dataclasses import dataclass, field -from dbt.dataclass_schema import dbtClassMixin -from typing import List, Dict -from dbt.contracts.graph.model_config import MergeBehavior, ShowBehavior, CompareBehavior - - -@dataclass -class ThingWithMergeBehavior(dbtClassMixin): - default_behavior: int - appended: List[str] = field(metadata={"merge": MergeBehavior.Append}) - updated: Dict[str, int] = field(metadata={"merge": MergeBehavior.Update}) - clobbered: str = field(metadata={"merge": MergeBehavior.Clobber}) - keysappended: Dict[str, int] = field(metadata={"merge": MergeBehavior.DictKeyAppend}) - - -def test_merge_behavior_meta(): - existing = {"foo": "bar"} - initial_existing = existing.copy() - assert set(MergeBehavior) == { - MergeBehavior.Append, - MergeBehavior.Update, - MergeBehavior.Clobber, - MergeBehavior.DictKeyAppend, - } - for behavior in MergeBehavior: - assert behavior.meta() == {"merge": behavior} - assert behavior.meta(existing) == {"merge": behavior, "foo": "bar"} - assert existing == initial_existing - - -def test_merge_behavior_from_field(): - fields = [f[0] for f in ThingWithMergeBehavior._get_fields()] - fields = {name: f for f, name in ThingWithMergeBehavior._get_fields()} - assert set(fields) == {"default_behavior", "appended", "updated", "clobbered", "keysappended"} - assert MergeBehavior.from_field(fields["default_behavior"]) == MergeBehavior.Clobber - assert MergeBehavior.from_field(fields["appended"]) == MergeBehavior.Append - assert MergeBehavior.from_field(fields["updated"]) == MergeBehavior.Update - assert MergeBehavior.from_field(fields["clobbered"]) == MergeBehavior.Clobber - assert MergeBehavior.from_field(fields["keysappended"]) == MergeBehavior.DictKeyAppend - - 
-@dataclass -class ThingWithShowBehavior(dbtClassMixin): - default_behavior: int - hidden: str = field(metadata={"show_hide": ShowBehavior.Hide}) - shown: float = field(metadata={"show_hide": ShowBehavior.Show}) - - -def test_show_behavior_meta(): - existing = {"foo": "bar"} - initial_existing = existing.copy() - assert set(ShowBehavior) == {ShowBehavior.Hide, ShowBehavior.Show} - for behavior in ShowBehavior: - assert behavior.meta() == {"show_hide": behavior} - assert behavior.meta(existing) == {"show_hide": behavior, "foo": "bar"} - assert existing == initial_existing - - -def test_show_behavior_from_field(): - fields = [f[0] for f in ThingWithShowBehavior._get_fields()] - fields = {name: f for f, name in ThingWithShowBehavior._get_fields()} - assert set(fields) == {"default_behavior", "hidden", "shown"} - assert ShowBehavior.from_field(fields["default_behavior"]) == ShowBehavior.Show - assert ShowBehavior.from_field(fields["hidden"]) == ShowBehavior.Hide - assert ShowBehavior.from_field(fields["shown"]) == ShowBehavior.Show - - -@dataclass -class ThingWithCompareBehavior(dbtClassMixin): - default_behavior: int - included: float = field(metadata={"compare": CompareBehavior.Include}) - excluded: str = field(metadata={"compare": CompareBehavior.Exclude}) - - -def test_compare_behavior_meta(): - existing = {"foo": "bar"} - initial_existing = existing.copy() - assert set(CompareBehavior) == {CompareBehavior.Include, CompareBehavior.Exclude} - for behavior in CompareBehavior: - assert behavior.meta() == {"compare": behavior} - assert behavior.meta(existing) == {"compare": behavior, "foo": "bar"} - assert existing == initial_existing - - -def test_compare_behavior_from_field(): - fields = [f[0] for f in ThingWithCompareBehavior._get_fields()] - fields = {name: f for f, name in ThingWithCompareBehavior._get_fields()} - assert set(fields) == {"default_behavior", "included", "excluded"} - assert CompareBehavior.from_field(fields["default_behavior"]) == CompareBehavior.Include - assert CompareBehavior.from_field(fields["included"]) == CompareBehavior.Include - assert CompareBehavior.from_field(fields["excluded"]) == CompareBehavior.Exclude diff --git a/tests/unit/test_node_types.py b/tests/unit/test_node_types.py index 519922f6c76..87bbf51e3a1 100644 --- a/tests/unit/test_node_types.py +++ b/tests/unit/test_node_types.py @@ -1,10 +1,11 @@ import pytest + from dbt.node_types import NodeType node_type_pluralizations = { NodeType.Model: "models", NodeType.Analysis: "analyses", - NodeType.Test: "tests", + NodeType.Test: "data_tests", NodeType.Snapshot: "snapshots", NodeType.Operation: "operations", NodeType.Seed: "seeds", @@ -17,6 +18,9 @@ NodeType.Metric: "metrics", NodeType.Group: "groups", NodeType.SemanticModel: "semantic_models", + NodeType.Unit: "unit_tests", + NodeType.SavedQuery: "saved_queries", + NodeType.Fixture: "fixtures", } diff --git a/tests/unit/test_parse_manifest.py b/tests/unit/test_parse_manifest.py deleted file mode 100644 index 5dc39ab74ed..00000000000 --- a/tests/unit/test_parse_manifest.py +++ /dev/null @@ -1,123 +0,0 @@ -import unittest -from unittest import mock -from unittest.mock import patch, MagicMock -from argparse import Namespace - -from .utils import config_from_parts_or_dicts, normalize - -from dbt.contracts.files import SourceFile, FileHash, FilePath -from dbt.contracts.graph.manifest import Manifest, ManifestStateCheck -from dbt.parser import manifest -from dbt.parser.manifest import ManifestLoader -from dbt.config import RuntimeConfig -from dbt.flags import 
set_from_args - - -class MatchingHash(FileHash): - def __init__(self): - return super().__init__("", "") - - def __eq__(self, other): - return True - - -class MismatchedHash(FileHash): - def __init__(self): - return super().__init__("", "") - - def __eq__(self, other): - return False - - -class TestLoader(unittest.TestCase): - def setUp(self): - profile_data = { - "target": "test", - "quoting": {}, - "outputs": { - "test": { - "type": "postgres", - "host": "localhost", - "schema": "analytics", - "user": "test", - "pass": "test", - "dbname": "test", - "port": 1, - } - }, - } - - root_project = { - "name": "root", - "version": "0.1", - "profile": "test", - "project-root": normalize("/usr/src/app"), - "config-version": 2, - } - - self.root_project_config = config_from_parts_or_dicts( - project=root_project, profile=profile_data, cli_vars='{"test_schema_name": "foo"}' - ) - self.parser = mock.MagicMock() - - # Create the Manifest.state_check patcher - @patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") - def _mock_state_check(self): - all_projects = self.all_projects - return ManifestStateCheck( - vars_hash=FileHash.from_contents("vars"), - project_hashes={name: FileHash.from_contents(name) for name in all_projects}, - profile_hash=FileHash.from_contents("profile"), - ) - - self.load_state_check = patch( - "dbt.parser.manifest.ManifestLoader.build_manifest_state_check" - ) - self.mock_state_check = self.load_state_check.start() - self.mock_state_check.side_effect = _mock_state_check - - self.loader = manifest.ManifestLoader( - self.root_project_config, {"root": self.root_project_config} - ) - - def _new_manifest(self): - state_check = ManifestStateCheck(MatchingHash(), MatchingHash, []) - manifest = Manifest({}, {}, {}, {}, {}, {}, [], {}) - manifest.state_check = state_check - return manifest - - def _mismatched_file(self, searched, name): - return self._new_file(searched, name, False) - - def _matching_file(self, searched, name): - return self._new_file(searched, name, True) - - def _new_file(self, searched, name, match): - if match: - checksum = MatchingHash() - else: - checksum = MismatchedHash() - path = FilePath( - searched_path=normalize(searched), - relative_path=normalize(name), - project_root=normalize(self.root_project_config.project_root), - ) - return SourceFile(path=path, checksum=checksum) - - -class TestPartialParse(unittest.TestCase): - @patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") - @patch("dbt.parser.manifest.os.path.exists") - @patch("dbt.parser.manifest.open") - def test_partial_parse_file_path(self, patched_open, patched_os_exist, patched_state_check): - mock_project = MagicMock(RuntimeConfig) - mock_project.project_target_path = "mock_target_path" - patched_os_exist.return_value = True - set_from_args(Namespace(), {}) - ManifestLoader(mock_project, {}) - # by default we use the project_target_path - patched_open.assert_called_with("mock_target_path/partial_parse.msgpack", "rb") - set_from_args(Namespace(partial_parse_file_path="specified_partial_parse_path"), {}) - ManifestLoader(mock_project, {}) - # if specified in flags, we use the specified path - patched_open.assert_called_with("specified_partial_parse_path", "rb") diff --git a/tests/unit/test_partial_parsing.py b/tests/unit/test_partial_parsing.py deleted file mode 100644 index 12caac35013..00000000000 --- a/tests/unit/test_partial_parsing.py +++ /dev/null @@ -1,196 +0,0 @@ -import unittest -import time - -from dbt.parser.partial import PartialParsing -from 
dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import ModelNode -from dbt.contracts.files import ParseFileType, SourceFile, SchemaSourceFile, FilePath, FileHash -from dbt.node_types import NodeType -from .utils import normalize - - -class TestPartialParsing(unittest.TestCase): - def setUp(self): - - project_name = "my_test" - project_root = "/users/root" - sql_model_file = SourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="my_model.sql", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("abcdef"), - project_name=project_name, - parse_file_type=ParseFileType.Model, - nodes=["model.my_test.my_model"], - env_vars=[], - ) - sql_model_file_untouched = SourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="my_model_untouched.sql", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("abcdef"), - project_name=project_name, - parse_file_type=ParseFileType.Model, - nodes=["model.my_test.my_model_untouched"], - env_vars=[], - ) - - python_model_file = SourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="python_model.py", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("lalala"), - project_name=project_name, - parse_file_type=ParseFileType.Model, - nodes=["model.my_test.python_model"], - env_vars=[], - ) - python_model_file_untouched = SourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="python_model_untouched.py", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("lalala"), - project_name=project_name, - parse_file_type=ParseFileType.Model, - nodes=["model.my_test.python_model_untouched"], - env_vars=[], - ) - schema_file = SchemaSourceFile( - path=FilePath( - project_root=project_root, - searched_path="models", - relative_path="schema.yml", - modification_time=time.time(), - ), - checksum=FileHash.from_contents("ghijkl"), - project_name=project_name, - parse_file_type=ParseFileType.Schema, - dfy={ - "version": 2, - "models": [ - {"name": "my_model", "description": "Test model"}, - {"name": "python_model", "description": "python"}, - ], - }, - ndp=["model.my_test.my_model"], - env_vars={}, - ) - self.saved_files = { - schema_file.file_id: schema_file, - sql_model_file.file_id: sql_model_file, - python_model_file.file_id: python_model_file, - sql_model_file_untouched.file_id: sql_model_file_untouched, - python_model_file_untouched.file_id: python_model_file_untouched, - } - sql_model_node = self.get_model("my_model") - sql_model_node_untouched = self.get_model("my_model_untouched") - python_model_node = self.get_python_model("python_model") - python_model_node_untouched = self.get_python_model("python_model_untouched") - nodes = { - sql_model_node.unique_id: sql_model_node, - python_model_node.unique_id: python_model_node, - sql_model_node_untouched.unique_id: sql_model_node_untouched, - python_model_node_untouched.unique_id: python_model_node_untouched, - } - self.saved_manifest = Manifest(files=self.saved_files, nodes=nodes) - self.new_files = { - sql_model_file.file_id: SourceFile.from_dict(sql_model_file.to_dict()), - python_model_file.file_id: SourceFile.from_dict(python_model_file.to_dict()), - sql_model_file_untouched.file_id: SourceFile.from_dict( - sql_model_file_untouched.to_dict() - ), - python_model_file_untouched.file_id: SourceFile.from_dict( - python_model_file_untouched.to_dict() - 
), - schema_file.file_id: SchemaSourceFile.from_dict(schema_file.to_dict()), - } - - self.partial_parsing = PartialParsing(self.saved_manifest, self.new_files) - - def get_model(self, name): - return ModelNode( - package_name="my_test", - path=f"{name}.sql", - original_file_path=f"models/{name}.sql", - language="sql", - raw_code="select * from wherever", - name=name, - resource_type=NodeType.Model, - unique_id=f"model.my_test.{name}", - fqn=["my_test", "models", name], - database="test_db", - schema="test_schema", - alias="bar", - checksum=FileHash.from_contents(""), - patch_path="my_test://" + normalize("models/schema.yml"), - ) - - def get_python_model(self, name): - return ModelNode( - package_name="my_test", - path=f"{name}.py", - original_file_path=f"models/{name}.py", - raw_code="import something", - language="python", - name=name, - resource_type=NodeType.Model, - unique_id=f"model.my_test.{name}", - fqn=["my_test", "models", name], - database="test_db", - schema="test_schema", - alias="bar", - checksum=FileHash.from_contents(""), - patch_path="my_test://" + normalize("models/schema.yml"), - ) - - def test_simple(self): - - # Nothing has changed - self.assertIsNotNone(self.partial_parsing) - self.assertTrue(self.partial_parsing.skip_parsing()) - - # Change a model file - sql_model_file_id = "my_test://" + normalize("models/my_model.sql") - self.partial_parsing.new_files[sql_model_file_id].checksum = FileHash.from_contents( - "xyzabc" - ) - - python_model_file_id = "my_test://" + normalize("models/python_model.py") - self.partial_parsing.new_files[python_model_file_id].checksum = FileHash.from_contents( - "ohohoh" - ) - - self.partial_parsing.build_file_diff() - self.assertFalse(self.partial_parsing.skip_parsing()) - pp_files = self.partial_parsing.get_parsing_files() - pp_files["my_test"]["ModelParser"] = set(pp_files["my_test"]["ModelParser"]) - # models has 'patch_path' so we expect to see a SchemaParser file listed - schema_file_id = "my_test://" + normalize("models/schema.yml") - expected_pp_files = { - "my_test": { - "ModelParser": set([sql_model_file_id, python_model_file_id]), - "SchemaParser": [schema_file_id], - } - } - self.assertEqual(pp_files, expected_pp_files) - schema_file = self.saved_files[schema_file_id] - schema_file_model_names = set([model["name"] for model in schema_file.pp_dict["models"]]) - expected_model_names = set(["python_model", "my_model"]) - self.assertEqual(schema_file_model_names, expected_model_names) - schema_file_model_descriptions = set( - [model["description"] for model in schema_file.pp_dict["models"]] - ) - expected_model_descriptions = set(["Test model", "python"]) - self.assertEqual(schema_file_model_descriptions, expected_model_descriptions) diff --git a/tests/unit/test_postgres_adapter.py b/tests/unit/test_postgres_adapter.py deleted file mode 100644 index d7d91a8988b..00000000000 --- a/tests/unit/test_postgres_adapter.py +++ /dev/null @@ -1,627 +0,0 @@ -import agate -import decimal -import unittest -from unittest import mock - -from dbt.task.debug import DebugTask - -from dbt.adapters.base.query_headers import MacroQueryStringSetter -from dbt.adapters.postgres import PostgresAdapter -from dbt.adapters.postgres import Plugin as PostgresPlugin -from dbt.contracts.files import FileHash -from dbt.contracts.graph.manifest import ManifestStateCheck -from dbt.clients import agate_helper -from dbt.exceptions import DbtValidationError, DbtConfigError -from psycopg2 import extensions as psycopg2_extensions -from psycopg2 import DatabaseError - 
-from .utils import ( - config_from_parts_or_dicts, - inject_adapter, - mock_connection, - TestAdapterConversions, - load_internal_manifest_macros, - clear_plugin, -) - - -class TestPostgresAdapter(unittest.TestCase): - def setUp(self): - project_cfg = { - "name": "X", - "version": "0.1", - "profile": "test", - "project-root": "/tmp/dbt/does-not-exist", - "config-version": 2, - } - profile_cfg = { - "outputs": { - "test": { - "type": "postgres", - "dbname": "postgres", - "user": "root", - "host": "thishostshouldnotexist", - "pass": "password", - "port": 5432, - "schema": "public", - } - }, - "target": "test", - } - - self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) - self._adapter = None - - @property - def adapter(self): - if self._adapter is None: - self._adapter = PostgresAdapter(self.config) - inject_adapter(self._adapter, PostgresPlugin) - return self._adapter - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_acquire_connection_validations(self, psycopg2): - try: - connection = self.adapter.acquire_connection("dummy") - except DbtValidationError as e: - self.fail("got DbtValidationError: {}".format(str(e))) - except BaseException as e: - self.fail("acquiring connection failed with unknown exception: {}".format(str(e))) - self.assertEqual(connection.type, "postgres") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once() - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_acquire_connection(self, psycopg2): - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - self.assertEqual(connection.state, "open") - self.assertNotEqual(connection.handle, None) - psycopg2.connect.assert_called_once() - - def test_cancel_open_connections_empty(self): - self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0) - - def test_cancel_open_connections_master(self): - key = self.adapter.connections.get_thread_identifier() - self.adapter.connections.thread_connections[key] = mock_connection("master") - self.assertEqual(len(list(self.adapter.cancel_open_connections())), 0) - - def test_cancel_open_connections_single(self): - master = mock_connection("master") - model = mock_connection("model") - key = self.adapter.connections.get_thread_identifier() - model.handle.get_backend_pid.return_value = 42 - self.adapter.connections.thread_connections.update( - { - key: master, - 1: model, - } - ) - with mock.patch.object(self.adapter.connections, "add_query") as add_query: - query_result = mock.MagicMock() - add_query.return_value = (None, query_result) - - self.assertEqual(len(list(self.adapter.cancel_open_connections())), 1) - - add_query.assert_called_once_with("select pg_terminate_backend(42)") - - master.handle.get_backend_pid.assert_not_called() - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_default_connect_timeout(self, psycopg2): - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_changed_connect_timeout(self, psycopg2): - self.config.credentials = self.config.credentials.replace(connect_timeout=30) - connection = self.adapter.acquire_connection("dummy") - - 
psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=30, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_default_keepalive(self, psycopg2): - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_changed_keepalive(self, psycopg2): - self.config.credentials = self.config.credentials.replace(keepalives_idle=256) - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - keepalives_idle=256, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_default_application_name(self, psycopg2): - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_changed_application_name(self, psycopg2): - self.config.credentials = self.config.credentials.replace(application_name="myapp") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="myapp", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_role(self, psycopg2): - self.config.credentials = self.config.credentials.replace(role="somerole") - connection = self.adapter.acquire_connection("dummy") - - cursor = connection.handle.cursor() - - cursor.execute.assert_called_once_with("set role somerole") - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_search_path(self, psycopg2): - self.config.credentials = self.config.credentials.replace(search_path="test") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - options="-c search_path=test", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_sslmode(self, psycopg2): - self.config.credentials = self.config.credentials.replace(sslmode="require") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - sslmode="require", - application_name="dbt", - ) - - 
@mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_ssl_parameters(self, psycopg2): - self.config.credentials = self.config.credentials.replace(sslmode="verify-ca") - self.config.credentials = self.config.credentials.replace(sslcert="service.crt") - self.config.credentials = self.config.credentials.replace(sslkey="service.key") - self.config.credentials = self.config.credentials.replace(sslrootcert="ca.crt") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - sslmode="verify-ca", - sslcert="service.crt", - sslkey="service.key", - sslrootcert="ca.crt", - application_name="dbt", - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_schema_with_space(self, psycopg2): - self.config.credentials = self.config.credentials.replace(search_path="test test") - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - options="-c search_path=test\ test", # noqa: [W605] - ) - - @mock.patch("dbt.adapters.postgres.connections.psycopg2") - def test_set_zero_keepalive(self, psycopg2): - self.config.credentials = self.config.credentials.replace(keepalives_idle=0) - connection = self.adapter.acquire_connection("dummy") - - psycopg2.connect.assert_not_called() - connection.handle - psycopg2.connect.assert_called_once_with( - dbname="postgres", - user="root", - host="thishostshouldnotexist", - password="password", - port=5432, - connect_timeout=10, - application_name="dbt", - ) - - @mock.patch.object(PostgresAdapter, "execute_macro") - @mock.patch.object(PostgresAdapter, "_get_catalog_schemas") - def test_get_catalog_various_schemas(self, mock_get_schemas, mock_execute): - column_names = ["table_database", "table_schema", "table_name"] - rows = [ - ("dbt", "foo", "bar"), - ("dbt", "FOO", "baz"), - ("dbt", None, "bar"), - ("dbt", "quux", "bar"), - ("dbt", "skip", "bar"), - ] - mock_execute.return_value = agate.Table(rows=rows, column_names=column_names) - - mock_get_schemas.return_value.items.return_value = [ - (mock.MagicMock(database="dbt"), {"foo", "FOO", "quux"}) - ] - - mock_manifest = mock.MagicMock() - mock_manifest.get_used_schemas.return_value = {("dbt", "foo"), ("dbt", "quux")} - - catalog, exceptions = self.adapter.get_catalog(mock_manifest) - self.assertEqual( - set(map(tuple, catalog)), - {("dbt", "foo", "bar"), ("dbt", "FOO", "baz"), ("dbt", "quux", "bar")}, - ) - self.assertEqual(exceptions, []) - - -class TestConnectingPostgresAdapter(unittest.TestCase): - def setUp(self): - self.target_dict = { - "type": "postgres", - "dbname": "postgres", - "user": "root", - "host": "thishostshouldnotexist", - "pass": "password", - "port": 5432, - "schema": "public", - } - - profile_cfg = { - "outputs": { - "test": self.target_dict, - }, - "target": "test", - } - project_cfg = { - "name": "X", - "version": "0.1", - "profile": "test", - "project-root": "/tmp/dbt/does-not-exist", - "quoting": { - "identifier": False, - "schema": True, - }, - "config-version": 2, - } - - self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) - - self.handle = 
mock.MagicMock(spec=psycopg2_extensions.connection) - self.cursor = self.handle.cursor.return_value - self.mock_execute = self.cursor.execute - self.patcher = mock.patch("dbt.adapters.postgres.connections.psycopg2") - self.psycopg2 = self.patcher.start() - - # Create the Manifest.state_check patcher - @mock.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check") - def _mock_state_check(self): - all_projects = self.all_projects - return ManifestStateCheck( - vars_hash=FileHash.from_contents("vars"), - project_hashes={name: FileHash.from_contents(name) for name in all_projects}, - profile_hash=FileHash.from_contents("profile"), - ) - - self.load_state_check = mock.patch( - "dbt.parser.manifest.ManifestLoader.build_manifest_state_check" - ) - self.mock_state_check = self.load_state_check.start() - self.mock_state_check.side_effect = _mock_state_check - - self.psycopg2.connect.return_value = self.handle - self.adapter = PostgresAdapter(self.config) - self.adapter._macro_manifest_lazy = load_internal_manifest_macros(self.config) - self.adapter.connections.query_header = MacroQueryStringSetter( - self.config, self.adapter._macro_manifest_lazy - ) - - self.qh_patch = mock.patch.object(self.adapter.connections.query_header, "add") - self.mock_query_header_add = self.qh_patch.start() - self.mock_query_header_add.side_effect = lambda q: "/* dbt */\n{}".format(q) - self.adapter.acquire_connection() - inject_adapter(self.adapter, PostgresPlugin) - - def tearDown(self): - # we want a unique self.handle every time. - self.adapter.cleanup_connections() - self.qh_patch.stop() - self.patcher.stop() - self.load_state_check.stop() - clear_plugin(PostgresPlugin) - - def test_quoting_on_drop_schema(self): - relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - quote_policy=self.adapter.config.quoting, - ) - self.adapter.drop_schema(relation) - - self.mock_execute.assert_has_calls( - [mock.call('/* dbt */\ndrop schema if exists "test_schema" cascade', None)] - ) - - def test_quoting_on_drop(self): - relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="test_table", - type="table", - quote_policy=self.adapter.config.quoting, - ) - self.adapter.drop_relation(relation) - self.mock_execute.assert_has_calls( - [ - mock.call( - '/* dbt */\ndrop table if exists "postgres"."test_schema".test_table cascade', - None, - ) - ] - ) - - def test_quoting_on_truncate(self): - relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="test_table", - type="table", - quote_policy=self.adapter.config.quoting, - ) - self.adapter.truncate_relation(relation) - self.mock_execute.assert_has_calls( - [mock.call('/* dbt */\ntruncate table "postgres"."test_schema".test_table', None)] - ) - - def test_quoting_on_rename(self): - from_relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="table_a", - type="table", - quote_policy=self.adapter.config.quoting, - ) - to_relation = self.adapter.Relation.create( - database="postgres", - schema="test_schema", - identifier="table_b", - type="table", - quote_policy=self.adapter.config.quoting, - ) - - self.adapter.rename_relation(from_relation=from_relation, to_relation=to_relation) - self.mock_execute.assert_has_calls( - [ - mock.call( - '/* dbt */\nalter table "postgres"."test_schema".table_a rename to table_b', - None, - ) - ] - ) - - def test_debug_connection_ok(self): - 
DebugTask.validate_connection(self.target_dict) - self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) - - def test_debug_connection_fail_nopass(self): - del self.target_dict["pass"] - with self.assertRaises(DbtConfigError): - DebugTask.validate_connection(self.target_dict) - - def test_connection_fail_select(self): - self.mock_execute.side_effect = DatabaseError() - with self.assertRaises(DbtConfigError): - DebugTask.validate_connection(self.target_dict) - self.mock_execute.assert_has_calls([mock.call("/* dbt */\nselect 1 as id", None)]) - - def test_dbname_verification_is_case_insensitive(self): - # Override adapter settings from setUp() - self.target_dict["dbname"] = "Postgres" - profile_cfg = { - "outputs": { - "test": self.target_dict, - }, - "target": "test", - } - project_cfg = { - "name": "X", - "version": "0.1", - "profile": "test", - "project-root": "/tmp/dbt/does-not-exist", - "quoting": { - "identifier": False, - "schema": True, - }, - "config-version": 2, - } - self.config = config_from_parts_or_dicts(project_cfg, profile_cfg) - self.adapter.cleanup_connections() - self._adapter = PostgresAdapter(self.config) - self.adapter.verify_database("postgres") - - -class TestPostgresFilterCatalog(unittest.TestCase): - def test__catalog_filter_table(self): - manifest = mock.MagicMock() - manifest.get_used_schemas.return_value = [["a", "B"], ["a", "1234"]] - column_names = ["table_name", "table_database", "table_schema", "something"] - rows = [ - ["foo", "a", "b", "1234"], # include - ["foo", "a", "1234", "1234"], # include, w/ table schema as str - ["foo", "c", "B", "1234"], # skip - ["1234", "A", "B", "1234"], # include, w/ table name as str - ] - table = agate.Table(rows, column_names, agate_helper.DEFAULT_TYPE_TESTER) - - result = PostgresAdapter._catalog_filter_table(table, manifest) - assert len(result) == 3 - for row in result.rows: - assert isinstance(row["table_schema"], str) - assert isinstance(row["table_database"], str) - assert isinstance(row["table_name"], str) - assert isinstance(row["something"], decimal.Decimal) - - -class TestPostgresAdapterConversions(TestAdapterConversions): - def test_convert_text_type(self): - rows = [ - ["", "a1", "stringval1"], - ["", "a2", "stringvalasdfasdfasdfa"], - ["", "a3", "stringval3"], - ] - agate_table = self._make_table_of(rows, agate.Text) - expected = ["text", "text", "text"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_text_type(agate_table, col_idx) == expect - - def test_convert_number_type(self): - rows = [ - ["", "23.98", "-1"], - ["", "12.78", "-2"], - ["", "79.41", "-3"], - ] - agate_table = self._make_table_of(rows, agate.Number) - expected = ["integer", "float8", "integer"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_number_type(agate_table, col_idx) == expect - - def test_convert_boolean_type(self): - rows = [ - ["", "false", "true"], - ["", "false", "false"], - ["", "false", "true"], - ] - agate_table = self._make_table_of(rows, agate.Boolean) - expected = ["boolean", "boolean", "boolean"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_boolean_type(agate_table, col_idx) == expect - - def test_convert_datetime_type(self): - rows = [ - ["", "20190101T01:01:01Z", "2019-01-01 01:01:01"], - ["", "20190102T01:01:01Z", "2019-01-01 01:01:01"], - ["", "20190103T01:01:01Z", "2019-01-01 01:01:01"], - ] - agate_table = self._make_table_of( - rows, [agate.DateTime, agate_helper.ISODateTime, agate.DateTime] - 
) - expected = [ - "timestamp without time zone", - "timestamp without time zone", - "timestamp without time zone", - ] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_datetime_type(agate_table, col_idx) == expect - - def test_convert_date_type(self): - rows = [ - ["", "2019-01-01", "2019-01-04"], - ["", "2019-01-02", "2019-01-04"], - ["", "2019-01-03", "2019-01-04"], - ] - agate_table = self._make_table_of(rows, agate.Date) - expected = ["date", "date", "date"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_date_type(agate_table, col_idx) == expect - - def test_convert_time_type(self): - # dbt's default type testers actually don't have a TimeDelta at all. - agate.TimeDelta - rows = [ - ["", "120s", "10s"], - ["", "3m", "11s"], - ["", "1h", "12s"], - ] - agate_table = self._make_table_of(rows, agate.TimeDelta) - expected = ["time", "time", "time"] - for col_idx, expect in enumerate(expected): - assert PostgresAdapter.convert_time_type(agate_table, col_idx) == expect diff --git a/tests/unit/test_query_headers.py b/tests/unit/test_query_headers.py deleted file mode 100644 index 9e814720a76..00000000000 --- a/tests/unit/test_query_headers.py +++ /dev/null @@ -1,50 +0,0 @@ -import re -from unittest import TestCase, mock - -from dbt.adapters.base.query_headers import MacroQueryStringSetter - -from tests.unit.utils import config_from_parts_or_dicts - - -class TestQueryHeaders(TestCase): - def setUp(self): - self.profile_cfg = { - "outputs": { - "test": { - "type": "postgres", - "dbname": "postgres", - "user": "test", - "host": "test", - "pass": "test", - "port": 5432, - "schema": "test", - }, - }, - "target": "test", - } - self.project_cfg = { - "name": "query_headers", - "version": "0.1", - "profile": "test", - "config-version": 2, - } - self.query = "SELECT 1;" - - def test_comment_should_prepend_query_by_default(self): - config = config_from_parts_or_dicts(self.project_cfg, self.profile_cfg) - query_header = MacroQueryStringSetter(config, mock.MagicMock(macros={})) - sql = query_header.add(self.query) - self.assertTrue(re.match(f"^\/\*.*\*\/\n{self.query}$", sql)) # noqa: [W605] - - def test_append_comment(self): - self.project_cfg.update({"query-comment": {"comment": "executed by dbt", "append": True}}) - config = config_from_parts_or_dicts(self.project_cfg, self.profile_cfg) - query_header = MacroQueryStringSetter(config, mock.MagicMock(macros={})) - sql = query_header.add(self.query) - self.assertEqual(sql, f"{self.query[:-1]}\n/* executed by dbt */;") - - def test_disable_query_comment(self): - self.project_cfg.update({"query-comment": ""}) - config = config_from_parts_or_dicts(self.project_cfg, self.profile_cfg) - query_header = MacroQueryStringSetter(config, mock.MagicMock(macros={})) - self.assertEqual(query_header.add(self.query), self.query) diff --git a/tests/unit/test_semantic_layer_nodes_satisfy_protocols.py b/tests/unit/test_semantic_layer_nodes_satisfy_protocols.py index 083924fb428..3838b1a4a33 100644 --- a/tests/unit/test_semantic_layer_nodes_satisfy_protocols.py +++ b/tests/unit/test_semantic_layer_nodes_satisfy_protocols.py @@ -1,34 +1,41 @@ -from dbt.contracts.graph.nodes import ( - Metric, - MetricInput, - MetricInputMeasure, - MetricTypeParams, - NodeRelation, - SemanticModel, - WhereFilter, -) -from dbt.contracts.graph.semantic_models import ( +import copy +from typing import Protocol, runtime_checkable + +import pytest +from hypothesis import given +from hypothesis.strategies import builds, none, text + 
+from dbt.artifacts.resources import ( + ConstantPropertyInput, + ConversionTypeParams, + CumulativeTypeParams, + Defaults, Dimension, DimensionTypeParams, + DimensionValidityParams, Entity, + FileSlice, Measure, + MeasureAggregationParameters, + MetricInput, + MetricInputMeasure, + MetricTimeWindow, + MetricTypeParams, + NodeRelation, NonAdditiveDimension, + SourceFileMetadata, + WhereFilter, ) +from dbt.contracts.graph.nodes import Metric, SavedQuery, SemanticModel from dbt.node_types import NodeType -from dbt_semantic_interfaces.protocols import ( - Dimension as DSIDimension, - Entity as DSIEntitiy, - Measure as DSIMeasure, - Metric as DSIMetric, - MetricInput as DSIMetricInput, - MetricInputMeasure as DSIMetricInputMeasure, - MetricTypeParams as DSIMetricTypeParams, - SemanticModel as DSISemanticModel, - WhereFilter as DSIWhereFilter, -) -from dbt_semantic_interfaces.protocols.measure import ( - NonAdditiveDimensionParameters as DSINonAdditiveDimensionParameters, -) +from dbt_semantic_interfaces.protocols import WhereFilter as WhereFilterProtocol +from dbt_semantic_interfaces.protocols import dimension as DimensionProtocols +from dbt_semantic_interfaces.protocols import entity as EntityProtocols +from dbt_semantic_interfaces.protocols import measure as MeasureProtocols +from dbt_semantic_interfaces.protocols import metadata as MetadataProtocols +from dbt_semantic_interfaces.protocols import metric as MetricProtocols +from dbt_semantic_interfaces.protocols import saved_query as SavedQueryProtocols +from dbt_semantic_interfaces.protocols import semantic_model as SemanticModelProtocols from dbt_semantic_interfaces.type_enums import ( AggregationType, DimensionType, @@ -36,63 +43,277 @@ MetricType, TimeGranularity, ) -from typing import Protocol, runtime_checkable @runtime_checkable -class RuntimeCheckableSemanticModel(DSISemanticModel, Protocol): +class RuntimeCheckableSemanticModel(SemanticModelProtocols.SemanticModel, Protocol): pass @runtime_checkable -class RuntimeCheckableDimension(DSIDimension, Protocol): +class RuntimeCheckableDimension(DimensionProtocols.Dimension, Protocol): pass @runtime_checkable -class RuntimeCheckableEntity(DSIEntitiy, Protocol): +class RuntimeCheckableEntity(EntityProtocols.Entity, Protocol): pass @runtime_checkable -class RuntimeCheckableMeasure(DSIMeasure, Protocol): +class RuntimeCheckableMeasure(MeasureProtocols.Measure, Protocol): pass @runtime_checkable -class RuntimeCheckableMetric(DSIMetric, Protocol): +class RuntimeCheckableMetric(MetricProtocols.Metric, Protocol): pass @runtime_checkable -class RuntimeCheckableMetricInput(DSIMetricInput, Protocol): +class RuntimeCheckableMetricInput(MetricProtocols.MetricInput, Protocol): pass @runtime_checkable -class RuntimeCheckableMetricInputMeasure(DSIMetricInputMeasure, Protocol): +class RuntimeCheckableMetricInputMeasure(MetricProtocols.MetricInputMeasure, Protocol): pass @runtime_checkable -class RuntimeCheckableMetricTypeParams(DSIMetricTypeParams, Protocol): +class RuntimeCheckableMetricTypeParams(MetricProtocols.MetricTypeParams, Protocol): pass @runtime_checkable -class RuntimeCheckableWhereFilter(DSIWhereFilter, Protocol): +class RuntimeCheckableWhereFilter(WhereFilterProtocol, Protocol): pass @runtime_checkable -class RuntimeCheckableNonAdditiveDimension(DSINonAdditiveDimensionParameters, Protocol): +class RuntimeCheckableNonAdditiveDimension( + MeasureProtocols.NonAdditiveDimensionParameters, Protocol +): pass -def test_semantic_model_node_satisfies_protocol(): +@runtime_checkable +class 
RuntimeCheckableFileSlice(MetadataProtocols.FileSlice, Protocol): + pass + + +@runtime_checkable +class RuntimeCheckableSourceFileMetadata(MetadataProtocols.Metadata, Protocol): + pass + + +@runtime_checkable +class RuntimeCheckableSemanticModelDefaults( + SemanticModelProtocols.SemanticModelDefaults, Protocol +): + pass + + +@runtime_checkable +class RuntimeCheckableDimensionValidityParams( + DimensionProtocols.DimensionValidityParams, Protocol +): + pass + + +@runtime_checkable +class RuntimeCheckableDimensionTypeParams(DimensionProtocols.DimensionTypeParams, Protocol): + pass + + +@runtime_checkable +class RuntimeCheckableMeasureAggregationParams( + MeasureProtocols.MeasureAggregationParameters, Protocol +): + pass + + +@runtime_checkable +class RuntimeCheckableMetricTimeWindow(MetricProtocols.MetricTimeWindow, Protocol): + pass + + +@runtime_checkable +class RuntimeCheckableSavedQuery(SavedQueryProtocols.SavedQuery, Protocol): + pass + + +@pytest.fixture(scope="session") +def file_slice() -> FileSlice: + return FileSlice( + filename="test_filename", content="test content", start_line_number=0, end_line_number=1 + ) + + +@pytest.fixture(scope="session") +def source_file_metadata(file_slice) -> SourceFileMetadata: + return SourceFileMetadata( + repo_file_path="test/file/path.yml", + file_slice=file_slice, + ) + + +@pytest.fixture(scope="session") +def semantic_model_defaults() -> Defaults: + return Defaults(agg_time_dimension="test_time_dimension") + + +@pytest.fixture(scope="session") +def dimension_validity_params() -> DimensionValidityParams: + return DimensionValidityParams() + + +@pytest.fixture(scope="session") +def dimension_type_params() -> DimensionTypeParams: + return DimensionTypeParams(time_granularity=TimeGranularity.DAY) + + +@pytest.fixture(scope="session") +def measure_agg_params() -> MeasureAggregationParameters: + return MeasureAggregationParameters() + + +@pytest.fixture(scope="session") +def non_additive_dimension() -> NonAdditiveDimension: + return NonAdditiveDimension( + name="dimension_name", + window_choice=AggregationType.MIN, + window_groupings=["entity_name"], + ) + + +@pytest.fixture(scope="session") +def where_filter() -> WhereFilter: + return WhereFilter( + where_sql_template="{{ Dimension('enity_name__dimension_name') }} AND {{ TimeDimension('entity_name__time_dimension_name', 'month') }} AND {{ Entity('entity_name') }}" + ) + + +@pytest.fixture(scope="session") +def metric_time_window() -> MetricTimeWindow: + return MetricTimeWindow(count=1, granularity=TimeGranularity.DAY) + + +@pytest.fixture(scope="session") +def simple_metric_input() -> MetricInput: + return MetricInput(name="test_simple_metric_input") + + +@pytest.fixture(scope="session") +def complex_metric_input(metric_time_window, where_filter) -> MetricInput: + return MetricInput( + name="test_complex_metric_input", + filter=where_filter, + alias="aliased_metric_input", + offset_window=metric_time_window, + offset_to_grain=TimeGranularity.DAY, + ) + + +@pytest.fixture(scope="session") +def simple_metric_input_measure() -> MetricInputMeasure: + return MetricInputMeasure(name="test_simple_metric_input_measure") + + +@pytest.fixture(scope="session") +def complex_metric_input_measure(where_filter) -> MetricInputMeasure: + return MetricInputMeasure( + name="test_complex_metric_input_measure", + filter=where_filter, + alias="complex_alias", + join_to_timespine=True, + fill_nulls_with=0, + ) + + +@pytest.fixture(scope="session") +def conversion_type_params( + simple_metric_input_measure, 
metric_time_window +) -> ConversionTypeParams: + return ConversionTypeParams( + base_measure=simple_metric_input_measure, + conversion_measure=simple_metric_input_measure, + entity="entity", + window=metric_time_window, + constant_properties=[ + ConstantPropertyInput(base_property="base", conversion_property="conversion") + ], + ) + + +@pytest.fixture(scope="session") +def cumulative_type_params() -> CumulativeTypeParams: + return CumulativeTypeParams() + + +@pytest.fixture(scope="session") +def complex_metric_type_params( + metric_time_window, + simple_metric_input, + simple_metric_input_measure, + conversion_type_params, + cumulative_type_params, +) -> MetricTypeParams: + return MetricTypeParams( + measure=simple_metric_input_measure, + numerator=simple_metric_input, + denominator=simple_metric_input, + expr="1 = 1", + window=metric_time_window, + grain_to_date=TimeGranularity.DAY, + metrics=[simple_metric_input], + conversion_type_params=conversion_type_params, + cumulative_type_params=cumulative_type_params, + ) + + +def test_file_slice_obj_satisfies_protocol(file_slice): + assert isinstance(file_slice, RuntimeCheckableFileSlice) + + +def test_metadata_obj_satisfies_protocol(source_file_metadata): + assert isinstance(source_file_metadata, RuntimeCheckableSourceFileMetadata) + + +def test_defaults_obj_satisfies_protocol(semantic_model_defaults): + assert isinstance(semantic_model_defaults, RuntimeCheckableSemanticModelDefaults) + assert isinstance(Defaults(), RuntimeCheckableSemanticModelDefaults) + + +def test_dimension_validity_params_satisfies_protocol(dimension_validity_params): + assert isinstance(dimension_validity_params, RuntimeCheckableDimensionValidityParams) + + +def test_dimension_type_params_satisfies_protocol( + dimension_type_params, dimension_validity_params +): + assert isinstance(dimension_type_params, RuntimeCheckableDimensionTypeParams) + + # check with validity params specified + optionals_specified_type_params = copy.deepcopy(dimension_type_params) + optionals_specified_type_params.validity_params = dimension_validity_params + assert isinstance(optionals_specified_type_params, RuntimeCheckableDimensionTypeParams) + + +def test_measure_aggregation_params_satisfies_protocol(measure_agg_params): + assert isinstance(measure_agg_params, RuntimeCheckableMeasureAggregationParams) + + # check with optionals specified + optionals_specified_measure_agg_params = copy.deepcopy(measure_agg_params) + optionals_specified_measure_agg_params.percentile = 0.5 + assert isinstance( + optionals_specified_measure_agg_params, RuntimeCheckableMeasureAggregationParams + ) + + +def test_semantic_model_node_satisfies_protocol_optionals_unspecified(): test_semantic_model = SemanticModel( name="test_semantic_model", - description="a test semantic_model", resource_type=NodeType.SemanticModel, package_name="package_name", path="path.to.semantic_model", @@ -100,33 +321,78 @@ def test_semantic_model_node_satisfies_protocol(): unique_id="not_like_the_other_semantic_models", fqn=["fully", "qualified", "name"], model="ref('a_model')", + # Technically NodeRelation is optional on our SemanticModel implementation + # however, it's functionally always loaded, it's just delayed. 
+ # This type/state mismatch will likely bite us at some point node_relation=NodeRelation( alias="test_alias", schema_name="test_schema_name", ), - entities=[], - measures=[], - dimensions=[], ) assert isinstance(test_semantic_model, RuntimeCheckableSemanticModel) -def test_dimension_satisfies_protocol(): +def test_semantic_model_node_satisfies_protocol_optionals_specified( + semantic_model_defaults, source_file_metadata +): + test_semantic_model = SemanticModel( + name="test_semantic_model", + resource_type=NodeType.SemanticModel, + package_name="package_name", + path="path.to.semantic_model", + original_file_path="path/to/file", + unique_id="not_like_the_other_semantic_models", + fqn=["fully", "qualified", "name"], + model="ref('a_model')", + node_relation=NodeRelation( + alias="test_alias", + schema_name="test_schema_name", + ), + description="test_description", + label="test label", + defaults=semantic_model_defaults, + metadata=source_file_metadata, + primary_entity="test_primary_entity", + ) + assert isinstance(test_semantic_model, RuntimeCheckableSemanticModel) + + +def test_dimension_satisfies_protocol_optionals_unspecified(): dimension = Dimension( name="test_dimension", - description="a test dimension", type=DimensionType.TIME, - type_params=DimensionTypeParams( - time_granularity=TimeGranularity.DAY, - ), ) assert isinstance(dimension, RuntimeCheckableDimension) -def test_entity_satisfies_protocol(): +def test_dimension_satisfies_protocol_optionals_specified( + dimension_type_params, source_file_metadata +): + dimension = Dimension( + name="test_dimension", + type=DimensionType.TIME, + description="test_description", + label="test_label", + type_params=dimension_type_params, + expr="1", + metadata=source_file_metadata, + ) + assert isinstance(dimension, RuntimeCheckableDimension) + + +def test_entity_satisfies_protocol_optionals_unspecified(): + entity = Entity( + name="test_entity", + type=EntityType.PRIMARY, + ) + assert isinstance(entity, RuntimeCheckableEntity) + + +def test_entity_satisfies_protocol_optionals_specified(): entity = Entity( name="test_entity", description="a test entity", + label="A test entity", type=EntityType.PRIMARY, expr="id", role="a_role", @@ -134,19 +400,33 @@ def test_entity_satisfies_protocol(): assert isinstance(entity, RuntimeCheckableEntity) -def test_measure_satisfies_protocol(): +def test_measure_satisfies_protocol_optionals_unspecified(): + measure = Measure( + name="test_measure", + agg="sum", + ) + assert isinstance(measure, RuntimeCheckableMeasure) + + +def test_measure_satisfies_protocol_optionals_specified( + measure_agg_params, non_additive_dimension +): measure = Measure( name="test_measure", description="a test measure", + label="A test measure", agg="sum", create_metric=True, expr="amount", + agg_params=measure_agg_params, + non_additive_dimension=non_additive_dimension, agg_time_dimension="a_time_dimension", ) assert isinstance(measure, RuntimeCheckableMeasure) -def test_metric_node_satisfies_protocol(): +@pytest.mark.skip(reason="Overly sensitive to non-breaking changes") +def test_metric_node_satisfies_protocol_optionals_unspecified(): metric = Metric( name="a_metric", resource_type=NodeType.Metric, @@ -167,32 +447,64 @@ def test_metric_node_satisfies_protocol(): assert isinstance(metric, RuntimeCheckableMetric) -def test_where_filter_satisfies_protocol(): - where_filter = WhereFilter( - where_sql_template="{{ Dimension('enity_name__dimension_name') }} AND {{ TimeDimension('entity_name__time_dimension_name', 'month') }} AND {{ 
Entity('entity_name') }}" +@pytest.mark.skip(reason="Overly sensitive to non-breaking changes") +def test_metric_node_satisfies_protocol_optionals_specified( + complex_metric_type_params, source_file_metadata, where_filter +): + metric = Metric( + name="a_metric", + resource_type=NodeType.Metric, + package_name="package_name", + path="path.to.semantic_model", + original_file_path="path/to/file", + unique_id="not_like_the_other_semantic_models", + fqn=["fully", "qualified", "name"], + description="a test metric", + label="A test metric", + type=MetricType.SIMPLE, + type_params=complex_metric_type_params, + filter=where_filter, + metadata=source_file_metadata, + group="test_group", ) + assert isinstance(metric, RuntimeCheckableMetric) + + +def test_where_filter_satisfies_protocol(where_filter): assert isinstance(where_filter, RuntimeCheckableWhereFilter) -def test_metric_input(): - metric_input = MetricInput(name="a_metric_input") - assert isinstance(metric_input, RuntimeCheckableMetricInput) +def test_metric_time_window(metric_time_window): + assert isinstance(metric_time_window, RuntimeCheckableMetricTimeWindow) -def test_metric_input_measure(): - metric_input_measure = MetricInputMeasure(name="a_metric_input_measure") - assert isinstance(metric_input_measure, RuntimeCheckableMetricInputMeasure) +def test_metric_input(simple_metric_input, complex_metric_input): + assert isinstance(simple_metric_input, RuntimeCheckableMetricInput) + assert isinstance(complex_metric_input, RuntimeCheckableMetricInput) -def test_metric_type_params_satisfies_protocol(): - type_params = MetricTypeParams() - assert isinstance(type_params, RuntimeCheckableMetricTypeParams) +def test_metric_input_measure(simple_metric_input_measure, complex_metric_input_measure): + assert isinstance(simple_metric_input_measure, RuntimeCheckableMetricInputMeasure) + assert isinstance(complex_metric_input_measure, RuntimeCheckableMetricInputMeasure) -def test_non_additive_dimension_satisfies_protocol(): - non_additive_dimension = NonAdditiveDimension( - name="dimension_name", - window_choice=AggregationType.MIN, - window_groupings=["entity_name"], - ) +@pytest.mark.skip(reason="Overly sensitive to non-breaking changes") +def test_metric_type_params_satisfies_protocol(complex_metric_type_params): + assert isinstance(MetricTypeParams(), RuntimeCheckableMetricTypeParams) + assert isinstance(complex_metric_type_params, RuntimeCheckableMetricTypeParams) + + +def test_non_additive_dimension_satisfies_protocol(non_additive_dimension): assert isinstance(non_additive_dimension, RuntimeCheckableNonAdditiveDimension) + + +@given( + builds( + SavedQuery, + description=text() | none(), + label=text() | none(), + metadata=builds(SourceFileMetadata) | none(), + ) +) +def test_saved_query_satisfies_protocol(saved_query: SavedQuery): + assert isinstance(saved_query, SavedQuery) diff --git a/tests/unit/test_semantic_models.py b/tests/unit/test_semantic_models.py deleted file mode 100644 index b1052eb2150..00000000000 --- a/tests/unit/test_semantic_models.py +++ /dev/null @@ -1,81 +0,0 @@ -import pytest - -from typing import List - -from dbt.contracts.graph.nodes import SemanticModel -from dbt.contracts.graph.semantic_models import Dimension, Entity, Measure, Defaults -from dbt.node_types import NodeType -from dbt_semantic_interfaces.references import MeasureReference -from dbt_semantic_interfaces.type_enums import AggregationType, DimensionType, EntityType - - -@pytest.fixture(scope="function") -def dimensions() -> List[Dimension]: - return 
[Dimension(name="ds", type=DimensionType)] - - -@pytest.fixture(scope="function") -def entities() -> List[Entity]: - return [Entity(name="test_entity", type=EntityType.PRIMARY, expr="id")] - - -@pytest.fixture(scope="function") -def measures() -> List[Measure]: - return [Measure(name="test_measure", agg=AggregationType.COUNT, expr="id")] - - -@pytest.fixture(scope="function") -def default_semantic_model( - dimensions: List[Dimension], entities: List[Entity], measures: List[Measure] -) -> SemanticModel: - return SemanticModel( - name="test_semantic_model", - resource_type=NodeType.SemanticModel, - model="ref('test_model')", - package_name="test", - path="test_path", - original_file_path="test_fixture", - unique_id=f"{NodeType.SemanticModel}.test.test_semantic_model", - fqn=[], - defaults=Defaults(agg_time_dimension="ds"), - dimensions=dimensions, - entities=entities, - measures=measures, - node_relation=None, - ) - - -def test_checked_agg_time_dimension_for_measure_via_defaults( - default_semantic_model: SemanticModel, -): - assert default_semantic_model.defaults.agg_time_dimension is not None - measure = default_semantic_model.measures[0] - measure.agg_time_dimension = None - default_semantic_model.checked_agg_time_dimension_for_measure( - MeasureReference(element_name=measure.name) - ) - - -def test_checked_agg_time_dimension_for_measure_via_measure(default_semantic_model: SemanticModel): - default_semantic_model.defaults = None - measure = default_semantic_model.measures[0] - measure.agg_time_dimension = default_semantic_model.dimensions[0].name - default_semantic_model.checked_agg_time_dimension_for_measure( - MeasureReference(element_name=measure.name) - ) - - -def test_checked_agg_time_dimension_for_measure_exception(default_semantic_model: SemanticModel): - default_semantic_model.defaults = None - measure = default_semantic_model.measures[0] - measure.agg_time_dimension = None - - with pytest.raises(AssertionError) as execinfo: - default_semantic_model.checked_agg_time_dimension_for_measure( - MeasureReference(measure.name) - ) - - assert ( - f"Aggregation time dimension for measure {measure.name} on semantic model {default_semantic_model.name}" - in str(execinfo.value) - ) diff --git a/tests/unit/test_semver.py b/tests/unit/test_semver.py deleted file mode 100644 index bdd8c6ae9c3..00000000000 --- a/tests/unit/test_semver.py +++ /dev/null @@ -1,298 +0,0 @@ -import unittest -import itertools - -from typing import List -from dbt.exceptions import VersionsNotCompatibleError -from dbt.semver import ( - VersionSpecifier, - UnboundedVersionSpecifier, - VersionRange, - reduce_versions, - versions_compatible, - resolve_to_specific_version, - filter_installable, -) - - -def semver_regex_versioning(versions: List[str]) -> bool: - for version_string in versions: - try: - VersionSpecifier.from_version_string(version_string) - except Exception: - return False - return True - - -def create_range(start_version_string, end_version_string): - start = UnboundedVersionSpecifier() - end = UnboundedVersionSpecifier() - - if start_version_string is not None: - start = VersionSpecifier.from_version_string(start_version_string) - - if end_version_string is not None: - end = VersionSpecifier.from_version_string(end_version_string) - - return VersionRange(start=start, end=end) - - -class TestSemver(unittest.TestCase): - def assertVersionSetResult(self, inputs, output_range): - expected = create_range(*output_range) - - for permutation in itertools.permutations(inputs): - 
self.assertEqual(reduce_versions(*permutation), expected) - - def assertInvalidVersionSet(self, inputs): - for permutation in itertools.permutations(inputs): - with self.assertRaises(VersionsNotCompatibleError): - reduce_versions(*permutation) - - def test__versions_compatible(self): - self.assertTrue(versions_compatible("0.0.1", "0.0.1")) - self.assertFalse(versions_compatible("0.0.1", "0.0.2")) - self.assertTrue(versions_compatible(">0.0.1", "0.0.2")) - self.assertFalse(versions_compatible("0.4.5a1", "0.4.5a2")) - - def test__semver_regex_versions(self): - self.assertTrue( - semver_regex_versioning( - [ - "0.0.4", - "1.2.3", - "10.20.30", - "1.1.2-prerelease+meta", - "1.1.2+meta", - "1.1.2+meta-valid", - "1.0.0-alpha", - "1.0.0-beta", - "1.0.0-alpha.beta", - "1.0.0-alpha.beta.1", - "1.0.0-alpha.1", - "1.0.0-alpha0.valid", - "1.0.0-alpha.0valid", - "1.0.0-alpha-a.b-c-somethinglong+build.1-aef.1-its-okay", - "1.0.0-rc.1+build.1", - "2.0.0-rc.1+build.123", - "1.2.3-beta", - "10.2.3-DEV-SNAPSHOT", - "1.2.3-SNAPSHOT-123", - "1.0.0", - "2.0.0", - "1.1.7", - "2.0.0+build.1848", - "2.0.1-alpha.1227", - "1.0.0-alpha+beta", - "1.2.3----RC-SNAPSHOT.12.9.1--.12+788", - "1.2.3----R-S.12.9.1--.12+meta", - "1.2.3----RC-SNAPSHOT.12.9.1--.12", - "1.0.0+0.build.1-rc.10000aaa-kk-0.1", - "99999999999999999999999.999999999999999999.99999999999999999", - "1.0.0-0A.is.legal", - ] - ) - ) - - self.assertFalse( - semver_regex_versioning( - [ - "1", - "1.2", - "1.2.3-0123", - "1.2.3-0123.0123", - "1.1.2+.123", - "+invalid", - "-invalid", - "-invalid+invalid", - "-invalid.01", - "alpha", - "alpha.beta", - "alpha.beta.1", - "alpha.1", - "alpha+beta", - "alpha_beta", - "alpha.", - "alpha..", - "beta", - "1.0.0-alpha_beta", - "-alpha.", - "1.0.0-alpha..", - "1.0.0-alpha..1", - "1.0.0-alpha...1", - "1.0.0-alpha....1", - "1.0.0-alpha.....1", - "1.0.0-alpha......1", - "1.0.0-alpha.......1", - "01.1.1", - "1.01.1", - "1.1.01", - "1.2", - "1.2.3.DEV", - "1.2-SNAPSHOT", - "1.2.31.2.3----RC-SNAPSHOT.12.09.1--..12+788", - "1.2-RC-SNAPSHOT", - "-1.0.3-gamma+b7718", - "+justmeta", - "9.8.7+meta+meta", - "9.8.7-whatever+meta+meta", - "99999999999999999999999.999999999999999999.99999999999999999----RC-SNAPSHOT.12.09.1--------------------------------..12", - ] - ) - ) - - def test__reduce_versions(self): - self.assertVersionSetResult(["0.0.1", "0.0.1"], ["=0.0.1", "=0.0.1"]) - - self.assertVersionSetResult(["0.0.1"], ["=0.0.1", "=0.0.1"]) - - self.assertVersionSetResult([">0.0.1"], [">0.0.1", None]) - - self.assertVersionSetResult(["<0.0.1"], [None, "<0.0.1"]) - - self.assertVersionSetResult([">0.0.1", "0.0.2"], ["=0.0.2", "=0.0.2"]) - - self.assertVersionSetResult(["0.0.2", ">=0.0.2"], ["=0.0.2", "=0.0.2"]) - - self.assertVersionSetResult([">0.0.1", ">0.0.2", ">0.0.3"], [">0.0.3", None]) - - self.assertVersionSetResult([">0.0.1", "<0.0.3"], [">0.0.1", "<0.0.3"]) - - self.assertVersionSetResult([">0.0.1", "0.0.2", "<0.0.3"], ["=0.0.2", "=0.0.2"]) - - self.assertVersionSetResult([">0.0.1", ">=0.0.1", "<0.0.3"], [">0.0.1", "<0.0.3"]) - - self.assertVersionSetResult([">0.0.1", "<0.0.3", "<=0.0.3"], [">0.0.1", "<0.0.3"]) - - self.assertVersionSetResult([">0.0.1", ">0.0.2", "<0.0.3", "<0.0.4"], [">0.0.2", "<0.0.3"]) - - self.assertVersionSetResult(["<=0.0.3", ">=0.0.3"], [">=0.0.3", "<=0.0.3"]) - - self.assertInvalidVersionSet([">0.0.2", "0.0.1"]) - self.assertInvalidVersionSet([">0.0.2", "0.0.2"]) - self.assertInvalidVersionSet(["<0.0.2", "0.0.2"]) - self.assertInvalidVersionSet(["<0.0.2", ">0.0.3"]) - 
self.assertInvalidVersionSet(["<=0.0.3", ">0.0.3"]) - self.assertInvalidVersionSet(["<0.0.3", ">=0.0.3"]) - self.assertInvalidVersionSet(["<0.0.3", ">0.0.3"]) - - def test__resolve_to_specific_version(self): - self.assertEqual( - resolve_to_specific_version(create_range(">0.0.1", None), ["0.0.1", "0.0.2"]), "0.0.2" - ) - - self.assertEqual( - resolve_to_specific_version(create_range(">=0.0.2", None), ["0.0.1", "0.0.2"]), "0.0.2" - ) - - self.assertEqual( - resolve_to_specific_version(create_range(">=0.0.3", None), ["0.0.1", "0.0.2"]), None - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(">=0.0.3", "<0.0.5"), ["0.0.3", "0.0.4", "0.0.5"] - ), - "0.0.4", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(None, "<=0.0.5"), ["0.0.3", "0.1.4", "0.0.5"] - ), - "0.0.5", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range("=0.4.5a2", "=0.4.5a2"), ["0.4.5a1", "0.4.5a2"] - ), - "0.4.5a2", - ) - - self.assertEqual( - resolve_to_specific_version(create_range("=0.7.6", "=0.7.6"), ["0.7.6-b1", "0.7.6"]), - "0.7.6", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(">=1.0.0", None), ["1.0.0", "1.1.0a1", "1.1.0", "1.2.0a1"] - ), - "1.2.0a1", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(">=1.0.0", "<1.2.0"), ["1.0.0", "1.1.0a1", "1.1.0", "1.2.0a1"] - ), - "1.1.0", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(">=1.0.0", None), ["1.0.0", "1.1.0a1", "1.1.0", "1.2.0a1", "1.2.0"] - ), - "1.2.0", - ) - - self.assertEqual( - resolve_to_specific_version( - create_range(">=1.0.0", "<1.2.0"), - ["1.0.0", "1.1.0a1", "1.1.0", "1.2.0a1", "1.2.0"], - ), - "1.1.0", - ) - - self.assertEqual( - resolve_to_specific_version( - # https://github.com/dbt-labs/dbt-core/issues/7039 - # 10 is greater than 9 - create_range(">0.9.0", "<0.10.0"), - ["0.9.0", "0.9.1", "0.10.0"], - ), - "0.9.1", - ) - - def test__filter_installable(self): - installable = filter_installable( - [ - "1.1.0", - "1.2.0a1", - "1.0.0", - "2.1.0-alpha", - "2.2.0asdf", - "2.1.0", - "2.2.0", - "2.2.0-fishtown-beta", - "2.2.0-2", - ], - install_prerelease=True, - ) - expected = [ - "1.0.0", - "1.1.0", - "1.2.0a1", - "2.1.0-alpha", - "2.1.0", - "2.2.0-2", - "2.2.0asdf", - "2.2.0-fishtown-beta", - "2.2.0", - ] - assert installable == expected - - installable = filter_installable( - [ - "1.1.0", - "1.2.0a1", - "1.0.0", - "2.1.0-alpha", - "2.2.0asdf", - "2.1.0", - "2.2.0", - "2.2.0-fishtown-beta", - ], - install_prerelease=False, - ) - expected = ["1.0.0", "1.1.0", "2.1.0", "2.2.0"] - assert installable == expected diff --git a/tests/unit/test_sql_result.py b/tests/unit/test_sql_result.py deleted file mode 100644 index f7273acac2e..00000000000 --- a/tests/unit/test_sql_result.py +++ /dev/null @@ -1,19 +0,0 @@ -import unittest -from dbt.adapters.sql.connections import SQLConnectionManager - - -class TestProcessSQLResult(unittest.TestCase): - def test_duplicated_columns(self): - cols_with_one_dupe = ["a", "b", "a", "d"] - rows = [(1, 2, 3, 4)] - self.assertEqual( - SQLConnectionManager.process_results(cols_with_one_dupe, rows), - [{"a": 1, "b": 2, "a_2": 3, "d": 4}], - ) - - cols_with_more_dupes = ["a", "a", "a", "b"] - rows = [(1, 2, 3, 4)] - self.assertEqual( - SQLConnectionManager.process_results(cols_with_more_dupes, rows), - [{"a": 1, "a_2": 2, "a_3": 3, "b": 4}], - ) diff --git a/tests/unit/test_system_client.py b/tests/unit/test_system_client.py deleted file mode 100644 index 195fdeb4134..00000000000 --- 
a/tests/unit/test_system_client.py +++ /dev/null @@ -1,271 +0,0 @@ -import os -import shutil -import stat -import unittest -import tarfile -import pathspec -from pathlib import Path -from tempfile import mkdtemp, NamedTemporaryFile - -from dbt.exceptions import ExecutableError, WorkingDirectoryError -import dbt.clients.system - - -class SystemClient(unittest.TestCase): - def setUp(self): - super().setUp() - self.tmp_dir = mkdtemp() - self.profiles_path = "{}/profiles.yml".format(self.tmp_dir) - - def set_up_profile(self): - with open(self.profiles_path, "w") as f: - f.write("ORIGINAL_TEXT") - - def get_profile_text(self): - with open(self.profiles_path, "r") as f: - return f.read() - - def tearDown(self): - try: - shutil.rmtree(self.tmp_dir) - except Exception as e: # noqa: [F841] - pass - - def test__make_file_when_exists(self): - self.set_up_profile() - written = dbt.clients.system.make_file(self.profiles_path, contents="NEW_TEXT") - - self.assertFalse(written) - self.assertEqual(self.get_profile_text(), "ORIGINAL_TEXT") - - def test__make_file_when_not_exists(self): - written = dbt.clients.system.make_file(self.profiles_path, contents="NEW_TEXT") - - self.assertTrue(written) - self.assertEqual(self.get_profile_text(), "NEW_TEXT") - - def test__make_file_with_overwrite(self): - self.set_up_profile() - written = dbt.clients.system.make_file( - self.profiles_path, contents="NEW_TEXT", overwrite=True - ) - - self.assertTrue(written) - self.assertEqual(self.get_profile_text(), "NEW_TEXT") - - def test__make_dir_from_str(self): - test_dir_str = self.tmp_dir + "/test_make_from_str/sub_dir" - dbt.clients.system.make_directory(test_dir_str) - self.assertTrue(Path(test_dir_str).is_dir()) - - def test__make_dir_from_pathobj(self): - test_dir_pathobj = Path(self.tmp_dir + "/test_make_from_pathobj/sub_dir") - dbt.clients.system.make_directory(test_dir_pathobj) - self.assertTrue(test_dir_pathobj.is_dir()) - - -class TestRunCmd(unittest.TestCase): - """Test `run_cmd`. - - Don't mock out subprocess, in order to expose any OS-level differences. - """ - - not_a_file = "zzzbbfasdfasdfsdaq" - - def setUp(self): - self.tempdir = mkdtemp() - self.run_dir = os.path.join(self.tempdir, "run_dir") - self.does_not_exist = os.path.join(self.tempdir, "does_not_exist") - self.empty_file = os.path.join(self.tempdir, "empty_file") - if os.name == "nt": - self.exists_cmd = ["cmd", "/C", "echo", "hello"] - else: - self.exists_cmd = ["echo", "hello"] - - os.mkdir(self.run_dir) - with open(self.empty_file, "w") as fp: # noqa: [F841] - pass # "touch" - - def tearDown(self): - shutil.rmtree(self.tempdir) - - def test__executable_does_not_exist(self): - with self.assertRaises(ExecutableError) as exc: - dbt.clients.system.run_cmd(self.run_dir, [self.does_not_exist]) - - msg = str(exc.exception).lower() - - self.assertIn("path", msg) - self.assertIn("could not find", msg) - self.assertIn(self.does_not_exist.lower(), msg) - - def test__not_exe(self): - with self.assertRaises(ExecutableError) as exc: - dbt.clients.system.run_cmd(self.run_dir, [self.empty_file]) - - msg = str(exc.exception).lower() - if os.name == "nt": - # on windows, this means it's not an executable at all! 
- self.assertIn("not executable", msg) - else: - # on linux, this means you don't have executable permissions on it - self.assertIn("permissions", msg) - self.assertIn(self.empty_file.lower(), msg) - - def test__cwd_does_not_exist(self): - with self.assertRaises(WorkingDirectoryError) as exc: - dbt.clients.system.run_cmd(self.does_not_exist, self.exists_cmd) - msg = str(exc.exception).lower() - self.assertIn("does not exist", msg) - self.assertIn(self.does_not_exist.lower(), msg) - - def test__cwd_not_directory(self): - with self.assertRaises(WorkingDirectoryError) as exc: - dbt.clients.system.run_cmd(self.empty_file, self.exists_cmd) - - msg = str(exc.exception).lower() - self.assertIn("not a directory", msg) - self.assertIn(self.empty_file.lower(), msg) - - def test__cwd_no_permissions(self): - # it would be nice to add a windows test. Possible path to that is via - # `psexec` (to get SYSTEM privs), use `icacls` to set permissions on - # the directory for the test user. I'm pretty sure windows users can't - # create files that they themselves cannot access. - if os.name == "nt": - return - - # read-only -> cannot cd to it - os.chmod(self.run_dir, stat.S_IRUSR) - - with self.assertRaises(WorkingDirectoryError) as exc: - dbt.clients.system.run_cmd(self.run_dir, self.exists_cmd) - - msg = str(exc.exception).lower() - self.assertIn("permissions", msg) - self.assertIn(self.run_dir.lower(), msg) - - def test__ok(self): - out, err = dbt.clients.system.run_cmd(self.run_dir, self.exists_cmd) - self.assertEqual(out.strip(), b"hello") - self.assertEqual(err.strip(), b"") - - -class TestFindMatching(unittest.TestCase): - def setUp(self): - self.base_dir = mkdtemp() - self.tempdir = mkdtemp(dir=self.base_dir) - - def test_find_matching_lowercase_file_pattern(self): - with NamedTemporaryFile(prefix="sql-files", suffix=".sql", dir=self.tempdir) as named_file: - file_path = os.path.dirname(named_file.name) - relative_path = os.path.basename(file_path) - out = dbt.clients.system.find_matching( - self.base_dir, - [relative_path], - "*.sql", - ) - expected_output = [ - { - "searched_path": relative_path, - "absolute_path": named_file.name, - "relative_path": os.path.basename(named_file.name), - "modification_time": out[0]["modification_time"], - } - ] - self.assertEqual(out, expected_output) - - def test_find_matching_uppercase_file_pattern(self): - with NamedTemporaryFile(prefix="sql-files", suffix=".SQL", dir=self.tempdir) as named_file: - file_path = os.path.dirname(named_file.name) - relative_path = os.path.basename(file_path) - out = dbt.clients.system.find_matching(self.base_dir, [relative_path], "*.sql") - expected_output = [ - { - "searched_path": relative_path, - "absolute_path": named_file.name, - "relative_path": os.path.basename(named_file.name), - "modification_time": out[0]["modification_time"], - } - ] - self.assertEqual(out, expected_output) - - def test_find_matching_file_pattern_not_found(self): - with NamedTemporaryFile(prefix="sql-files", suffix=".SQLT", dir=self.tempdir): - out = dbt.clients.system.find_matching(self.tempdir, [""], "*.sql") - self.assertEqual(out, []) - - def test_ignore_spec(self): - with NamedTemporaryFile(prefix="sql-files", suffix=".sql", dir=self.tempdir): - out = dbt.clients.system.find_matching( - self.tempdir, - [""], - "*.sql", - pathspec.PathSpec.from_lines( - pathspec.patterns.GitWildMatchPattern, "sql-files*".splitlines() - ), - ) - self.assertEqual(out, []) - - def tearDown(self): - try: - shutil.rmtree(self.base_dir) - except Exception as e: # noqa: 
[F841] - pass - - -class TestUntarPackage(unittest.TestCase): - def setUp(self): - self.base_dir = mkdtemp() - self.tempdir = mkdtemp(dir=self.base_dir) - self.tempdest = mkdtemp(dir=self.base_dir) - - def tearDown(self): - try: - shutil.rmtree(self.base_dir) - except Exception as e: # noqa: [F841] - pass - - def test_untar_package_success(self): - # set up a valid tarball to test against - with NamedTemporaryFile( - prefix="my-package.2", suffix=".tar.gz", dir=self.tempdir, delete=False - ) as named_tar_file: - tar_file_full_path = named_tar_file.name - with NamedTemporaryFile(prefix="a", suffix=".txt", dir=self.tempdir) as file_a: - file_a.write(b"some text in the text file") - relative_file_a = os.path.basename(file_a.name) - with tarfile.open(fileobj=named_tar_file, mode="w:gz") as tar: - tar.addfile(tarfile.TarInfo(relative_file_a), open(file_a.name)) - - # now we test can test that we can untar the file successfully - assert tarfile.is_tarfile(tar.name) - dbt.clients.system.untar_package(tar_file_full_path, self.tempdest) - path = Path(os.path.join(self.tempdest, relative_file_a)) - assert path.is_file() - - def test_untar_package_failure(self): - # create a text file then rename it as a tar (so it's invalid) - with NamedTemporaryFile( - prefix="a", suffix=".txt", dir=self.tempdir, delete=False - ) as file_a: - file_a.write(b"some text in the text file") - txt_file_name = file_a.name - file_path = os.path.dirname(txt_file_name) - tar_file_path = os.path.join(file_path, "mypackage.2.tar.gz") - os.rename(txt_file_name, tar_file_path) - - # now that we're set up, test that untarring the file fails - with self.assertRaises(tarfile.ReadError) as exc: # noqa: [F841] - dbt.clients.system.untar_package(tar_file_path, self.tempdest) - - def test_untar_package_empty(self): - # create a tarball with nothing in it - with NamedTemporaryFile( - prefix="my-empty-package.2", suffix=".tar.gz", dir=self.tempdir - ) as named_file: - - # make sure we throw an error for the empty file - with self.assertRaises(tarfile.ReadError) as exc: - dbt.clients.system.untar_package(named_file.name, self.tempdest) - self.assertEqual("empty file", str(exc.exception)) diff --git a/tests/unit/test_tracking.py b/tests/unit/test_tracking.py index accfa99bc3f..685f1108c1b 100644 --- a/tests/unit/test_tracking.py +++ b/tests/unit/test_tracking.py @@ -1,22 +1,25 @@ -import dbt.tracking import datetime -import shutil import tempfile -import unittest + +import pytest + +import dbt.tracking -class TestTracking(unittest.TestCase): - def setUp(self): - dbt.tracking.active_user = None - self.tempdir = tempfile.mkdtemp() +@pytest.fixture(scope="function") +def active_user_none() -> None: + dbt.tracking.active_user = None - def tearDown(self): - dbt.tracking.active_user = None - shutil.rmtree(self.tempdir) - def test_tracking_initial(self): +@pytest.fixture(scope="function") +def tempdir(active_user_none) -> str: + return tempfile.mkdtemp() + + +class TestTracking: + def test_tracking_initial(self, tempdir): assert dbt.tracking.active_user is None - dbt.tracking.initialize_from_flags(True, self.tempdir) + dbt.tracking.initialize_from_flags(True, tempdir) assert isinstance(dbt.tracking.active_user, dbt.tracking.User) invocation_id = dbt.tracking.active_user.invocation_id @@ -48,7 +51,7 @@ def test_tracking_initial(self): # if you use `!=`, you might hit a race condition (especially on windows) assert dbt.tracking.active_user.run_started_at is not run_started_at - def test_tracking_never_ok(self): + def test_tracking_never_ok(self, 
active_user_none): assert dbt.tracking.active_user is None # this should generate a whole new user object -> new invocation_id/run_started_at @@ -60,7 +63,7 @@ def test_tracking_never_ok(self): assert isinstance(dbt.tracking.active_user.invocation_id, str) assert isinstance(dbt.tracking.active_user.run_started_at, datetime.datetime) - def test_disable_never_enabled(self): + def test_disable_never_enabled(self, active_user_none): assert dbt.tracking.active_user is None # this should generate a whole new user object -> new invocation_id/run_started_at @@ -72,10 +75,7 @@ def test_disable_never_enabled(self): assert isinstance(dbt.tracking.active_user.invocation_id, str) assert isinstance(dbt.tracking.active_user.run_started_at, datetime.datetime) - def test_initialize_from_flags(self): - for send_anonymous_usage_stats in [True, False]: - with self.subTest(send_anonymous_usage_stats=send_anonymous_usage_stats): - - dbt.tracking.initialize_from_flags(send_anonymous_usage_stats, self.tempdir) - - assert dbt.tracking.active_user.do_not_track != send_anonymous_usage_stats + @pytest.mark.parametrize("send_anonymous_usage_stats", [True, False]) + def test_initialize_from_flags(self, tempdir, send_anonymous_usage_stats): + dbt.tracking.initialize_from_flags(send_anonymous_usage_stats, tempdir) + assert dbt.tracking.active_user.do_not_track != send_anonymous_usage_stats diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 02921354d95..c270d9a1618 100644 --- a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -4,145 +4,6 @@ import dbt.utils -class TestDeepMerge(unittest.TestCase): - def test__simple_cases(self): - cases = [ - {"args": [{}, {"a": 1}], "expected": {"a": 1}, "description": "one key into empty"}, - { - "args": [{}, {"b": 1}, {"a": 1}], - "expected": {"a": 1, "b": 1}, - "description": "three merges", - }, - ] - - for case in cases: - actual = dbt.utils.deep_merge(*case["args"]) - self.assertEqual( - case["expected"], - actual, - "failed on {} (actual {}, expected {})".format( - case["description"], actual, case["expected"] - ), - ) - - -class TestMerge(unittest.TestCase): - def test__simple_cases(self): - cases = [ - {"args": [{}, {"a": 1}], "expected": {"a": 1}, "description": "one key into empty"}, - { - "args": [{}, {"b": 1}, {"a": 1}], - "expected": {"a": 1, "b": 1}, - "description": "three merges", - }, - ] - - for case in cases: - actual = dbt.utils.deep_merge(*case["args"]) - self.assertEqual( - case["expected"], - actual, - "failed on {} (actual {}, expected {})".format( - case["description"], actual, case["expected"] - ), - ) - - -class TestDeepMap(unittest.TestCase): - def setUp(self): - self.input_value = { - "foo": { - "bar": "hello", - "baz": [1, 90.5, "990", "89.9"], - }, - "nested": [ - { - "test": "90", - "other_test": None, - }, - { - "test": 400, - "other_test": 4.7e9, - }, - ], - } - - @staticmethod - def intify_all(value, _): - try: - return int(value) - except (TypeError, ValueError): - return -1 - - def test__simple_cases(self): - expected = { - "foo": { - "bar": -1, - "baz": [1, 90, 990, -1], - }, - "nested": [ - { - "test": 90, - "other_test": -1, - }, - { - "test": 400, - "other_test": 4700000000, - }, - ], - } - actual = dbt.utils.deep_map_render(self.intify_all, self.input_value) - self.assertEqual(actual, expected) - - actual = dbt.utils.deep_map_render(self.intify_all, expected) - self.assertEqual(actual, expected) - - @staticmethod - def special_keypath(value, keypath): - - if tuple(keypath) == ("foo", "baz", 1): - return "hello" - 
else: - return value - - def test__keypath(self): - expected = { - "foo": { - "bar": "hello", - # the only change from input is the second entry here - "baz": [1, "hello", "990", "89.9"], - }, - "nested": [ - { - "test": "90", - "other_test": None, - }, - { - "test": 400, - "other_test": 4.7e9, - }, - ], - } - actual = dbt.utils.deep_map_render(self.special_keypath, self.input_value) - self.assertEqual(actual, expected) - - actual = dbt.utils.deep_map_render(self.special_keypath, expected) - self.assertEqual(actual, expected) - - def test__noop(self): - actual = dbt.utils.deep_map_render(lambda x, _: x, self.input_value) - self.assertEqual(actual, self.input_value) - - def test_trivial(self): - cases = [[], {}, 1, "abc", None, True] - for case in cases: - result = dbt.utils.deep_map_render(lambda x, _: x, case) - self.assertEqual(result, case) - - with self.assertRaises(dbt.exceptions.DbtConfigError): - dbt.utils.deep_map_render(lambda x, _: x, {"foo": object()}) - - class TestMultiDict(unittest.TestCase): def test_one_member(self): dct = {"a": 1, "b": 2, "c": 3} diff --git a/tests/unit/test_version.py b/tests/unit/test_version.py index 217988ba5e2..efd51872576 100644 --- a/tests/unit/test_version.py +++ b/tests/unit/test_version.py @@ -1,5 +1,5 @@ import dbt.version -from dbt.ui import green, red, yellow +from dbt_common.ui import green, red, yellow class TestGetVersionInformation: diff --git a/tests/unit/utils.py b/tests/unit/utils/__init__.py similarity index 93% rename from tests/unit/utils.py rename to tests/unit/utils/__init__.py index 0f5c12ebbfd..ec9cb57595d 100644 --- a/tests/unit/utils.py +++ b/tests/unit/utils/__init__.py @@ -3,15 +3,17 @@ Note that all imports should be inside the functions to avoid import/mocking issues. """ -import string + import os -from unittest import mock -from unittest import TestCase +import string +from unittest import TestCase, mock import agate import pytest -from dbt.dataclass_schema import ValidationError + from dbt.config.project import PartialProject +from dbt.contracts.graph.manifest import Manifest +from dbt_common.dataclass_schema import ValidationError def normalize(path): @@ -76,9 +78,10 @@ def project_from_dict(project, profile, packages=None, selectors=None, cli_vars= def config_from_parts_or_dicts(project, profile, packages=None, selectors=None, cli_vars={}): - from dbt.config import Project, Profile, RuntimeConfig from copy import deepcopy + from dbt.config import Profile, Project, RuntimeConfig + if isinstance(project, Project): profile_name = project.profile_name else: @@ -253,7 +256,7 @@ def generate_name_macros(package): class TestAdapterConversions(TestCase): def _get_tester_for(self, column_type): - from dbt.clients import agate_helper + from dbt_common.clients import agate_helper if column_type is agate.TimeDelta: # dbt never makes this! 
return agate.TimeDelta() @@ -305,8 +308,8 @@ def MockGenerateMacro(package, component="some_component", **kwargs): def MockSource(package, source_name, name, **kwargs): - from dbt.node_types import NodeType from dbt.contracts.graph.nodes import SourceDefinition + from dbt.node_types import NodeType src = mock.MagicMock( __class__=SourceDefinition, @@ -322,8 +325,8 @@ def MockSource(package, source_name, name, **kwargs): def MockNode(package, name, resource_type=None, **kwargs): - from dbt.node_types import NodeType from dbt.contracts.graph.nodes import ModelNode, SeedNode + from dbt.node_types import NodeType if resource_type is None: resource_type = NodeType.Model @@ -336,7 +339,7 @@ def MockNode(package, name, resource_type=None, **kwargs): version = kwargs.get("version") search_name = name if version is None else f"{name}.v{version}" - unique_id = f"{str(resource_type)}.{package}.{name}" + unique_id = f"{str(resource_type)}.{package}.{search_name}" node = mock.MagicMock( __class__=cls, resource_type=resource_type, @@ -351,8 +354,8 @@ def MockNode(package, name, resource_type=None, **kwargs): def MockDocumentation(package, name, **kwargs): - from dbt.node_types import NodeType from dbt.contracts.graph.nodes import Documentation + from dbt.node_types import NodeType doc = mock.MagicMock( __class__=Documentation, @@ -379,7 +382,24 @@ def dict_replace(dct, **kwargs): def replace_config(n, **kwargs): - return n.replace( + from dataclasses import replace + + return replace( + n, config=n.config.replace(**kwargs), unrendered_config=dict_replace(n.unrendered_config, **kwargs), ) + + +def make_manifest(nodes=[], sources=[], macros=[], docs=[]) -> Manifest: + return Manifest( + nodes={n.unique_id: n for n in nodes}, + macros={m.unique_id: m for m in macros}, + sources={s.unique_id: s for s in sources}, + docs={d.unique_id: d for d in docs}, + disabled={}, + files={}, + exposures={}, + metrics={}, + selectors={}, + ) diff --git a/tests/unit/utils/adapter.py b/tests/unit/utils/adapter.py new file mode 100644 index 00000000000..66710e645a9 --- /dev/null +++ b/tests/unit/utils/adapter.py @@ -0,0 +1,59 @@ +import sys +from unittest.mock import MagicMock + +import pytest +from pytest_mock import MockerFixture + +from dbt.adapters.factory import get_adapter, register_adapter, reset_adapters +from dbt.adapters.postgres import PostgresAdapter +from dbt.adapters.sql import SQLConnectionManager +from dbt.config.runtime import RuntimeConfig +from dbt.context.providers import generate_runtime_macro_context +from dbt.contracts.graph.manifest import ManifestStateCheck +from dbt.mp_context import get_mp_context +from dbt.parser.manifest import ManifestLoader + +if sys.version_info < (3, 9): + from typing import Generator +else: + from collections.abc import Generator + + +@pytest.fixture +def mock_connection_manager() -> MagicMock: + mock_connection_manager = MagicMock(SQLConnectionManager) + mock_connection_manager.set_query_header = lambda query_header_context: None + return mock_connection_manager + + +@pytest.fixture +def mock_adapter(mock_connection_manager: MagicMock) -> MagicMock: + mock_adapter = MagicMock(PostgresAdapter) + mock_adapter.connections = mock_connection_manager + mock_adapter.clear_macro_resolver = MagicMock() + return mock_adapter + + +@pytest.fixture +def postgres_adapter( + mocker: MockerFixture, runtime_config: RuntimeConfig +) -> Generator[PostgresAdapter, None, None]: + register_adapter(runtime_config, get_mp_context()) + adapter = get_adapter(runtime_config) + assert 
isinstance(adapter, PostgresAdapter) + + mocker.patch("dbt.parser.manifest.ManifestLoader.build_manifest_state_check").return_value = ( + ManifestStateCheck() + ) + manifest = ManifestLoader.load_macros( + runtime_config, + adapter.connections.set_query_header, + base_macros_only=True, + ) + + adapter.set_macro_resolver(manifest) + adapter.set_macro_context_generator(generate_runtime_macro_context) + + yield adapter + adapter.cleanup_connections() + reset_adapters() diff --git a/tests/unit/utils/config.py b/tests/unit/utils/config.py new file mode 100644 index 00000000000..3041bbada7c --- /dev/null +++ b/tests/unit/utils/config.py @@ -0,0 +1,52 @@ +import pytest + +from dbt.adapters.postgres.connections import PostgresCredentials +from dbt.config.profile import Profile +from dbt.config.project import Project +from dbt.config.renderer import ProfileRenderer +from dbt.config.runtime import RuntimeConfig +from dbt.flags import get_flags + + +@pytest.fixture +def credentials() -> PostgresCredentials: + return PostgresCredentials( + database="test_database", + schema="test_schema", + host="test_host", + user="test_user", + port=1337, + password="test_password", + ) + + +@pytest.fixture +def profile() -> Profile: + profile_yaml = { + "target": "postgres", + "outputs": { + "postgres": { + "type": "postgres", + "host": "postgres-db-hostname", + "port": 5555, + "user": "db_user", + "pass": "db_pass", + "dbname": "postgres-db-name", + "schema": "postgres-schema", + "threads": 7, + }, + }, + } + return Profile.from_raw_profile_info( + raw_profile=profile_yaml, profile_name="test_profile", renderer=ProfileRenderer({}) + ) + + +@pytest.fixture +def runtime_config(project: Project, profile: Profile, set_test_flags) -> RuntimeConfig: + args = get_flags() + return RuntimeConfig.from_parts( + project=project, + profile=profile, + args=args, + ) diff --git a/tests/unit/utils/event_manager.py b/tests/unit/utils/event_manager.py new file mode 100644 index 00000000000..70415e36231 --- /dev/null +++ b/tests/unit/utils/event_manager.py @@ -0,0 +1,8 @@ +import pytest + +from dbt_common.events.event_manager_client import cleanup_event_logger + + +@pytest.fixture(autouse=True) +def always_clean_event_manager() -> None: + cleanup_event_logger() diff --git a/tests/unit/utils/flags.py b/tests/unit/utils/flags.py new file mode 100644 index 00000000000..20bb4a44ea0 --- /dev/null +++ b/tests/unit/utils/flags.py @@ -0,0 +1,33 @@ +import sys +from argparse import Namespace + +if sys.version_info < (3, 9): + from typing import Generator +else: + from collections.abc import Generator + +import pytest + +from dbt.flags import set_from_args + + +@pytest.fixture +def args_for_flags() -> Namespace: + """Defines the namespace args to be used in `set_from_args` of the `set_test_flags` fixture. + + This fixture is meant to be overridden by tests that need specific flags to be set. + """ + return Namespace() + + +@pytest.fixture(autouse=True) +def set_test_flags(args_for_flags: Namespace) -> Generator[None, None, None]: + """Sets up and tears down the global flags for every pytest unit test + + Override `args_for_flags` fixture as needed to set any specific flags. 
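As a usage sketch (an illustration, not lines from this diff), a test module that needs specific flags would shadow the fixture; the flag shown below is just one plausible choice, picked because `send_anonymous_usage_stats` already appears in the tracking tests in this patch:

    from argparse import Namespace

    import pytest

    @pytest.fixture
    def args_for_flags() -> Namespace:
        # the autouse set_test_flags fixture passes this Namespace to set_from_args()
        return Namespace(send_anonymous_usage_stats=False)
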
+ """ + set_from_args(args_for_flags, {}) + # fixtures stop setup upon yield + yield None + # everything after yield is run at test teardown + set_from_args(Namespace(), {}) diff --git a/tests/unit/utils/manifest.py b/tests/unit/utils/manifest.py new file mode 100644 index 00000000000..0950f68ebb5 --- /dev/null +++ b/tests/unit/utils/manifest.py @@ -0,0 +1,1068 @@ +from typing import Any, Dict, List + +import pytest + +from dbt.artifacts.resources import ( + ExposureType, + MacroDependsOn, + MetricInputMeasure, + MetricTypeParams, + NodeRelation, + Owner, + QueryParams, + RefArgs, + TestConfig, + TestMetadata, + WhereFilter, + WhereFilterIntersection, +) +from dbt.artifacts.resources.types import ModelLanguage +from dbt.artifacts.resources.v1.model import ModelConfig +from dbt.contracts.files import AnySourceFile, FileHash +from dbt.contracts.graph.manifest import Manifest, ManifestMetadata +from dbt.contracts.graph.nodes import ( + AccessType, + DependsOn, + Documentation, + Exposure, + GenericTestNode, + GraphMemberNode, + Group, + Macro, + ManifestNode, + Metric, + ModelNode, + NodeConfig, + SavedQuery, + SeedNode, + SemanticModel, + SingularTestNode, + SourceDefinition, + UnitTestDefinition, +) +from dbt.contracts.graph.unparsed import UnitTestInputFixture, UnitTestOutputFixture +from dbt.node_types import NodeType +from dbt_semantic_interfaces.type_enums import MetricType + + +def make_model( + pkg, + name, + code, + language="sql", + refs=None, + sources=None, + tags=None, + path=None, + alias=None, + config_kwargs=None, + fqn_extras=None, + depends_on_macros=None, + version=None, + latest_version=None, + access=None, + patch_path=None, +): + if refs is None: + refs = [] + if sources is None: + sources = [] + if tags is None: + tags = [] + if path is None: + if language == "sql": + path = f"{name}.sql" + elif language == "python": + path = f"{name}.py" + else: + raise ValueError(f"Unknown language: {language}") + if alias is None: + alias = name + if config_kwargs is None: + config_kwargs = {} + if depends_on_macros is None: + depends_on_macros = [] + + if fqn_extras is None: + fqn_extras = [] + + fqn = [pkg] + fqn_extras + [name] + if version: + fqn.append(f"v{version}") + + depends_on_nodes = [] + source_values = [] + ref_values = [] + for ref in refs: + ref_version = ref.version if hasattr(ref, "version") else None + ref_values.append(RefArgs(name=ref.name, package=ref.package_name, version=ref_version)) + depends_on_nodes.append(ref.unique_id) + for src in sources: + source_values.append([src.source_name, src.name]) + depends_on_nodes.append(src.unique_id) + + return ModelNode( + language="sql", + raw_code=code, + database="dbt", + schema="dbt_schema", + alias=alias, + name=name, + fqn=fqn, + unique_id=f"model.{pkg}.{name}" if not version else f"model.{pkg}.{name}.v{version}", + package_name=pkg, + path=path, + original_file_path=f"models/{path}", + config=NodeConfig(**config_kwargs), + tags=tags, + refs=ref_values, + sources=source_values, + depends_on=DependsOn( + nodes=depends_on_nodes, + macros=depends_on_macros, + ), + resource_type=NodeType.Model, + checksum=FileHash.from_contents(""), + version=version, + latest_version=latest_version, + access=access or AccessType.Protected, + patch_path=patch_path, + ) + + +def make_seed( + pkg, name, path=None, loader=None, alias=None, tags=None, fqn_extras=None, checksum=None +): + if alias is None: + alias = name + if tags is None: + tags = [] + if path is None: + path = f"{name}.csv" + + if fqn_extras is None: + fqn_extras = [] + + 
if checksum is None: + checksum = FileHash.from_contents("") + + fqn = [pkg] + fqn_extras + [name] + return SeedNode( + database="dbt", + schema="dbt_schema", + alias=alias, + name=name, + fqn=fqn, + unique_id=f"seed.{pkg}.{name}", + package_name=pkg, + path=path, + original_file_path=f"data/{path}", + tags=tags, + resource_type=NodeType.Seed, + checksum=FileHash.from_contents(""), + ) + + +def make_source( + pkg, source_name, table_name, path=None, loader=None, identifier=None, fqn_extras=None +): + if path is None: + path = "models/schema.yml" + if loader is None: + loader = "my_loader" + if identifier is None: + identifier = table_name + + if fqn_extras is None: + fqn_extras = [] + + fqn = [pkg] + fqn_extras + [source_name, table_name] + + return SourceDefinition( + fqn=fqn, + database="dbt", + schema="dbt_schema", + unique_id=f"source.{pkg}.{source_name}.{table_name}", + package_name=pkg, + path=path, + original_file_path=path, + name=table_name, + source_name=source_name, + loader="my_loader", + identifier=identifier, + resource_type=NodeType.Source, + loaded_at_field="loaded_at", + tags=[], + source_description="", + ) + + +def make_macro(pkg, name, macro_sql, path=None, depends_on_macros=None): + if path is None: + path = "macros/macros.sql" + + if depends_on_macros is None: + depends_on_macros = [] + + return Macro( + name=name, + macro_sql=macro_sql, + unique_id=f"macro.{pkg}.{name}", + package_name=pkg, + path=path, + original_file_path=path, + resource_type=NodeType.Macro, + depends_on=MacroDependsOn(macros=depends_on_macros), + ) + + +def make_unique_test(pkg, test_model, column_name, path=None, refs=None, sources=None, tags=None): + return make_generic_test(pkg, "unique", test_model, {}, column_name=column_name) + + +def make_not_null_test( + pkg, test_model, column_name, path=None, refs=None, sources=None, tags=None +): + return make_generic_test(pkg, "not_null", test_model, {}, column_name=column_name) + + +def make_generic_test( + pkg, + test_name, + test_model, + test_kwargs, + path=None, + refs=None, + sources=None, + tags=None, + column_name=None, +): + kwargs = test_kwargs.copy() + ref_values = [] + source_values = [] + # this doesn't really have to be correct + if isinstance(test_model, SourceDefinition): + kwargs["model"] = ( + "{{ source('" + test_model.source_name + "', '" + test_model.name + "') }}" + ) + source_values.append([test_model.source_name, test_model.name]) + else: + kwargs["model"] = "{{ ref('" + test_model.name + "')}}" + ref_values.append( + RefArgs( + name=test_model.name, package=test_model.package_name, version=test_model.version + ) + ) + if column_name is not None: + kwargs["column_name"] = column_name + + # whatever + args_name = test_model.search_name.replace(".", "_") + if column_name is not None: + args_name += "_" + column_name + node_name = f"{test_name}_{args_name}" + raw_code = ( + '{{ config(severity="ERROR") }}{{ test_' + test_name + "(**dbt_schema_test_kwargs) }}" + ) + name_parts = test_name.split(".") + + if len(name_parts) == 2: + namespace, test_name = name_parts + macro_depends = f"macro.{namespace}.test_{test_name}" + elif len(name_parts) == 1: + namespace = None + macro_depends = f"macro.dbt.test_{test_name}" + else: + assert False, f"invalid test name: {test_name}" + + if path is None: + path = "schema.yml" + if tags is None: + tags = ["schema"] + + if refs is None: + refs = [] + if sources is None: + sources = [] + + depends_on_nodes = [] + for ref in refs: + ref_version = ref.version if hasattr(ref, "version") else None + 
ref_values.append(RefArgs(name=ref.name, package=ref.package_name, version=ref_version)) + depends_on_nodes.append(ref.unique_id) + + for source in sources: + source_values.append([source.source_name, source.name]) + depends_on_nodes.append(source.unique_id) + + return GenericTestNode( + language="sql", + raw_code=raw_code, + test_metadata=TestMetadata( + namespace=namespace, + name=test_name, + kwargs=kwargs, + ), + database="dbt", + schema="dbt_postgres", + name=node_name, + alias=node_name, + fqn=["minimal", "schema_test", node_name], + unique_id=f"test.{pkg}.{node_name}", + package_name=pkg, + path=f"schema_test/{node_name}.sql", + original_file_path=f"models/{path}", + resource_type=NodeType.Test, + tags=tags, + refs=ref_values, + sources=[], + depends_on=DependsOn(macros=[macro_depends], nodes=depends_on_nodes), + column_name=column_name, + checksum=FileHash.from_contents(""), + ) + + +def make_unit_test( + pkg, + test_name, + test_model, +): + input_fixture = UnitTestInputFixture( + input="ref('table_model')", + rows=[{"id": 1, "string_a": "a"}], + ) + output_fixture = UnitTestOutputFixture( + rows=[{"id": 1, "string_a": "a"}], + ) + return UnitTestDefinition( + name=test_name, + model=test_model, + package_name=pkg, + resource_type=NodeType.Unit, + path="unit_tests.yml", + original_file_path="models/unit_tests.yml", + unique_id=f"unit.{pkg}.{test_model.name}__{test_name}", + given=[input_fixture], + expect=output_fixture, + fqn=[pkg, test_model.name, test_name], + ) + + +def make_singular_test( + pkg, name, sql, refs=None, sources=None, tags=None, path=None, config_kwargs=None +): + if refs is None: + refs = [] + if sources is None: + sources = [] + if tags is None: + tags = ["data"] + if path is None: + path = f"{name}.sql" + + if config_kwargs is None: + config_kwargs = {} + + fqn = ["minimal", "data_test", name] + + depends_on_nodes = [] + source_values = [] + ref_values = [] + for ref in refs: + ref_version = ref.version if hasattr(ref, "version") else None + ref_values.append(RefArgs(name=ref.name, package=ref.package_name, version=ref_version)) + depends_on_nodes.append(ref.unique_id) + for src in sources: + source_values.append([src.source_name, src.name]) + depends_on_nodes.append(src.unique_id) + + return SingularTestNode( + language="sql", + raw_code=sql, + database="dbt", + schema="dbt_schema", + name=name, + alias=name, + fqn=fqn, + unique_id=f"test.{pkg}.{name}", + package_name=pkg, + path=path, + original_file_path=f"tests/{path}", + config=TestConfig(**config_kwargs), + tags=tags, + refs=ref_values, + sources=source_values, + depends_on=DependsOn(nodes=depends_on_nodes, macros=[]), + resource_type=NodeType.Test, + checksum=FileHash.from_contents(""), + ) + + +def make_exposure(pkg, name, path=None, fqn_extras=None, owner=None): + if path is None: + path = "schema.yml" + + if fqn_extras is None: + fqn_extras = [] + + if owner is None: + owner = Owner(email="test@example.com") + + fqn = [pkg, "exposures"] + fqn_extras + [name] + return Exposure( + name=name, + resource_type=NodeType.Exposure, + type=ExposureType.Notebook, + fqn=fqn, + unique_id=f"exposure.{pkg}.{name}", + package_name=pkg, + path=path, + original_file_path=path, + owner=owner, + ) + + +def make_metric(pkg, name, path=None): + if path is None: + path = "schema.yml" + + return Metric( + name=name, + resource_type=NodeType.Metric, + path=path, + package_name=pkg, + original_file_path=path, + unique_id=f"metric.{pkg}.{name}", + fqn=[pkg, "metrics", name], + label="New Customers", + description="New 
customers", + type=MetricType.SIMPLE, + type_params=MetricTypeParams(measure=MetricInputMeasure(name="count_cats")), + meta={"is_okr": True}, + tags=["okrs"], + ) + + +def make_group(pkg, name, path=None): + if path is None: + path = "schema.yml" + + return Group( + name=name, + resource_type=NodeType.Group, + path=path, + package_name=pkg, + original_file_path=path, + unique_id=f"group.{pkg}.{name}", + owner="email@gmail.com", + ) + + +def make_semantic_model( + pkg: str, + name: str, + model, + path=None, +): + if path is None: + path = "schema.yml" + + return SemanticModel( + name=name, + resource_type=NodeType.SemanticModel, + model=model, + node_relation=NodeRelation( + alias=model.alias, + schema_name="dbt", + relation_name=model.name, + ), + package_name=pkg, + path=path, + description="Customer entity", + primary_entity="customer", + unique_id=f"semantic_model.{pkg}.{name}", + original_file_path=path, + fqn=[pkg, "semantic_models", name], + ) + + +def make_saved_query(pkg: str, name: str, metric: str, path=None): + if path is None: + path = "schema.yml" + + return SavedQuery( + name=name, + resource_type=NodeType.SavedQuery, + package_name=pkg, + path=path, + description="Test Saved Query", + query_params=QueryParams( + metrics=[metric], + group_by=[], + where=None, + ), + exports=[], + unique_id=f"saved_query.{pkg}.{name}", + original_file_path=path, + fqn=[pkg, "saved_queries", name], + ) + + +@pytest.fixture +def macro_test_unique() -> Macro: + return make_macro( + "dbt", "test_unique", "blablabla", depends_on_macros=["macro.dbt.default__test_unique"] + ) + + +@pytest.fixture +def macro_default_test_unique() -> Macro: + return make_macro("dbt", "default__test_unique", "blablabla") + + +@pytest.fixture +def macro_test_not_null() -> Macro: + return make_macro( + "dbt", "test_not_null", "blablabla", depends_on_macros=["macro.dbt.default__test_not_null"] + ) + + +@pytest.fixture +def macro_materialization_table_default() -> Macro: + macro = make_macro("dbt", "materialization_table_default", "SELECT 1") + macro.supported_languages = [ModelLanguage.sql] + return macro + + +@pytest.fixture +def macro_default_test_not_null() -> Macro: + return make_macro("dbt", "default__test_not_null", "blabla") + + +@pytest.fixture +def seed() -> SeedNode: + return make_seed("pkg", "seed") + + +@pytest.fixture +def source() -> SourceDefinition: + return make_source("pkg", "raw", "seed", identifier="seed") + + +@pytest.fixture +def ephemeral_model(source) -> ModelNode: + return make_model( + "pkg", + "ephemeral_model", + 'select * from {{ source("raw", "seed") }}', + config_kwargs={"materialized": "ephemeral"}, + sources=[source], + ) + + +@pytest.fixture +def view_model(ephemeral_model) -> ModelNode: + return make_model( + "pkg", + "view_model", + 'select * from {{ ref("ephemeral_model") }}', + config_kwargs={"materialized": "view"}, + refs=[ephemeral_model], + tags=["uses_ephemeral"], + ) + + +@pytest.fixture +def table_model(ephemeral_model) -> ModelNode: + return make_model( + "pkg", + "table_model", + 'select * from {{ ref("ephemeral_model") }}', + config_kwargs={ + "materialized": "table", + "meta": { + # Other properties to test in test_select_config_meta + "string_property": "some_string", + "truthy_bool_property": True, + "falsy_bool_property": False, + "list_property": ["some_value", True, False], + }, + }, + refs=[ephemeral_model], + tags=["uses_ephemeral"], + path="subdirectory/table_model.sql", + ) + + +@pytest.fixture +def table_model_py(seed) -> ModelNode: + return make_model( + "pkg", + 
"table_model_py", + 'select * from {{ ref("seed") }}', + config_kwargs={"materialized": "table"}, + refs=[seed], + tags=[], + path="subdirectory/table_model.py", + ) + + +@pytest.fixture +def table_model_csv(seed) -> ModelNode: + return make_model( + "pkg", + "table_model_csv", + 'select * from {{ ref("seed") }}', + config_kwargs={"materialized": "table"}, + refs=[seed], + tags=[], + path="subdirectory/table_model.csv", + ) + + +@pytest.fixture +def ext_source() -> SourceDefinition: + return make_source( + "ext", + "ext_raw", + "ext_source", + ) + + +@pytest.fixture +def ext_source_2() -> SourceDefinition: + return make_source( + "ext", + "ext_raw", + "ext_source_2", + ) + + +@pytest.fixture +def ext_source_other() -> SourceDefinition: + return make_source( + "ext", + "raw", + "ext_source", + ) + + +@pytest.fixture +def ext_source_other_2() -> SourceDefinition: + return make_source( + "ext", + "raw", + "ext_source_2", + ) + + +@pytest.fixture +def ext_model(ext_source) -> ModelNode: + return make_model( + "ext", + "ext_model", + 'select * from {{ source("ext_raw", "ext_source") }}', + sources=[ext_source], + ) + + +@pytest.fixture +def union_model(seed, ext_source) -> ModelNode: + return make_model( + "pkg", + "union_model", + 'select * from {{ ref("seed") }} union all select * from {{ source("ext_raw", "ext_source") }}', + config_kwargs={"materialized": "table"}, + refs=[seed], + sources=[ext_source], + fqn_extras=["unions"], + path="subdirectory/union_model.sql", + tags=["unions"], + ) + + +@pytest.fixture +def versioned_model_v1(seed) -> ModelNode: + return make_model( + "pkg", + "versioned_model", + 'select * from {{ ref("seed") }}', + config_kwargs={"materialized": "table"}, + refs=[seed], + sources=[], + path="subdirectory/versioned_model_v1.sql", + version=1, + latest_version=2, + ) + + +@pytest.fixture +def versioned_model_v2(seed) -> ModelNode: + return make_model( + "pkg", + "versioned_model", + 'select * from {{ ref("seed") }}', + config_kwargs={"materialized": "table"}, + refs=[seed], + sources=[], + path="subdirectory/versioned_model_v2.sql", + version=2, + latest_version=2, + ) + + +@pytest.fixture +def versioned_model_v3(seed) -> ModelNode: + return make_model( + "pkg", + "versioned_model", + 'select * from {{ ref("seed") }}', + config_kwargs={"materialized": "table"}, + refs=[seed], + sources=[], + path="subdirectory/versioned_model_v3.sql", + version="3", + latest_version=2, + ) + + +@pytest.fixture +def versioned_model_v12_string(seed) -> ModelNode: + return make_model( + "pkg", + "versioned_model", + 'select * from {{ ref("seed") }}', + config_kwargs={"materialized": "table"}, + refs=[seed], + sources=[], + path="subdirectory/versioned_model_v12.sql", + version="12", + latest_version=2, + ) + + +@pytest.fixture +def versioned_model_v4_nested_dir(seed) -> ModelNode: + return make_model( + "pkg", + "versioned_model", + 'select * from {{ ref("seed") }}', + config_kwargs={"materialized": "table"}, + refs=[seed], + sources=[], + path="subdirectory/nested_dir/versioned_model_v3.sql", + version="4", + latest_version=2, + fqn_extras=["nested_dir"], + ) + + +@pytest.fixture +def table_id_unique(table_model) -> GenericTestNode: + return make_unique_test("pkg", table_model, "id") + + +@pytest.fixture +def table_id_not_null(table_model) -> GenericTestNode: + return make_not_null_test("pkg", table_model, "id") + + +@pytest.fixture +def view_id_unique(view_model) -> GenericTestNode: + return make_unique_test("pkg", view_model, "id") + + +@pytest.fixture +def 
ext_source_id_unique(ext_source) -> GenericTestNode: + return make_unique_test("ext", ext_source, "id") + + +@pytest.fixture +def view_test_nothing(view_model) -> SingularTestNode: + return make_singular_test( + "pkg", + "view_test_nothing", + 'select * from {{ ref("view_model") }} limit 0', + refs=[view_model], + ) + + +@pytest.fixture +def unit_test_table_model(table_model) -> UnitTestDefinition: + return make_unit_test( + "pkg", + "unit_test_table_model", + table_model, + ) + + +# Support dots as namespace separators +@pytest.fixture +def namespaced_seed() -> SeedNode: + return make_seed("pkg", "mynamespace.seed") + + +@pytest.fixture +def namespace_model(source) -> ModelNode: + return make_model( + "pkg", + "mynamespace.ephemeral_model", + 'select * from {{ source("raw", "seed") }}', + config_kwargs={"materialized": "ephemeral"}, + sources=[source], + ) + + +@pytest.fixture +def namespaced_union_model(seed, ext_source) -> ModelNode: + return make_model( + "pkg", + "mynamespace.union_model", + 'select * from {{ ref("mynamespace.seed") }} union all select * from {{ ref("mynamespace.ephemeral_model") }}', + config_kwargs={"materialized": "table"}, + refs=[seed], + sources=[ext_source], + fqn_extras=["unions"], + path="subdirectory/union_model.sql", + tags=["unions"], + ) + + +@pytest.fixture +def metric() -> Metric: + return Metric( + name="my_metric", + resource_type=NodeType.Metric, + type=MetricType.SIMPLE, + type_params=MetricTypeParams(measure=MetricInputMeasure(name="a_measure")), + fqn=["test", "metrics", "myq_metric"], + unique_id="metric.test.my_metric", + package_name="test", + path="models/metric.yml", + original_file_path="models/metric.yml", + description="", + meta={}, + tags=[], + label="test_label", + ) + + +@pytest.fixture +def saved_query() -> SavedQuery: + pkg = "test" + name = "test_saved_query" + path = "test_path" + return SavedQuery( + name=name, + resource_type=NodeType.SavedQuery, + package_name=pkg, + path=path, + description="Test Saved Query", + query_params=QueryParams( + metrics=["my_metric"], + group_by=[], + where=WhereFilterIntersection( + where_filters=[ + WhereFilter(where_sql_template="1=1"), + ] + ), + ), + exports=[], + unique_id=f"saved_query.{pkg}.{name}", + original_file_path=path, + fqn=[pkg, "saved_queries", name], + ) + + +@pytest.fixture +def semantic_model(table_model) -> SemanticModel: + return make_semantic_model("test", "test_semantic_model", model=table_model) + + +@pytest.fixture +def metricflow_time_spine_model() -> ModelNode: + return ModelNode( + name="metricflow_time_spine", + database="dbt", + schema="analytics", + alias="events", + resource_type=NodeType.Model, + unique_id="model.test.metricflow_time_spine", + fqn=["snowplow", "events"], + package_name="snowplow", + refs=[], + sources=[], + metrics=[], + depends_on=DependsOn(), + config=ModelConfig(), + tags=[], + path="events.sql", + original_file_path="events.sql", + meta={}, + language="sql", + raw_code="does not matter", + checksum=FileHash.empty(), + relation_name="events", + ) + + +@pytest.fixture +def nodes( + seed, + ephemeral_model, + view_model, + table_model, + table_model_py, + table_model_csv, + union_model, + versioned_model_v1, + versioned_model_v2, + versioned_model_v3, + versioned_model_v4_nested_dir, + versioned_model_v12_string, + ext_model, + table_id_unique, + table_id_not_null, + view_id_unique, + ext_source_id_unique, + view_test_nothing, + namespaced_seed, + namespace_model, + namespaced_union_model, +) -> List[ManifestNode]: + return [ + seed, + 
ephemeral_model, + view_model, + table_model, + table_model_py, + table_model_csv, + union_model, + versioned_model_v1, + versioned_model_v2, + versioned_model_v3, + versioned_model_v4_nested_dir, + versioned_model_v12_string, + ext_model, + table_id_unique, + table_id_not_null, + view_id_unique, + ext_source_id_unique, + view_test_nothing, + namespaced_seed, + namespace_model, + namespaced_union_model, + ] + + +@pytest.fixture +def sources( + source, + ext_source, + ext_source_2, + ext_source_other, + ext_source_other_2, +) -> list: + return [source, ext_source, ext_source_2, ext_source_other, ext_source_other_2] + + +@pytest.fixture +def macros( + macro_test_unique, + macro_default_test_unique, + macro_test_not_null, + macro_default_test_not_null, + macro_materialization_table_default, +) -> List[Macro]: + return [ + macro_test_unique, + macro_default_test_unique, + macro_test_not_null, + macro_default_test_not_null, + macro_materialization_table_default, + ] + + +@pytest.fixture +def unit_tests(unit_test_table_model) -> List[UnitTestDefinition]: + return [unit_test_table_model] + + +@pytest.fixture +def metrics(metric: Metric) -> List[Metric]: + return [metric] + + +@pytest.fixture +def semantic_models(semantic_model: SemanticModel) -> List[SemanticModel]: + return [semantic_model] + + +@pytest.fixture +def saved_queries(saved_query: SavedQuery) -> List[SavedQuery]: + return [saved_query] + + +@pytest.fixture +def files() -> Dict[str, AnySourceFile]: + return {} + + +def make_manifest( + disabled: Dict[str, List[GraphMemberNode]] = {}, + docs: List[Documentation] = [], + exposures: List[Exposure] = [], + files: Dict[str, AnySourceFile] = {}, + groups: List[Group] = [], + macros: List[Macro] = [], + metrics: List[Metric] = [], + nodes: List[ModelNode] = [], + saved_queries: List[SavedQuery] = [], + selectors: Dict[str, Any] = {}, + semantic_models: List[SemanticModel] = [], + sources: List[SourceDefinition] = [], + unit_tests: List[UnitTestDefinition] = [], +) -> Manifest: + manifest = Manifest( + nodes={n.unique_id: n for n in nodes}, + sources={s.unique_id: s for s in sources}, + macros={m.unique_id: m for m in macros}, + unit_tests={t.unique_id: t for t in unit_tests}, + semantic_models={s.unique_id: s for s in semantic_models}, + docs={d.unique_id: d for d in docs}, + files=files, + exposures={e.unique_id: e for e in exposures}, + metrics={m.unique_id: m for m in metrics}, + disabled=disabled, + selectors=selectors, + groups={g.unique_id: g for g in groups}, + metadata=ManifestMetadata(adapter_type="postgres", project_name="pkg"), + saved_queries={s.unique_id: s for s in saved_queries}, + ) + manifest.build_parent_and_child_maps() + return manifest + + +@pytest.fixture +def manifest( + metric, + semantic_model, + nodes, + sources, + macros, + unit_tests, + metrics, + semantic_models, + files, + saved_queries, +) -> Manifest: + return make_manifest( + nodes=nodes, + sources=sources, + macros=macros, + unit_tests=unit_tests, + semantic_models=semantic_models, + files=files, + metrics=metrics, + saved_queries=saved_queries, + ) diff --git a/tests/unit/utils/project.py b/tests/unit/utils/project.py new file mode 100644 index 00000000000..c8a22143daf --- /dev/null +++ b/tests/unit/utils/project.py @@ -0,0 +1,89 @@ +from unittest.mock import MagicMock + +import pytest + +from dbt.adapters.contracts.connection import QueryComment +from dbt.config import RuntimeConfig +from dbt.config.project import Project, RenderComponents, VarProvider +from dbt.config.selectors import SelectorConfig +from 
dbt.contracts.project import PackageConfig +from dbt_common.semver import VersionSpecifier + + +@pytest.fixture(scope="function") +def selector_config() -> SelectorConfig: + return SelectorConfig.selectors_from_dict( + data={ + "selectors": [ + { + "name": "my_selector", + "definition": "give me cats", + "default": True, + } + ] + } + ) + + +@pytest.fixture(scope="function") +def project(selector_config: SelectorConfig) -> Project: + return Project( + project_name="test_project", + version=1.0, + project_root="doesnt/actually/exist", + profile_name="test_profile", + model_paths=["models"], + macro_paths=["macros"], + seed_paths=["seeds"], + test_paths=["tests"], + analysis_paths=["analyses"], + docs_paths=["docs"], + asset_paths=["assets"], + target_path="target", + snapshot_paths=["snapshots"], + clean_targets=["target"], + log_path="path/to/project/logs", + packages_install_path="dbt_packages", + packages_specified_path="packages.yml", + quoting={}, + models={}, + on_run_start=[], + on_run_end=[], + dispatch=[{"macro_namespace": "dbt_utils", "search_order": ["test_project", "dbt_utils"]}], + seeds={}, + snapshots={}, + sources={}, + data_tests={}, + unit_tests={}, + metrics={}, + semantic_models={}, + saved_queries={}, + exposures={}, + vars=VarProvider({}), + dbt_version=[VersionSpecifier.from_version_string("0.0.0")], + packages=PackageConfig([]), + manifest_selectors={}, + selectors=selector_config, + query_comment=QueryComment(), + config_version=1, + unrendered=RenderComponents({}, {}, {}), + project_env_vars={}, + restrict_access=False, + dbt_cloud={}, + flags={}, + ) + + +@pytest.fixture +def mock_project(): + mock_project = MagicMock(RuntimeConfig) + mock_project.cli_vars = {} + mock_project.args = MagicMock() + mock_project.args.profile = "test" + mock_project.args.target = "test" + mock_project.project_env_vars = {} + mock_project.profile_env_vars = {} + mock_project.project_target_path = "mock_target_path" + mock_project.credentials = MagicMock() + mock_project.clear_dependencies = MagicMock() + return mock_project diff --git a/tests/utils.py b/tests/utils.py new file mode 100644 index 00000000000..e09e377a62b --- /dev/null +++ b/tests/utils.py @@ -0,0 +1,18 @@ +from dataclasses import dataclass, field +from typing import Callable, List + +from dbt_common.events.base_types import BaseEvent, EventMsg + + +@dataclass +class EventCatcher: + event_to_catch: BaseEvent + caught_events: List[EventMsg] = field(default_factory=list) + predicate: Callable[[EventMsg], bool] = lambda event: True + + def catch(self, event: EventMsg): + if event.info.name == self.event_to_catch.__name__ and self.predicate(event): + self.caught_events.append(event) + + def flush(self) -> None: + self.caught_events = [] diff --git a/third-party-stubs/colorama/__init__.pyi b/third-party-stubs/colorama/__init__.pyi deleted file mode 100644 index 4502880eb24..00000000000 --- a/third-party-stubs/colorama/__init__.pyi +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Optional, Any - -class Fore: - RED: str = ... - GREEN: str = ... - YELLOW: str = ... - -class Style: - RESET_ALL: str = ... - -def init( - autoreset: bool = ..., - convert: Optional[Any] = ..., - strip: Optional[Any] = ..., - wrap: bool = ..., -) -> None: ... 
diff --git a/third-party-stubs/daff/__init__.pyi b/third-party-stubs/daff/__init__.pyi new file mode 100644 index 00000000000..d75053bae83 --- /dev/null +++ b/third-party-stubs/daff/__init__.pyi @@ -0,0 +1,1545 @@ +import __builtin__ # type: ignore +import builtins +import functools +from _typeshed import Incomplete + +builtins = __builtin__ +hxunicode: Incomplete +hxunichr: Incomplete + +def hxnext(x): ... + +hx_cmp_to_key = functools.cmp_to_key +hxunicode = str +hxrange = range +hxunichr = chr +unichr = chr +unicode = str +hx_cmp_to_key = functools.cmp_to_key +python_lib_Builtin = builtins +String = builtins.str +python_lib_Dict = builtins.dict +python_lib_Set = builtins.set +imap: Incomplete +ifilter: Incomplete + +class _hx_AnonObject: + __dict__: Incomplete + def __init__(self, fields) -> None: ... + def __contains__(self, item) -> bool: ... + def __getitem__(self, item): ... + def __getattr__(self, name) -> None: ... + +class Enum: + tag: Incomplete + index: Incomplete + params: Incomplete + def __init__(self, tag, index, params) -> None: ... + +class Alignment: + has_removal: Incomplete + has_addition: Incomplete + index_columns: Incomplete + order_cache: Incomplete + tb: Incomplete + ta: Incomplete + map_a2b: Incomplete + map_b2a: Incomplete + hb: int + ha: Incomplete + map_count: int + reference: Incomplete + meta: Incomplete + comp: Incomplete + order_cache_has_reference: bool + ia: int + ib: int + marked_as_identical: bool + def __init__(self) -> None: ... + def range(self, ha, hb) -> None: ... + def tables(self, ta, tb) -> None: ... + def headers(self, ia, ib) -> None: ... + def setRowlike(self, flag) -> None: ... + def link(self, a, b) -> None: ... + def addIndexColumns(self, unit) -> None: ... + def getIndexColumns(self): ... + def a2b(self, a): ... + def b2a(self, b): ... + def count(self): ... + def toString(self): ... + def toOrder(self): ... + def addToOrder(self, l, r, p: Incomplete | None = ...) -> None: ... + def getSource(self): ... + def getTarget(self): ... + def getSourceHeader(self): ... + def getTargetHeader(self): ... + def toOrder3(self): ... + def markIdentical(self) -> None: ... + def isMarkedAsIdentical(self): ... + +class CellBuilder: ... + +class CellInfo: + meta: Incomplete + rvalue: Incomplete + lvalue: Incomplete + pvalue: Incomplete + conflicted: Incomplete + updated: Incomplete + pretty_separator: Incomplete + separator: Incomplete + category_given_tr: Incomplete + category: Incomplete + pretty_value: Incomplete + value: Incomplete + raw: Incomplete + def __init__(self) -> None: ... + def toString(self): ... + +class Class: ... + +class ColumnChange: + props: Incomplete + name: Incomplete + prevName: Incomplete + def __init__(self) -> None: ... + +class Table: ... + +class CombinedTable: + meta: Incomplete + body: Incomplete + t: Incomplete + dx: int + dy: int + core: Incomplete + head: Incomplete + def __init__(self, t) -> None: ... + def all(self): ... + def getTable(self): ... + def get_width(self): ... + def get_height(self): ... + def getCell(self, x, y): ... + def setCell(self, x, y, c) -> None: ... + def toString(self): ... + def getCellView(self): ... + def isResizable(self): ... + def resize(self, w, h): ... + def clear(self) -> None: ... + def insertOrDeleteRows(self, fate, hfate): ... + def insertOrDeleteColumns(self, fate, wfate): ... + def trimBlank(self): ... + def getData(self) -> None: ... + def clone(self): ... + def create(self): ... + def getMeta(self): ... 
+ +class CombinedTableBody: + meta: Incomplete + parent: Incomplete + dx: Incomplete + dy: Incomplete + all: Incomplete + def __init__(self, parent, dx, dy) -> None: ... + def getTable(self): ... + def get_width(self): ... + def get_height(self): ... + def getCell(self, x, y): ... + def setCell(self, x, y, c) -> None: ... + def toString(self): ... + def getCellView(self): ... + def isResizable(self): ... + def resize(self, w, h): ... + def clear(self) -> None: ... + def insertOrDeleteRows(self, fate, hfate): ... + def insertOrDeleteColumns(self, fate, wfate): ... + def trimBlank(self): ... + def getData(self) -> None: ... + def clone(self): ... + def create(self): ... + def getMeta(self): ... + +class CombinedTableHead: + parent: Incomplete + dx: Incomplete + dy: Incomplete + all: Incomplete + def __init__(self, parent, dx, dy) -> None: ... + def getTable(self): ... + def get_width(self): ... + def get_height(self): ... + def getCell(self, x, y): ... + def setCell(self, x, y, c) -> None: ... + def toString(self): ... + def getCellView(self): ... + def isResizable(self): ... + def resize(self, w, h): ... + def clear(self) -> None: ... + def insertOrDeleteRows(self, fate, hfate): ... + def insertOrDeleteColumns(self, fate, wfate): ... + def trimBlank(self): ... + def getData(self) -> None: ... + def clone(self) -> None: ... + def create(self) -> None: ... + def getMeta(self) -> None: ... + +class CompareFlags: + padding_strategy: Incomplete + ordered: bool + show_unchanged: bool + unchanged_context: int + always_show_order: bool + never_show_order: bool + show_unchanged_columns: bool + unchanged_column_context: int + always_show_header: bool + acts: Incomplete + ids: Incomplete + columns_to_ignore: Incomplete + allow_nested_cells: bool + warnings: Incomplete + diff_strategy: Incomplete + show_meta: bool + show_unchanged_meta: bool + tables: Incomplete + parent: Incomplete + count_like_a_spreadsheet: bool + ignore_whitespace: bool + ignore_case: bool + ignore_epsilon: int + terminal_format: Incomplete + use_glyphs: bool + quote_html: bool + def __init__(self) -> None: ... + def filter(self, act, allow): ... + def allowUpdate(self): ... + def allowInsert(self): ... + def allowDelete(self): ... + def allowColumn(self): ... + def getIgnoredColumns(self): ... + def addPrimaryKey(self, column) -> None: ... + def ignoreColumn(self, column) -> None: ... + def addTable(self, table) -> None: ... + def addWarning(self, warn) -> None: ... + def getWarning(self): ... + def getNameByRole(self, name, role): ... + def getCanonicalName(self, name): ... + def getIdsByRole(self, role): ... + +class CompareTable: + indexes: Incomplete + comp: Incomplete + def __init__(self, comp) -> None: ... + def run(self): ... + def align(self): ... + def getComparisonState(self): ... + def alignCore(self, align) -> None: ... + def alignCore2(self, align, a, b): ... + def alignColumns(self, align, a, b) -> None: ... + def testHasSameColumns(self): ... + def hasSameColumns2(self, a, b): ... + def testIsEqual(self): ... + def isEqual2(self, a, b): ... + def compareCore(self): ... + def storeIndexes(self) -> None: ... + def getIndexes(self): ... + def useSql(self): ... + +class ConflictInfo: + row: Incomplete + col: Incomplete + pvalue: Incomplete + lvalue: Incomplete + rvalue: Incomplete + def __init__(self, row, col, pvalue, lvalue, rvalue) -> None: ... 
+ +class Coopy: + daff_cmd: Incomplete + status: Incomplete + mv: Incomplete + diffs_found: Incomplete + fail_if_diff: Incomplete + cache_txt: Incomplete + flags: Incomplete + fragment: Incomplete + css_output: Incomplete + strategy: Incomplete + io: Incomplete + order_preference: Incomplete + order_set: Incomplete + nested_output: Incomplete + output_format_set: Incomplete + output_format: Incomplete + extern_preference: Incomplete + csv_eol_preference: Incomplete + delim_preference: Incomplete + format_preference: Incomplete + def __init__(self, io: Incomplete | None = ...) -> None: ... + def init(self) -> None: ... + def checkFormat(self, name): ... + def setFormat(self, name) -> None: ... + def getRenderer(self): ... + def applyRenderer(self, name, renderer): ... + def renderTable(self, name, t): ... + def renderTables(self, name, t): ... + def saveTable(self, name, t, render: Incomplete | None = ...): ... + def encodeTable(self, name, t, render: Incomplete | None = ...): ... + def saveTables(self, name, os, use_color, is_diff): ... + def saveText(self, name, txt): ... + def jsonToTables(self, json): ... + def jsonToTable(self, json): ... + def useColor(self, flags, output): ... + def runDiff(self, parent, a, b, flags, output) -> None: ... + def loadTable(self, name, role): ... + def command(self, io, cmd, args): ... + def installGitDriver(self, io, formats): ... + def run(self, args, io: Incomplete | None = ...): ... + def coopyhx(self, io): ... + @staticmethod + def diffAsHtml(local, remote, flags: Incomplete | None = ...): ... + @staticmethod + def diffAsAnsi(local, remote, flags: Incomplete | None = ...): ... + @staticmethod + def diff(local, remote, flags: Incomplete | None = ...): ... + @staticmethod + def getBlankTable(td, comp): ... + @staticmethod + def align(local, remote, flags, comp): ... + @staticmethod + def patch(local, patch, flags: Incomplete | None = ...): ... + @staticmethod + def compareTables(local, remote, flags: Incomplete | None = ...): ... + @staticmethod + def compareTables3(parent, local, remote, flags: Incomplete | None = ...): ... + @staticmethod + def keepAround(): ... + @staticmethod + def cellFor(x): ... + @staticmethod + def main(): ... + @staticmethod + def show(t) -> None: ... + @staticmethod + def jsonify(t): ... + @staticmethod + def tablify(data): ... + +class CrossMatch: + item_b: Incomplete + item_a: Incomplete + spot_b: Incomplete + spot_a: Incomplete + def __init__(self) -> None: ... + +class Csv: + has_structure: Incomplete + cursor: int + row_ended: bool + delim: Incomplete + discovered_eol: Incomplete + preferred_eol: Incomplete + def __init__(self, delim: Incomplete | None = ..., eol: Incomplete | None = ...) -> None: ... + def renderTable(self, t): ... + def renderCell(self, v, d, force_quote: Incomplete | None = ...): ... + def parseTable(self, txt, tab): ... + def makeTable(self, txt): ... + def parseCellPart(self, txt): ... + def parseCell(self, txt): ... + def getDiscoveredEol(self): ... + def setPreferredEol(self, eol) -> None: ... + +class Date: + dateUTC: Incomplete + date: Incomplete + def __init__(self, year, month, day, hour, _hx_min, sec) -> None: ... + def toString(self): ... + @staticmethod + def makeLocal(date): ... + +class DiffRender: + section: Incomplete + td_close: Incomplete + td_open: Incomplete + text_to_insert: Incomplete + open: bool + pretty_arrows: bool + quote_html: bool + def __init__(self) -> None: ... + def usePrettyArrows(self, flag) -> None: ... + def quoteHtml(self, flag) -> None: ... 
+ def insert(self, _hx_str) -> None: ... + def beginTable(self) -> None: ... + def setSection(self, _hx_str) -> None: ... + def beginRow(self, mode) -> None: ... + def insertCell(self, txt, mode) -> None: ... + def endRow(self) -> None: ... + def endTable(self) -> None: ... + def html(self): ... + def toString(self): ... + def render(self, tab): ... + def renderTables(self, tabs): ... + def sampleCss(self): ... + def completeHtml(self) -> None: ... + @staticmethod + def examineCell( + x, y, view, raw, vcol, vrow, vcorner, cell, offset: Incomplete | None = ... + ): ... + @staticmethod + def markSpaces(sl, sr): ... + @staticmethod + def renderCell(tab, view, x, y): ... + +class DiffSummary: + different: Incomplete + col_count_final: Incomplete + col_count_initial: Incomplete + row_count_final: Incomplete + row_count_initial: Incomplete + row_count_final_with_header: Incomplete + row_count_initial_with_header: Incomplete + col_reorders: Incomplete + col_renames: Incomplete + col_updates: Incomplete + col_inserts: Incomplete + col_deletes: Incomplete + row_reorders: Incomplete + row_updates: Incomplete + row_inserts: Incomplete + row_deletes: Incomplete + def __init__(self) -> None: ... + +class FlatCellBuilder: + conflict_separator: Incomplete + separator: Incomplete + view: Incomplete + flags: Incomplete + def __init__(self, flags) -> None: ... + def needSeparator(self): ... + def setSeparator(self, separator) -> None: ... + def setConflictSeparator(self, separator) -> None: ... + def setView(self, view) -> None: ... + def update(self, local, remote): ... + def conflict(self, parent, local, remote): ... + def marker(self, label): ... + def links(self, unit, row_like): ... + @staticmethod + def quoteForDiff(v, d): ... + +class Row: ... + +class HighlightPatch: + finished_columns: Incomplete + next_meta: Incomplete + prev_meta: Incomplete + process_meta: Incomplete + meta_change: Incomplete + preambleRow: Incomplete + headerRow: Incomplete + haveDroppedColumns: Incomplete + colPermutationRev: Incomplete + colPermutation: Incomplete + rowPermutationRev: Incomplete + rowPermutation: Incomplete + actions: Incomplete + lastSourceRow: Incomplete + patchInSourceRow: Incomplete + patchInDestCol: Incomplete + destInPatchCol: Incomplete + patchInSourceCol: Incomplete + sourceInPatchCol: Incomplete + indexes: Incomplete + rcOffset: Incomplete + cellInfo: Incomplete + rowInfo: Incomplete + cmods: Incomplete + mods: Incomplete + payloadTop: Incomplete + payloadCol: Incomplete + currentRow: Incomplete + modifier: Incomplete + headerMove: Incomplete + headerRename: Incomplete + headerPost: Incomplete + headerPre: Incomplete + header: Incomplete + csv: Incomplete + source: Incomplete + patch: Incomplete + flags: Incomplete + view: Incomplete + sourceView: Incomplete + meta: Incomplete + def __init__(self, source, patch, flags: Incomplete | None = ...) -> None: ... + def reset(self): ... + def processMeta(self) -> None: ... + def apply(self): ... + def needSourceColumns(self) -> None: ... + def needDestColumns(self) -> None: ... + def needSourceIndex(self) -> None: ... + def setMetaProp(self, target, column_name, prop_name, value) -> None: ... + def applyMetaRow(self, code) -> None: ... + def applyRow(self, r): ... + def getDatum(self, c): ... + def getString(self, c): ... + def getStringNull(self, c): ... + def applyMeta(self) -> None: ... + def applyHeader(self) -> None: ... + def lookUp(self, _hx_del: Incomplete | None = ...): ... + def applyActionExternal(self, code) -> None: ... 
+ def applyAction(self, code): ... + def checkAct(self) -> None: ... + def getPreString(self, txt): ... + def getRowString(self, c): ... + def isPreamble(self): ... + def sortMods(self, a, b): ... + def processMods(self, rmods, fate, _hx_len): ... + def useMetaForColumnChanges(self): ... + def useMetaForRowChanges(self): ... + def computeOrdering(self, mods, permutation, permutationRev, dim) -> None: ... + def permuteRows(self) -> None: ... + def fillInNewColumns(self) -> None: ... + def finishRows(self) -> None: ... + def permuteColumns(self) -> None: ... + def finishColumns(self): ... + +class HighlightPatchUnit: + add: bool + rem: bool + update: bool + sourceRow: int + sourceRowOffset: int + sourcePrevRow: int + sourceNextRow: int + destRow: int + patchRow: int + code: str + def __init__(self) -> None: ... + def toString(self): ... + +class Index: + indexed_table: Incomplete + v: Incomplete + items: Incomplete + cols: Incomplete + keys: Incomplete + top_freq: int + height: int + hdr: int + ignore_whitespace: bool + ignore_case: bool + def __init__(self, flags) -> None: ... + def addColumn(self, i) -> None: ... + def indexTable(self, t, hdr) -> None: ... + def toKey(self, t, i): ... + def toKeyByContent(self, row): ... + def getTable(self): ... + +class IndexItem: + lst: Incomplete + def __init__(self) -> None: ... + def add(self, i): ... + def length(self): ... + def value(self): ... + def asList(self): ... + +class IndexPair: + flags: Incomplete + ia: Incomplete + ib: Incomplete + quality: int + hdr: int + def __init__(self, flags) -> None: ... + def addColumns(self, ca, cb) -> None: ... + def indexTables(self, a, b, hdr) -> None: ... + def queryByKey(self, ka): ... + def queryByContent(self, row): ... + def queryLocal(self, row): ... + def localKey(self, row): ... + def remoteKey(self, row): ... + def getTopFreq(self): ... + def getQuality(self): ... + +class Meta: ... + +class JsonTable: + name: Incomplete + idx2col: Incomplete + h: Incomplete + w: Incomplete + data: Incomplete + columns: Incomplete + rows: Incomplete + def __init__(self, data, name) -> None: ... + def getTable(self): ... + def get_width(self): ... + def get_height(self): ... + def getCell(self, x, y): ... + def setCell(self, x, y, c) -> None: ... + def toString(self): ... + def getCellView(self): ... + def isResizable(self): ... + def resize(self, w, h): ... + def clear(self) -> None: ... + def insertOrDeleteRows(self, fate, hfate): ... + def insertOrDeleteColumns(self, fate, wfate): ... + def trimBlank(self): ... + def getData(self) -> None: ... + def clone(self) -> None: ... + def setMeta(self, meta) -> None: ... + def getMeta(self): ... + def create(self) -> None: ... + def alterColumns(self, columns): ... + def changeRow(self, rc): ... + def applyFlags(self, flags): ... + def asTable(self) -> None: ... + def cloneMeta(self, table: Incomplete | None = ...) -> None: ... + def useForColumnChanges(self): ... + def useForRowChanges(self): ... + def getRowStream(self) -> None: ... + def isNested(self): ... + def isSql(self): ... + def getName(self): ... + +class JsonTables: + flags: Incomplete + db: Incomplete + t: Incomplete + def __init__(self, json, flags) -> None: ... + def getCell(self, x, y): ... + def setCell(self, x, y, c) -> None: ... + def getCellView(self): ... + def isResizable(self): ... + def resize(self, w, h): ... + def clear(self) -> None: ... + def insertOrDeleteRows(self, fate, hfate): ... + def insertOrDeleteColumns(self, fate, wfate): ... + def trimBlank(self): ... + def get_width(self): ... 
+ def get_height(self): ... + def getData(self) -> None: ... + def clone(self) -> None: ... + def getMeta(self): ... + def create(self) -> None: ... + +class Lambda: + @staticmethod + def array(it): ... + @staticmethod + def has(it, elt): ... + +class Merger: + conflict_infos: Incomplete + conflicts: Incomplete + column_mix_remote: Incomplete + column_mix_local: Incomplete + row_mix_remote: Incomplete + row_mix_local: Incomplete + column_units: Incomplete + column_order: Incomplete + units: Incomplete + order: Incomplete + parent: Incomplete + local: Incomplete + remote: Incomplete + flags: Incomplete + def __init__(self, parent, local, remote, flags) -> None: ... + def shuffleDimension(self, dim_units, _hx_len, fate, cl, cr): ... + def shuffleColumns(self) -> None: ... + def shuffleRows(self) -> None: ... + def apply(self): ... + def getConflictInfos(self): ... + def addConflictInfo(self, row, col, view, pcell, lcell, rcell) -> None: ... + @staticmethod + def makeConflictedCell(view, pcell, lcell, rcell): ... + +class Mover: + @staticmethod + def moveUnits(units): ... + @staticmethod + def move(isrc, idest): ... + @staticmethod + def moveWithoutExtras(src, dest): ... + +class Ndjson: + columns: Incomplete + tab: Incomplete + view: Incomplete + header_row: int + def __init__(self, tab) -> None: ... + def renderRow(self, r): ... + def render(self): ... + def addRow(self, r, txt) -> None: ... + def addHeaderRow(self, r) -> None: ... + def parse(self, txt) -> None: ... + +class NestedCellBuilder: + view: Incomplete + def __init__(self) -> None: ... + def needSeparator(self): ... + def setSeparator(self, separator) -> None: ... + def setConflictSeparator(self, separator) -> None: ... + def setView(self, view) -> None: ... + def update(self, local, remote): ... + def conflict(self, parent, local, remote): ... + def marker(self, label): ... + def negToNull(self, x): ... + def links(self, unit, row_like): ... + +class Ordering: + order: Incomplete + ignore_parent: bool + def __init__(self) -> None: ... + def add(self, l, r, p: Incomplete | None = ...) -> None: ... + def getList(self): ... + def setList(self, lst) -> None: ... + def toString(self): ... + def ignoreParent(self) -> None: ... + +class PropertyChange: + val: Incomplete + name: Incomplete + prevName: Incomplete + def __init__(self) -> None: ... + +class Reflect: + @staticmethod + def field(o, field): ... + @staticmethod + def isFunction(f): ... + @staticmethod + def compare(a, b): ... + +class RowChange: + action: Incomplete + is_key: Incomplete + conflicted: Incomplete + conflicting_parent_val: Incomplete + conflicting_val: Incomplete + val: Incomplete + cond: Incomplete + def __init__(self) -> None: ... + def showMap(self, m): ... + def toString(self): ... + +class RowStream: ... + +class SimpleMeta: + may_be_nested: Incomplete + row_change_cache: Incomplete + row_active: Incomplete + keys: Incomplete + metadata: Incomplete + has_properties: Incomplete + name2col: Incomplete + name2row: Incomplete + t: Incomplete + def __init__( + self, t, has_properties: Incomplete | None = ..., may_be_nested: Incomplete | None = ... + ) -> None: ... + def storeRowChanges(self, changes) -> None: ... + def rowChange(self) -> None: ... + def colChange(self) -> None: ... + def col(self, key): ... + def row(self, key): ... + def alterColumns(self, columns): ... + def setCell(self, c, r, val): ... + def addMetaData(self, column, property, val) -> None: ... + def asTable(self): ... + def cloneMeta(self, table: Incomplete | None = ...): ... 
+ def useForColumnChanges(self): ... + def useForRowChanges(self): ... + def changeRow(self, rc): ... + def applyFlags(self, flags): ... + def getRowStream(self): ... + def isNested(self): ... + def isSql(self): ... + def getName(self) -> None: ... + +class SimpleTable: + data: Incomplete + w: Incomplete + h: Incomplete + meta: Incomplete + def __init__(self, w, h) -> None: ... + def getTable(self): ... + def get_width(self): ... + def get_height(self): ... + def getCell(self, x, y): ... + def setCell(self, x, y, c) -> None: ... + def toString(self): ... + def getCellView(self): ... + def isResizable(self): ... + def resize(self, w, h): ... + def clear(self) -> None: ... + def insertOrDeleteRows(self, fate, hfate): ... + def insertOrDeleteColumns(self, fate, wfate): ... + def trimBlank(self): ... + def getData(self) -> None: ... + def clone(self): ... + def create(self): ... + def setMeta(self, meta) -> None: ... + def getMeta(self): ... + @staticmethod + def tableToString(tab): ... + @staticmethod + def tableIsSimilar(tab1, tab2): ... + +class View: ... + +class SimpleView: + def __init__(self) -> None: ... + def toString(self, d): ... + def equals(self, d1, d2): ... + def toDatum(self, x): ... + def makeHash(self): ... + def hashSet(self, h, _hx_str, d) -> None: ... + def hashExists(self, h, _hx_str): ... + def hashGet(self, h, _hx_str): ... + def isHash(self, h): ... + def isTable(self, t): ... + def getTable(self, t): ... + def wrapTable(self, t): ... + +class SparseSheet: + zero: Incomplete + row: Incomplete + w: int + h: Incomplete + def __init__(self) -> None: ... + def resize(self, w, h, zero) -> None: ... + def nonDestructiveResize(self, w, h, zero) -> None: ... + def get(self, x, y): ... + def set(self, x, y, val) -> None: ... + +class SqlColumn: + name: str + primary: bool + type_value: Incomplete + type_family: Incomplete + def __init__(self) -> None: ... + def setName(self, name) -> None: ... + def setPrimaryKey(self, primary) -> None: ... + def setType(self, value, family) -> None: ... + def getName(self): ... + def isPrimaryKey(self): ... + def toString(self): ... + +class SqlCompare: + needed: Incomplete + alt_peered: Incomplete + peered: Incomplete + diff_ct: Incomplete + at2: Incomplete + at1: Incomplete + at0: Incomplete + db: Incomplete + local: Incomplete + remote: Incomplete + alt: Incomplete + align: Incomplete + flags: Incomplete + def __init__( + self, + db, + local, + remote, + alt, + align: Incomplete | None = ..., + flags: Incomplete | None = ..., + ) -> None: ... + def equalArray(self, a1, a2): ... + def validateSchema(self): ... + def denull(self, x): ... + def link(self) -> None: ... + def linkQuery(self, query, order) -> None: ... + def where(self, txt): ... + def scanColumns(self, all_cols1, all_cols2, key_cols, present1, present2, align) -> None: ... + def apply(self): ... + +class SqlDatabase: ... +class SqlHelper: ... + +class SqlTable: + columnNames: Incomplete + quotedTableName: Incomplete + columns: Incomplete + db: Incomplete + name: Incomplete + helper: Incomplete + cache: Incomplete + h: int + id2rid: Incomplete + def __init__(self, db, name, helper: Incomplete | None = ...) -> None: ... + def getColumns(self) -> None: ... + def getPrimaryKey(self): ... + def getAllButPrimaryKey(self): ... + def getColumnNames(self): ... + def getQuotedTableName(self): ... + def getQuotedColumnName(self, name): ... + def getCell(self, x, y): ... + def setCellCache(self, x, y, c) -> None: ... + def setCell(self, x, y, c) -> None: ... + def getCellView(self): ... 
+ def isResizable(self): ... + def resize(self, w, h): ... + def clear(self) -> None: ... + def insertOrDeleteRows(self, fate, hfate): ... + def insertOrDeleteColumns(self, fate, wfate): ... + def trimBlank(self): ... + def get_width(self): ... + def get_height(self): ... + def getData(self) -> None: ... + def clone(self) -> None: ... + def create(self) -> None: ... + def getMeta(self): ... + def alterColumns(self, columns): ... + def changeRow(self, rc): ... + def asTable(self): ... + def useForColumnChanges(self): ... + def useForRowChanges(self): ... + def cloneMeta(self, table: Incomplete | None = ...) -> None: ... + def applyFlags(self, flags): ... + def getDatabase(self): ... + def getRowStream(self): ... + def isNested(self): ... + def isSql(self): ... + def fetchRow(self): ... + def fetchColumns(self): ... + def getName(self): ... + +class SqlTableName: + name: Incomplete + prefix: Incomplete + def __init__(self, name: Incomplete | None = ..., prefix: Incomplete | None = ...) -> None: ... + def toString(self): ... + +class SqlTables: + flags: Incomplete + t: Incomplete + db: Incomplete + def __init__(self, db, flags, role) -> None: ... + def getCell(self, x, y): ... + def setCell(self, x, y, c) -> None: ... + def getCellView(self): ... + def isResizable(self): ... + def resize(self, w, h): ... + def clear(self) -> None: ... + def insertOrDeleteRows(self, fate, hfate): ... + def insertOrDeleteColumns(self, fate, wfate): ... + def trimBlank(self): ... + def get_width(self): ... + def get_height(self): ... + def getData(self) -> None: ... + def clone(self) -> None: ... + def create(self) -> None: ... + def getMeta(self): ... + +class SqliteHelper: + def __init__(self) -> None: ... + def getTableNames(self, db): ... + def countRows(self, db, name): ... + def getRowIDs(self, db, name): ... + def update(self, db, name, conds, vals): ... + def delete(self, db, name, conds): ... + def insert(self, db, name, vals): ... + def attach(self, db, tag, resource_name): ... + def columnListSql(self, x): ... + def fetchSchema(self, db, name): ... + def splitSchema(self, db, name, sql): ... + def alterColumns(self, db, name, columns): ... + +class Std: + @staticmethod + def isOfType(v, t): ... + @staticmethod + def string(s): ... + @staticmethod + def parseInt(x): ... + @staticmethod + def shortenPossibleNumber(x): ... + @staticmethod + def parseFloat(x): ... + +class Float: ... +class Int: ... +class Bool: ... +class Dynamic: ... + +class StringBuf: + b: Incomplete + def __init__(self) -> None: ... + def get_length(self): ... + +class StringTools: + @staticmethod + def htmlEscape(s, quotes: Incomplete | None = ...): ... + @staticmethod + def isSpace(s, pos): ... + @staticmethod + def ltrim(s): ... + @staticmethod + def rtrim(s): ... + @staticmethod + def trim(s): ... + @staticmethod + def lpad(s, c, l): ... + @staticmethod + def replace(s, sub, by): ... + +class sys_FileSystem: + @staticmethod + def exists(path): ... + +class haxe_IMap: ... + +class haxe_ds_StringMap: + h: Incomplete + def __init__(self) -> None: ... + def keys(self): ... + def iterator(self): ... + +class python_HaxeIterator: + checked: bool + has: bool + x: Incomplete + it: Incomplete + def __init__(self, it) -> None: ... + def __next__(self): ... + def next(self): ... + def hasNext(self): ... + +class Sys: + @staticmethod + def exit(code) -> None: ... + @staticmethod + def args(): ... + @staticmethod + def getEnv(s): ... + @staticmethod + def command(cmd, args: Incomplete | None = ...): ... + @staticmethod + def stdout(): ... 
+ @staticmethod + def stderr(): ... + +class TableComparisonState: + child_order: Incomplete + children: Incomplete + alignment: Incomplete + b_meta: Incomplete + a_meta: Incomplete + p_meta: Incomplete + compare_flags: Incomplete + has_same_columns_known: Incomplete + has_same_columns: Incomplete + is_equal_known: Incomplete + is_equal: Incomplete + run_to_completion: Incomplete + completed: Incomplete + b: Incomplete + a: Incomplete + p: Incomplete + def __init__(self) -> None: ... + def reset(self) -> None: ... + def getMeta(self) -> None: ... + +class TableDiff: + nesting_present: Incomplete + nested: Incomplete + column_units_updated: Incomplete + col_reorders: Incomplete + col_renames: Incomplete + col_updates: Incomplete + col_inserts: Incomplete + col_deletes: Incomplete + row_reorders: Incomplete + row_updates: Incomplete + row_inserts: Incomplete + row_deletes: Incomplete + schema_diff_found: Incomplete + diff_found: Incomplete + publish: Incomplete + act: Incomplete + have_addition: Incomplete + top_line_done: Incomplete + have_schema: Incomplete + schema: Incomplete + conflict_sep: Incomplete + sep: Incomplete + v: Incomplete + allow_column: Incomplete + allow_update: Incomplete + allow_delete: Incomplete + allow_insert: Incomplete + active_column: Incomplete + active_row: Incomplete + col_moves: Incomplete + row_moves: Incomplete + show_rc_numbers: Incomplete + column_units: Incomplete + row_units: Incomplete + order: Incomplete + is_index_b: Incomplete + is_index_a: Incomplete + is_index_p: Incomplete + rb_header: Incomplete + ra_header: Incomplete + rp_header: Incomplete + p: Incomplete + b: Incomplete + a: Incomplete + has_parent: Incomplete + col_map: Incomplete + row_map: Incomplete + align: Incomplete + flags: Incomplete + builder: Incomplete + preserve_columns: bool + def __init__(self, align, flags) -> None: ... + def setCellBuilder(self, builder) -> None: ... + def getSeparator(self, t, t2, root): ... + def isReordered(self, m, ct): ... + def spreadContext(self, units, _hx_del, active) -> None: ... + def setIgnore(self, ignore, idx_ignore, tab, r_header) -> None: ... + def countActive(self, active): ... + def reset(self): ... + def setupTables(self) -> None: ... + def scanActivity(self) -> None: ... + def setupColumns(self) -> None: ... + def setupMoves(self) -> None: ... + def scanSchema(self) -> None: ... + def checkRcNumbers(self, w, h) -> None: ... + def addRcNumbers(self, output): ... + def elideColumns(self, output, admin_w) -> None: ... + def addSchema(self, output) -> None: ... + def addHeader(self, output) -> None: ... + def checkMeta(self, t, meta): ... + def getMetaTable(self, t): ... + def addMeta(self, output): ... + def refineActivity(self) -> None: ... + def normalizeString(self, v, _hx_str): ... + def isEqual(self, v, aa, bb): ... + def checkNesting(self, v, have_ll, ll, have_rr, rr, have_pp, pp, x, y): ... + def scanRow(self, unit, output, at, i, out) -> None: ... + def hilite(self, output): ... + def hiliteSingle(self, output): ... + def hiliteWithNesting(self, output): ... + def hasDifference(self): ... + def hasSchemaDifference(self): ... + def isNested(self): ... + def getComparisonState(self): ... + def getSummary(self): ... + +class TableIO: + def __init__(self) -> None: ... + def valid(self): ... + def getContent(self, name): ... + def saveContent(self, name, txt): ... + def args(self): ... + def writeStdout(self, txt) -> None: ... + def writeStderr(self, txt) -> None: ... + def command(self, cmd, args): ... + def hasAsync(self): ... 
+ def exists(self, path): ... + def isTtyKnown(self): ... + def isTty(self): ... + def openSqliteDatabase(self, path): ... + def sendToBrowser(self, html) -> None: ... + +class TableModifier: + t: Incomplete + def __init__(self, t) -> None: ... + def removeColumn(self, at): ... + +class TableStream: + row: Incomplete + columns: Incomplete + t: Incomplete + at: int + h: Incomplete + src: Incomplete + def __init__(self, t) -> None: ... + def fetchColumns(self): ... + def fetchRow(self): ... + def fetch(self): ... + def getCell(self, x): ... + def width(self): ... + +class Tables: + alignment: Incomplete + template: Incomplete + tables: Incomplete + table_order: Incomplete + def __init__(self, template) -> None: ... + def add(self, name): ... + def getOrder(self): ... + def get(self, name): ... + def one(self): ... + def hasInsDel(self): ... + +class TerminalDiffRender: + v: Incomplete + csv: Incomplete + t: Incomplete + codes: Incomplete + align_columns: bool + wide_columns: bool + use_glyphs: bool + flags: Incomplete + delim: Incomplete + diff: Incomplete + def __init__( + self, + flags: Incomplete | None = ..., + delim: Incomplete | None = ..., + diff: Incomplete | None = ..., + ) -> None: ... + def alignColumns(self, enable) -> None: ... + def render(self, t): ... + def getText(self, x, y, color): ... + def pickSizes(self, t): ... + +class ValueType(Enum): + @staticmethod + def TClass(c): ... + @staticmethod + def TEnum(e): ... + +class Type: + @staticmethod + def getClass(o): ... + @staticmethod + def typeof(v): ... + +class Unit: + l: Incomplete + r: Incomplete + p: Incomplete + def __init__( + self, l: Incomplete | None = ..., r: Incomplete | None = ..., p: Incomplete | None = ... + ) -> None: ... + def lp(self): ... + def toString(self): ... + def fromString(self, txt): ... + def base26(self, num): ... + def toBase26String(self): ... + @staticmethod + def describe(i): ... + +class Viterbi: + path: Incomplete + src: Incomplete + cost: Incomplete + best_cost: Incomplete + path_valid: Incomplete + mode: Incomplete + index: Incomplete + T: int + K: Incomplete + def __init__(self) -> None: ... + def reset(self) -> None: ... + def setSize(self, states, sequence_length) -> None: ... + def assertMode(self, next) -> None: ... + def addTransition(self, s0, s1, c) -> None: ... + def endTransitions(self) -> None: ... + def beginTransitions(self) -> None: ... + def calculatePath(self) -> None: ... + def toString(self): ... + def length(self): ... + def get(self, i): ... + def getCost(self): ... + +class haxe_Exception(Exception): + def __init__( + self, message, previous: Incomplete | None = ..., native: Incomplete | None = ... + ) -> None: ... + def unwrap(self): ... + def get_native(self): ... + @staticmethod + def caught(value): ... + @staticmethod + def thrown(value): ... + +class haxe_NativeStackTrace: + @staticmethod + def saveStack(exception) -> None: ... + @staticmethod + def exceptionStack(): ... + +class haxe_ValueException(haxe_Exception): + value: Incomplete + def __init__( + self, value, previous: Incomplete | None = ..., native: Incomplete | None = ... + ) -> None: ... + def unwrap(self): ... + +class haxe_ds_IntMap: + h: Incomplete + def __init__(self) -> None: ... + def set(self, key, value) -> None: ... + def remove(self, key): ... + def keys(self): ... + def toString(self): ... + +class haxe_format_JsonPrinter: + replacer: Incomplete + indent: Incomplete + pretty: Incomplete + nind: int + buf: Incomplete + def __init__(self, replacer, space) -> None: ... 
+ def write(self, k, v) -> None: ... + def classString(self, v) -> None: ... + def fieldsString(self, v, fields) -> None: ... + def quote(self, s) -> None: ... + @staticmethod + def print(o, replacer: Incomplete | None = ..., space: Incomplete | None = ...): ... + +class haxe_io_Bytes: + length: Incomplete + b: Incomplete + def __init__(self, length, b) -> None: ... + @staticmethod + def ofString(s, encoding: Incomplete | None = ...): ... + +class haxe_io_Encoding(Enum): ... + +class haxe_io_Error(Enum): + @staticmethod + def Custom(e): ... + +class haxe_io_Output: + def writeByte(self, c) -> None: ... + def writeBytes(self, s, pos, _hx_len): ... + bigEndian: Incomplete + def set_bigEndian(self, b): ... + def writeFullBytes(self, s, pos, _hx_len) -> None: ... + def writeString(self, s, encoding: Incomplete | None = ...) -> None: ... + +class haxe_iterators_ArrayIterator: + current: int + array: Incomplete + def __init__(self, array) -> None: ... + def hasNext(self): ... + def __next__(self): ... + def next(self): ... + +class haxe_iterators_ArrayKeyValueIterator: + current: int + array: Incomplete + def __init__(self, array) -> None: ... + def hasNext(self): ... + def __next__(self): ... + def next(self): ... + +class python_Boot: + @staticmethod + def toString1(o, s): ... + @staticmethod + def fields(o): ... + @staticmethod + def simpleField(o, field): ... + @staticmethod + def hasField(o, field): ... + @staticmethod + def field(o, field): ... + @staticmethod + def getInstanceFields(c): ... + @staticmethod + def getSuperClass(c): ... + @staticmethod + def getClassFields(c): ... + @staticmethod + def unhandleKeywords(name): ... + +class python__KwArgs_KwArgs_Impl_: + @staticmethod + def fromT(d): ... + +class python_Lib: + @staticmethod + def dictToAnon(v): ... + @staticmethod + def anonToDict(o): ... + @staticmethod + def anonAsDict(o): ... + +class python_internal_ArrayImpl: + @staticmethod + def concat(a1, a2): ... + @staticmethod + def copy(x): ... + @staticmethod + def iterator(x): ... + @staticmethod + def keyValueIterator(x): ... + @staticmethod + def indexOf(a, x, fromIndex: Incomplete | None = ...): ... + @staticmethod + def lastIndexOf(a, x, fromIndex: Incomplete | None = ...): ... + @staticmethod + def join(x, sep): ... + @staticmethod + def toString(x): ... + @staticmethod + def pop(x): ... + @staticmethod + def push(x, e): ... + @staticmethod + def unshift(x, e) -> None: ... + @staticmethod + def remove(x, e): ... + @staticmethod + def contains(x, e): ... + @staticmethod + def shift(x): ... + @staticmethod + def slice(x, pos, end: Incomplete | None = ...): ... + @staticmethod + def sort(x, f) -> None: ... + @staticmethod + def splice(x, pos, _hx_len): ... + @staticmethod + def map(x, f): ... + @staticmethod + def filter(x, f): ... + @staticmethod + def insert(a, pos, x) -> None: ... + @staticmethod + def reverse(a) -> None: ... + +class HxOverrides: + @staticmethod + def iterator(x): ... + @staticmethod + def eq(a, b): ... + @staticmethod + def stringOrNull(s): ... + @staticmethod + def modf(a, b): ... + @staticmethod + def mod(a, b): ... + @staticmethod + def mapKwArgs(a, v): ... + +class python_internal_MethodClosure: + obj: Incomplete + func: Incomplete + def __init__(self, obj, func) -> None: ... + def __call__(self, *args): ... + +class HxString: + @staticmethod + def split(s, d): ... + @staticmethod + def charCodeAt(s, index): ... + @staticmethod + def charAt(s, index): ... + @staticmethod + def lastIndexOf(s, _hx_str, startIndex: Incomplete | None = ...): ... 
+ @staticmethod + def toUpperCase(s): ... + @staticmethod + def toLowerCase(s): ... + @staticmethod + def indexOf(s, _hx_str, startIndex: Incomplete | None = ...): ... + @staticmethod + def indexOfImpl(s, _hx_str, startIndex): ... + @staticmethod + def toString(s): ... + @staticmethod + def substring(s, startIndex, endIndex: Incomplete | None = ...): ... + @staticmethod + def substr(s, startIndex, _hx_len: Incomplete | None = ...): ... + +class python_io_NativeOutput(haxe_io_Output): + stream: Incomplete + def __init__(self, stream) -> None: ... + +class python_io_IOutput: ... +class python_io_IFileOutput: ... + +class python_io_NativeTextOutput(python_io_NativeOutput): + def __init__(self, stream) -> None: ... + def writeBytes(self, s, pos, _hx_len): ... + def writeByte(self, c) -> None: ... + +class python_io_FileTextOutput(python_io_NativeTextOutput): + def __init__(self, stream) -> None: ... + +class python_io_IoTools: + @staticmethod + def createFileOutputFromText(t): ... + +class sys_io_File: + @staticmethod + def getContent(path): ... + @staticmethod + def saveContent(path, content) -> None: ... + +class sys_io_FileOutput(haxe_io_Output): + impl: Incomplete + def __init__(self, impl) -> None: ... + def set_bigEndian(self, b): ... + def writeByte(self, c) -> None: ... + def writeBytes(self, s, pos, _hx_len): ... + def writeFullBytes(self, s, pos, _hx_len) -> None: ... + def writeString(self, s, encoding: Incomplete | None = ...) -> None: ... + +class PythonCellView(View): + def __init__(self) -> None: ... + def toString(self, d): ... + def equals(self, d1, d2): ... + def toDatum(self, d): ... + def makeHash(self): ... + def isHash(self, d): ... + def hashSet(self, d, k, v) -> None: ... + def hashGet(self, d, k): ... + def hashExists(self, d, k): ... + +class PythonTableView(Table): + data: Incomplete + height: Incomplete + width: int + def __init__(self, data) -> None: ... + def get_width(self): ... + def get_height(self): ... + def getCell(self, x, y): ... + def setCell(self, x, y, c) -> None: ... + def toString(self): ... + def getCellView(self): ... + def isResizable(self): ... + def resize(self, w, h): ... + def clear(self) -> None: ... + def trimBlank(self): ... + def getData(self): ... + def insertOrDeleteRows(self, fate, hfate): ... + def insertOrDeleteColumns(self, fate, wfate): ... + def isSimilar(self, alt): ... + def clone(self): ... + def create(self): ... + def getMeta(self) -> None: ... + +class SqliteDatabase(SqlDatabase): + db: Incomplete + fname: Incomplete + cursor: Incomplete + row: Incomplete + quoter: Incomplete + view: Incomplete + def __init__(self, db, fname) -> None: ... + def getQuotedColumnName(self, name): ... + def getQuotedTableName(self, name): ... + def getColumns(self, name): ... + def begin(self, query, args=..., order=...): ... + def beginRow(self, tab, row, order=...): ... + def read(self): ... + def get(self, index): ... + def end(self) -> None: ... + def rowid(self): ... + def getHelper(self): ... + def getNameForAttachment(self): ... + +def get_stdout(): ... +def stream_write(s): ... +def main() -> None: ... 
diff --git a/third-party-stubs/isodate/__init__.pyi b/third-party-stubs/isodate/__init__.pyi deleted file mode 100644 index 96b67c34a0d..00000000000 --- a/third-party-stubs/isodate/__init__.pyi +++ /dev/null @@ -1,4 +0,0 @@ -import datetime - -def parse_datetime(datetimestring: str): - datetime.datetime diff --git a/third-party-stubs/logbook/__init__.pyi b/third-party-stubs/logbook/__init__.pyi deleted file mode 100644 index a0952ff7c5f..00000000000 --- a/third-party-stubs/logbook/__init__.pyi +++ /dev/null @@ -1,65 +0,0 @@ -# Stubs for logbook (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from .__version__ import __version__ -from .base import ( - CRITICAL as CRITICAL, - DEBUG as DEBUG, - ERROR as ERROR, - Flags as Flags, - INFO as INFO, - LogRecord as LogRecord, - Logger as Logger, - LoggerGroup as LoggerGroup, - NOTICE as NOTICE, - NOTSET as NOTSET, - NestedSetup as NestedSetup, - Processor as Processor, - TRACE as TRACE, - WARNING as WARNING, - dispatch_record as dispatch_record, - get_level_name as get_level_name, - lookup_level as lookup_level, - set_datetime_format as set_datetime_format, -) -from .handlers import ( - BrotliCompressionHandler as BrotliCompressionHandler, - FileHandler as FileHandler, - FingersCrossedHandler as FingersCrossedHandler, - GMailHandler as GMailHandler, - GZIPCompressionHandler as GZIPCompressionHandler, - GroupHandler as GroupHandler, - Handler as Handler, - HashingHandlerMixin as HashingHandlerMixin, - LimitingHandlerMixin as LimitingHandlerMixin, - MailHandler as MailHandler, - MonitoringFileHandler as MonitoringFileHandler, - NTEventLogHandler as NTEventLogHandler, - NullHandler as NullHandler, - RotatingFileHandler as RotatingFileHandler, - StderrHandler as StderrHandler, - StreamHandler as StreamHandler, - StringFormatter as StringFormatter, - StringFormatterHandlerMixin as StringFormatterHandlerMixin, - SyslogHandler as SyslogHandler, - TestHandler as TestHandler, - TimedRotatingFileHandler as TimedRotatingFileHandler, - WrapperHandler as WrapperHandler, - create_syshandler as create_syshandler, -) -from . import compat as compat -from typing import Any - -trace: Any -debug: Any -info: Any -warn: Any -warning: Any -notice: Any -error: Any -exception: Any -catch_exceptions: Any -critical: Any -log: Any -default_handler: Any diff --git a/third-party-stubs/logbook/__version__.pyi b/third-party-stubs/logbook/__version__.pyi deleted file mode 100644 index e5b7a06e5ee..00000000000 --- a/third-party-stubs/logbook/__version__.pyi +++ /dev/null @@ -1,5 +0,0 @@ -# Stubs for logbook.__version__ (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -__version__: str diff --git a/third-party-stubs/logbook/_fallback.pyi b/third-party-stubs/logbook/_fallback.pyi deleted file mode 100644 index 0e2b32f6edf..00000000000 --- a/third-party-stubs/logbook/_fallback.pyi +++ /dev/null @@ -1,40 +0,0 @@ -# Stubs for logbook._fallback (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from typing import Any - -def group_reflected_property(name: Any, default: Any, fallback: Any = ...): ... - -class _StackBound: - def __init__(self, obj: Any, push: Any, pop: Any) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... - -class StackedObject: - def push_greenlet(self) -> None: ... - def pop_greenlet(self) -> None: ... - def push_context(self) -> None: ... - def pop_context(self) -> None: ... 
- def push_thread(self) -> None: ... - def pop_thread(self) -> None: ... - def push_application(self) -> None: ... - def pop_application(self) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any) -> None: ... - def greenletbound(self, _cls: Any = ...): ... - def contextbound(self, _cls: Any = ...): ... - def threadbound(self, _cls: Any = ...): ... - def applicationbound(self, _cls: Any = ...): ... - -class ContextStackManager: - def __init__(self) -> None: ... - def iter_context_objects(self): ... - def push_greenlet(self, obj: Any) -> None: ... - def pop_greenlet(self): ... - def push_context(self, obj: Any) -> None: ... - def pop_context(self): ... - def push_thread(self, obj: Any) -> None: ... - def pop_thread(self): ... - def push_application(self, obj: Any) -> None: ... - def pop_application(self): ... diff --git a/third-party-stubs/logbook/_termcolors.pyi b/third-party-stubs/logbook/_termcolors.pyi deleted file mode 100644 index 7284b777785..00000000000 --- a/third-party-stubs/logbook/_termcolors.pyi +++ /dev/null @@ -1,13 +0,0 @@ -# Stubs for logbook._termcolors (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from typing import Any - -esc: str -codes: Any -dark_colors: Any -light_colors: Any -x: int - -def colorize(color_key: Any, text: Any): ... diff --git a/third-party-stubs/logbook/base.pyi b/third-party-stubs/logbook/base.pyi deleted file mode 100644 index ed769dd1698..00000000000 --- a/third-party-stubs/logbook/base.pyi +++ /dev/null @@ -1,184 +0,0 @@ -# Stubs for logbook.base (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook._fallback import StackedObject -from typing import Any, Optional - -def set_datetime_format(datetime_format: Any) -> None: ... - -CRITICAL: int -ERROR: int -WARNING: int -NOTICE: int -INFO: int -DEBUG: int -TRACE: int -NOTSET: int - -def level_name_property(): ... -def lookup_level(level: Any): ... -def get_level_name(level: Any): ... - -class _ExceptionCatcher: - logger: Any = ... - args: Any = ... - kwargs: Any = ... - def __init__(self, logger: Any, args: Any, kwargs: Any) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any): ... - -class ContextObject(StackedObject): - stack_manager: Any = ... - def push_greenlet(self) -> None: ... - def pop_greenlet(self) -> None: ... - def push_context(self) -> None: ... - def pop_context(self) -> None: ... - def push_thread(self) -> None: ... - def pop_thread(self) -> None: ... - def push_application(self) -> None: ... - def pop_application(self) -> None: ... - -class NestedSetup(StackedObject): - objects: Any = ... - def __init__(self, objects: Optional[Any] = ...) -> None: ... - def push_application(self) -> None: ... - def pop_application(self) -> None: ... - def push_thread(self) -> None: ... - def pop_thread(self) -> None: ... - def push_greenlet(self) -> None: ... - def pop_greenlet(self) -> None: ... - def push_context(self) -> None: ... - def pop_context(self) -> None: ... - -class Processor(ContextObject): - stack_manager: Any = ... - callback: Any = ... - def __init__(self, callback: Optional[Any] = ...) -> None: ... - def process(self, record: Any) -> None: ... - -class _InheritedType: - def __reduce__(self): ... - -Inherit: Any - -class Flags(ContextObject): - stack_manager: Any = ... - def __init__(self, **flags: Any) -> None: ... - @staticmethod - def get_flag(flag: Any, default: Optional[Any] = ...): ... 
- -class LogRecord: - keep_open: bool = ... - time: Any = ... - heavy_initialized: bool = ... - late: bool = ... - information_pulled: bool = ... - channel: Any = ... - msg: Any = ... - args: Any = ... - kwargs: Any = ... - level: Any = ... - exc_info: Any = ... - extra: Any = ... - frame: Any = ... - frame_correction: Any = ... - process: int = ... - def __init__( - self, - channel: Any, - level: Any, - msg: Any, - args: Optional[Any] = ..., - kwargs: Optional[Any] = ..., - exc_info: Optional[Any] = ..., - extra: Optional[Any] = ..., - frame: Optional[Any] = ..., - dispatcher: Optional[Any] = ..., - frame_correction: int = ..., - ) -> None: ... - def heavy_init(self) -> None: ... - def pull_information(self) -> None: ... - def close(self) -> None: ... - def __reduce_ex__(self, protocol: Any): ... - def to_dict(self, json_safe: bool = ...): ... - @classmethod - def from_dict(cls, d: Any): ... - def update_from_dict(self, d: Any): ... - def message(self): ... - level_name: Any = ... - def calling_frame(self): ... - def func_name(self): ... - def module(self): ... - def filename(self): ... - def lineno(self): ... - def greenlet(self): ... - def thread(self): ... - @property - def thread_name(self) -> str: ... - def process_name(self): ... - @property - def formatted_exception(self) -> Optional[str]: ... - def exception_name(self): ... - @property - def exception_shortname(self): ... - def exception_message(self): ... - @property - def dispatcher(self): ... - -class LoggerMixin: - level_name: Any = ... - def trace(self, *args: Any, **kwargs: Any) -> None: ... - def debug(self, *args: Any, **kwargs: Any) -> None: ... - def info(self, *args: Any, **kwargs: Any) -> None: ... - def warn(self, *args: Any, **kwargs: Any) -> None: ... - def warning(self, *args: Any, **kwargs: Any): ... - def notice(self, *args: Any, **kwargs: Any) -> None: ... - def error(self, *args: Any, **kwargs: Any) -> None: ... - def exception(self, *args: Any, **kwargs: Any): ... - def critical(self, *args: Any, **kwargs: Any) -> None: ... - def log(self, level: Any, *args: Any, **kwargs: Any) -> None: ... - def catch_exceptions(self, *args: Any, **kwargs: Any): ... - disabled: bool = ... - def enable(self) -> None: ... - def disable(self) -> None: ... - -class RecordDispatcher: - suppress_dispatcher: bool = ... - name: Any = ... - handlers: Any = ... - group: Any = ... - level: Any = ... - def __init__(self, name: Optional[Any] = ..., level: Any = ...) -> None: ... - disabled: Any = ... - def handle(self, record: Any) -> None: ... - def make_record_and_handle( - self, - level: Any, - msg: Any, - args: Any, - kwargs: Any, - exc_info: Any, - extra: Any, - frame_correction: Any, - ) -> None: ... - def call_handlers(self, record: Any) -> None: ... - def process_record(self, record: Any) -> None: ... - -class Logger(RecordDispatcher, LoggerMixin): ... - -class LoggerGroup: - loggers: Any = ... - level: Any = ... - disabled: bool = ... - processor: Any = ... - def __init__( - self, loggers: Optional[Any] = ..., level: Any = ..., processor: Optional[Any] = ... - ) -> None: ... - def add_logger(self, logger: Any) -> None: ... - def remove_logger(self, logger: Any) -> None: ... - def process_record(self, record: Any) -> None: ... - def enable(self, force: bool = ...) -> None: ... - def disable(self, force: bool = ...) -> None: ... - -def dispatch_record(record: Any) -> None: ... 
diff --git a/third-party-stubs/logbook/compat.pyi b/third-party-stubs/logbook/compat.pyi deleted file mode 100644 index 75592bf31ed..00000000000 --- a/third-party-stubs/logbook/compat.pyi +++ /dev/null @@ -1,60 +0,0 @@ -# Stubs for logbook.compat (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -import logbook -import logging -from typing import Any, Optional - -def redirect_logging(set_root_logger_level: bool = ...) -> None: ... - -class redirected_logging: - old_handlers: Any = ... - old_level: Any = ... - set_root_logger_level: Any = ... - def __init__(self, set_root_logger_level: bool = ...) -> None: ... - def start(self) -> None: ... - def end( - self, etype: Optional[Any] = ..., evalue: Optional[Any] = ..., tb: Optional[Any] = ... - ) -> None: ... - __enter__: Any = ... - __exit__: Any = ... - -class LoggingCompatRecord(logbook.LogRecord): ... - -class RedirectLoggingHandler(logging.Handler): - def __init__(self) -> None: ... - def convert_level(self, level: Any): ... - def find_extra(self, old_record: Any): ... - def find_caller(self, old_record: Any): ... - def convert_time(self, timestamp: Any): ... - def convert_record(self, old_record: Any): ... - def emit(self, record: Any) -> None: ... - -class LoggingHandler(logbook.Handler): - logger: Any = ... - def __init__( - self, - logger: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def get_logger(self, record: Any): ... - def convert_level(self, level: Any): ... - def convert_time(self, dt: Any): ... - def convert_record(self, old_record: Any): ... - def emit(self, record: Any) -> None: ... - -def redirect_warnings() -> None: ... - -class redirected_warnings: - def __init__(self) -> None: ... - def message_to_unicode(self, message: Any): ... - def make_record(self, message: Any, exception: Any, filename: Any, lineno: Any): ... - def start(self) -> None: ... - def end( - self, etype: Optional[Any] = ..., evalue: Optional[Any] = ..., tb: Optional[Any] = ... - ) -> None: ... - __enter__: Any = ... - __exit__: Any = ... diff --git a/third-party-stubs/logbook/concurrency.pyi b/third-party-stubs/logbook/concurrency.pyi deleted file mode 100644 index 070fd741ad0..00000000000 --- a/third-party-stubs/logbook/concurrency.pyi +++ /dev/null @@ -1,51 +0,0 @@ -# Stubs for logbook.concurrency (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from _thread import _local as thread_local, get_ident as thread_get_ident -from typing import Any, Optional - -has_gevent: bool -use_gevent: bool - -def enable_gevent() -> None: ... -def is_gevent_enabled(): ... - -ThreadLock: Any -ThreadRLock: Any -thread_get_ident: Any -thread_local: Any - -def thread_get_name(): ... - -class GreenletRLock: - def __init__(self) -> None: ... - def acquire(self, blocking: int = ...): ... - def release(self) -> None: ... - __enter__: Any = ... - def __exit__(self, t: Any, v: Any, tb: Any) -> None: ... - -greenlet_get_ident = thread_get_ident -greenlet_local = thread_local - -class GreenletRLock: - def acquire(self) -> None: ... - def release(self) -> None: ... - def __enter__(self) -> None: ... - def __exit__(self, t: Any, v: Any, tb: Any) -> None: ... - -def new_fine_grained_lock(): ... - -has_contextvars: bool -context_ident_counter: Any -context_ident: Any - -def context_get_ident(): ... -def is_context_enabled(): ... - -class ContextVar: - name: Any = ... - local: Any = ... - def __init__(self, name: Any) -> None: ... 
- def set(self, value: Any) -> None: ... - def get(self, default: Optional[Any] = ...): ... diff --git a/third-party-stubs/logbook/handlers.pyi b/third-party-stubs/logbook/handlers.pyi deleted file mode 100644 index 54131084690..00000000000 --- a/third-party-stubs/logbook/handlers.pyi +++ /dev/null @@ -1,412 +0,0 @@ -# Stubs for logbook.handlers (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook.base import ContextObject -from typing import Any, Optional - -DEFAULT_FORMAT_STRING: Any -SYSLOG_FORMAT_STRING: Any -NTLOG_FORMAT_STRING: Any -TEST_FORMAT_STRING: Any -MAIL_FORMAT_STRING: Any -MAIL_RELATED_FORMAT_STRING: Any -SYSLOG_PORT: int -REGTYPE: Any - -def create_syshandler(application_name: Any, level: Any = ...): ... - -class _HandlerType(type): - def __new__(cls, name: Any, bases: Any, d: Any): ... - -class Handler(ContextObject): - stack_manager: Any = ... - blackhole: bool = ... - level: Any = ... - formatter: Any = ... - filter: Any = ... - bubble: Any = ... - def __init__( - self, level: Any = ..., filter: Optional[Any] = ..., bubble: bool = ... - ) -> None: ... - level_name: Any = ... - def format(self, record: Any): ... - def should_handle(self, record: Any): ... - def handle(self, record: Any): ... - def emit(self, record: Any) -> None: ... - def emit_batch(self, records: Any, reason: Any) -> None: ... - def close(self) -> None: ... - def handle_error(self, record: Any, exc_info: Any) -> None: ... - -class NullHandler(Handler): - blackhole: bool = ... - def __init__(self, level: Any = ..., filter: Optional[Any] = ...) -> None: ... - -class WrapperHandler(Handler): - handler: Any = ... - def __init__(self, handler: Any) -> None: ... - def __getattr__(self, name: Any): ... - def __setattr__(self, name: Any, value: Any): ... - -class StringFormatter: - format_string: Any = ... - def __init__(self, format_string: Any) -> None: ... - def format_record(self, record: Any, handler: Any): ... - def format_exception(self, record: Any): ... - def __call__(self, record: Any, handler: Any): ... - -class StringFormatterHandlerMixin: - default_format_string: Any = ... - formatter_class: Any = ... - format_string: Any = ... - def __init__(self, format_string: Any) -> None: ... - -class HashingHandlerMixin: - def hash_record_raw(self, record: Any): ... - def hash_record(self, record: Any): ... - -class LimitingHandlerMixin(HashingHandlerMixin): - record_limit: Any = ... - record_delta: Any = ... - def __init__(self, record_limit: Any, record_delta: Any) -> None: ... - def check_delivery(self, record: Any): ... - -class StreamHandler(Handler, StringFormatterHandlerMixin): - encoding: Any = ... - lock: Any = ... - stream: Any = ... - def __init__( - self, - stream: Any, - level: Any = ..., - format_string: Optional[Any] = ..., - encoding: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def __enter__(self): ... - def __exit__(self, exc_type: Any, exc_value: Any, tb: Any): ... - def ensure_stream_is_open(self) -> None: ... - def close(self) -> None: ... - def flush(self) -> None: ... - def encode(self, msg: Any): ... - def write(self, item: Any) -> None: ... - def emit(self, record: Any) -> None: ... - def should_flush(self): ... - -class FileHandler(StreamHandler): - stream: Any = ... 
- def __init__( - self, - filename: Any, - mode: str = ..., - encoding: Optional[Any] = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def write(self, item: Any) -> None: ... - def close(self) -> None: ... - def encode(self, record: Any): ... - def ensure_stream_is_open(self) -> None: ... - -class GZIPCompressionHandler(FileHandler): - def __init__( - self, - filename: Any, - encoding: Optional[Any] = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - compression_quality: int = ..., - ) -> None: ... - def write(self, item: Any) -> None: ... - def should_flush(self): ... - -class BrotliCompressionHandler(FileHandler): - def __init__( - self, - filename: Any, - encoding: Optional[Any] = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - compression_window_size: Any = ..., - compression_quality: int = ..., - ) -> None: ... - def write(self, item: Any) -> None: ... - def should_flush(self): ... - def flush(self) -> None: ... - def close(self) -> None: ... - -class MonitoringFileHandler(FileHandler): - def __init__( - self, - filename: Any, - mode: str = ..., - encoding: str = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - stream: Any = ... - def emit(self, record: Any) -> None: ... - -class StderrHandler(StreamHandler): - def __init__( - self, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - @property - def stream(self): ... - -class RotatingFileHandler(FileHandler): - max_size: Any = ... - backup_count: Any = ... - def __init__( - self, - filename: Any, - mode: str = ..., - encoding: str = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - delay: bool = ..., - max_size: Any = ..., - backup_count: int = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def should_rollover(self, record: Any, bytes: Any): ... - def perform_rollover(self) -> None: ... - def emit(self, record: Any) -> None: ... - -class TimedRotatingFileHandler(FileHandler): - date_format: Any = ... - backup_count: Any = ... - rollover_format: Any = ... - original_filename: Any = ... - timed_filename_for_current: Any = ... - def __init__( - self, - filename: Any, - mode: str = ..., - encoding: str = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - date_format: str = ..., - backup_count: int = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - timed_filename_for_current: bool = ..., - rollover_format: str = ..., - ) -> None: ... - def generate_timed_filename(self, timestamp: Any): ... - def files_to_delete(self): ... - def perform_rollover(self, new_timestamp: Any) -> None: ... - def emit(self, record: Any) -> None: ... - -class TestHandler(Handler, StringFormatterHandlerMixin): - default_format_string: Any = ... - records: Any = ... - def __init__( - self, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - force_heavy_init: bool = ..., - ) -> None: ... - def close(self) -> None: ... - def emit(self, record: Any) -> None: ... - @property - def formatted_records(self): ... - @property - def has_criticals(self): ... 
- @property - def has_errors(self): ... - @property - def has_warnings(self): ... - @property - def has_notices(self): ... - @property - def has_infos(self): ... - @property - def has_debugs(self): ... - @property - def has_traces(self): ... - def has_critical(self, *args: Any, **kwargs: Any): ... - def has_error(self, *args: Any, **kwargs: Any): ... - def has_warning(self, *args: Any, **kwargs: Any): ... - def has_notice(self, *args: Any, **kwargs: Any): ... - def has_info(self, *args: Any, **kwargs: Any): ... - def has_debug(self, *args: Any, **kwargs: Any): ... - def has_trace(self, *args: Any, **kwargs: Any): ... - -class MailHandler(Handler, StringFormatterHandlerMixin, LimitingHandlerMixin): - default_format_string: Any = ... - default_related_format_string: Any = ... - default_subject: Any = ... - max_record_cache: int = ... - record_cache_prune: float = ... - from_addr: Any = ... - recipients: Any = ... - subject: Any = ... - server_addr: Any = ... - credentials: Any = ... - secure: Any = ... - related_format_string: Any = ... - starttls: Any = ... - def __init__( - self, - from_addr: Any, - recipients: Any, - subject: Optional[Any] = ..., - server_addr: Optional[Any] = ..., - credentials: Optional[Any] = ..., - secure: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - related_format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - starttls: bool = ..., - ) -> None: ... - def get_recipients(self, record: Any): ... - def message_from_record(self, record: Any, suppressed: Any): ... - def format_related_record(self, record: Any): ... - def generate_mail(self, record: Any, suppressed: int = ...): ... - def collapse_mails(self, mail: Any, related: Any, reason: Any): ... - def get_connection(self): ... - def close_connection(self, con: Any) -> None: ... - def deliver(self, msg: Any, recipients: Any) -> None: ... - def emit(self, record: Any) -> None: ... - def emit_batch(self, records: Any, reason: Any) -> None: ... - -class GMailHandler(MailHandler): - def __init__(self, account_id: Any, password: Any, recipients: Any, **kw: Any) -> None: ... - -class SyslogHandler(Handler, StringFormatterHandlerMixin): - default_format_string: Any = ... - LOG_EMERG: int = ... - LOG_ALERT: int = ... - LOG_CRIT: int = ... - LOG_ERR: int = ... - LOG_WARNING: int = ... - LOG_NOTICE: int = ... - LOG_INFO: int = ... - LOG_DEBUG: int = ... - LOG_KERN: int = ... - LOG_USER: int = ... - LOG_MAIL: int = ... - LOG_DAEMON: int = ... - LOG_AUTH: int = ... - LOG_SYSLOG: int = ... - LOG_LPR: int = ... - LOG_NEWS: int = ... - LOG_UUCP: int = ... - LOG_CRON: int = ... - LOG_AUTHPRIV: int = ... - LOG_FTP: int = ... - LOG_LOCAL0: int = ... - LOG_LOCAL1: int = ... - LOG_LOCAL2: int = ... - LOG_LOCAL3: int = ... - LOG_LOCAL4: int = ... - LOG_LOCAL5: int = ... - LOG_LOCAL6: int = ... - LOG_LOCAL7: int = ... - facility_names: Any = ... - level_priority_map: Any = ... - application_name: Any = ... - remote_address: Any = ... - facility: Any = ... - socktype: Any = ... - enveloper: Any = ... - record_delimiter: Any = ... - connection_exception: Any = ... - def __init__( - self, - application_name: Optional[Any] = ..., - address: Optional[Any] = ..., - facility: str = ..., - socktype: Any = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - record_delimiter: Optional[Any] = ..., - ) -> None: ... 
- def encode_priority(self, record: Any): ... - def wrap_segments(self, record: Any, before: Any): ... - def unix_envelope(self, record: Any): ... - format_string: Any = ... - def net_envelope(self, record: Any): ... - def emit(self, record: Any) -> None: ... - def send_to_socket(self, data: Any) -> None: ... - def close(self) -> None: ... - -class NTEventLogHandler(Handler, StringFormatterHandlerMixin): - dllname: Any = ... - default_format_string: Any = ... - application_name: Any = ... - log_type: Any = ... - def __init__( - self, - application_name: Any, - log_type: str = ..., - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def unregister_logger(self) -> None: ... - def get_event_type(self, record: Any): ... - def get_event_category(self, record: Any): ... - def get_message_id(self, record: Any): ... - def emit(self, record: Any) -> None: ... - -class FingersCrossedHandler(Handler): - batch_emit_reason: str = ... - lock: Any = ... - buffered_records: Any = ... - buffer_size: Any = ... - def __init__( - self, - handler: Any, - action_level: Any = ..., - buffer_size: int = ..., - pull_information: bool = ..., - reset: bool = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def close(self) -> None: ... - def enqueue(self, record: Any): ... - def rollover(self, record: Any) -> None: ... - @property - def triggered(self): ... - def emit(self, record: Any) -> None: ... - -class GroupHandler(WrapperHandler): - pull_information: Any = ... - buffered_records: Any = ... - def __init__(self, handler: Any, pull_information: bool = ...) -> None: ... - def rollover(self) -> None: ... - def pop_application(self) -> None: ... - def pop_thread(self) -> None: ... - def pop_context(self) -> None: ... - def pop_greenlet(self) -> None: ... - def emit(self, record: Any) -> None: ... diff --git a/third-party-stubs/logbook/helpers.pyi b/third-party-stubs/logbook/helpers.pyi deleted file mode 100644 index 02f13f15ea0..00000000000 --- a/third-party-stubs/logbook/helpers.pyi +++ /dev/null @@ -1,42 +0,0 @@ -# Stubs for logbook.helpers (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -import os -from typing import Any, Optional - -PY2: Any -iteritems: Any -xrange: Any -xrange = range - -def u(s: Any): ... - -u: Any -integer_types: Any -string_types: Any - -def reraise(tp: Any, value: Any, tb: Optional[Any] = ...) -> None: ... -def b(x: Any): ... - -can_rename_open_file: bool - -def rename(src: Any, dst: Any) -> None: ... - -rename = os.rename - -def to_safe_json(data: Any): ... -def format_iso8601(d: Optional[Any] = ...): ... -def parse_iso8601(value: Any): ... -def get_application_name(): ... - -class cached_property: - __name__: Any = ... - __module__: Any = ... - __doc__: Any = ... - func: Any = ... - def __init__(self, func: Any, name: Optional[Any] = ..., doc: Optional[Any] = ...) -> None: ... - def __get__(self, obj: Any, type: Optional[Any] = ...): ... - -def get_iterator_next_method(it: Any): ... -def is_unicode(x: Any): ... diff --git a/third-party-stubs/logbook/more.pyi b/third-party-stubs/logbook/more.pyi deleted file mode 100644 index f0ff7af11db..00000000000 --- a/third-party-stubs/logbook/more.pyi +++ /dev/null @@ -1,148 +0,0 @@ -# Stubs for logbook.more (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. 
- -from logbook.base import RecordDispatcher -from logbook.handlers import ( - FingersCrossedHandler as FingersCrossedHandlerBase, - Handler, - StderrHandler, - StringFormatter, - StringFormatterHandlerMixin, -) -from logbook.ticketing import BackendBase -from typing import Any, Optional - -TWITTER_FORMAT_STRING: Any -TWITTER_ACCESS_TOKEN_URL: str -NEW_TWEET_URL: str - -class CouchDBBackend(BackendBase): - database: Any = ... - def setup_backend(self) -> None: ... - def record_ticket(self, record: Any, data: Any, hash: Any, app_id: Any) -> None: ... - -class TwitterFormatter(StringFormatter): - max_length: int = ... - def format_exception(self, record: Any): ... - def __call__(self, record: Any, handler: Any): ... - -class TaggingLogger(RecordDispatcher): - def __init__(self, name: Optional[Any] = ..., tags: Optional[Any] = ...) -> None: ... - def log(self, tags: Any, msg: Any, *args: Any, **kwargs: Any): ... - -class TaggingHandler(Handler): - def __init__(self, handlers: Any, filter: Optional[Any] = ..., bubble: bool = ...) -> None: ... - def emit(self, record: Any) -> None: ... - -class TwitterHandler(Handler, StringFormatterHandlerMixin): - default_format_string: Any = ... - formatter_class: Any = ... - consumer_key: Any = ... - consumer_secret: Any = ... - username: Any = ... - password: Any = ... - def __init__( - self, - consumer_key: Any, - consumer_secret: Any, - username: Any, - password: Any, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def get_oauth_token(self): ... - def make_client(self): ... - def tweet(self, status: Any): ... - def emit(self, record: Any) -> None: ... - -class SlackHandler(Handler, StringFormatterHandlerMixin): - api_token: Any = ... - channel: Any = ... - slack: Any = ... - def __init__( - self, - api_token: Any, - channel: Any, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def emit(self, record: Any) -> None: ... - -class JinjaFormatter: - template: Any = ... - def __init__(self, template: Any) -> None: ... - def __call__(self, record: Any, handler: Any): ... - -class ExternalApplicationHandler(Handler): - encoding: Any = ... - def __init__( - self, - arguments: Any, - stdin_format: Optional[Any] = ..., - encoding: str = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def emit(self, record: Any) -> None: ... - -class ColorizingStreamHandlerMixin: - def force_color(self) -> None: ... - def forbid_color(self) -> None: ... - def should_colorize(self, record: Any): ... - def get_color(self, record: Any): ... - def format(self, record: Any): ... - -class ColorizedStderrHandler(ColorizingStreamHandlerMixin, StderrHandler): - def __init__(self, *args: Any, **kwargs: Any) -> None: ... - -class FingersCrossedHandler(FingersCrossedHandlerBase): - def __init__(self, *args: Any, **kwargs: Any) -> None: ... - -class ExceptionHandler(Handler, StringFormatterHandlerMixin): - exc_type: Any = ... - def __init__( - self, - exc_type: Any, - level: Any = ..., - format_string: Optional[Any] = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def handle(self, record: Any): ... - -class DedupHandler(Handler): - def __init__(self, format_string: str = ..., *args: Any, **kwargs: Any) -> None: ... - def clear(self) -> None: ... - def pop_application(self) -> None: ... - def pop_thread(self) -> None: ... - def pop_context(self) -> None: ... 
- def pop_greenlet(self) -> None: ... - def handle(self, record: Any): ... - def flush(self) -> None: ... - -class RiemannHandler(Handler): - host: Any = ... - port: Any = ... - ttl: Any = ... - queue: Any = ... - flush_threshold: Any = ... - transport: Any = ... - def __init__( - self, - host: Any, - port: Any, - message_type: str = ..., - ttl: int = ..., - flush_threshold: int = ..., - bubble: bool = ..., - filter: Optional[Any] = ..., - level: Any = ..., - ) -> None: ... - def record_to_event(self, record: Any): ... - def emit(self, record: Any) -> None: ... diff --git a/third-party-stubs/logbook/notifiers.pyi b/third-party-stubs/logbook/notifiers.pyi deleted file mode 100644 index 9bd1fe73bb8..00000000000 --- a/third-party-stubs/logbook/notifiers.pyi +++ /dev/null @@ -1,123 +0,0 @@ -# Stubs for logbook.notifiers (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook.handlers import Handler, LimitingHandlerMixin -from typing import Any, Optional - -def create_notification_handler( - application_name: Optional[Any] = ..., level: Any = ..., icon: Optional[Any] = ... -): ... - -class NotificationBaseHandler(Handler, LimitingHandlerMixin): - application_name: Any = ... - def __init__( - self, - application_name: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def make_title(self, record: Any): ... - def make_text(self, record: Any): ... - -class GrowlHandler(NotificationBaseHandler): - def __init__( - self, - application_name: Optional[Any] = ..., - icon: Optional[Any] = ..., - host: Optional[Any] = ..., - password: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def is_sticky(self, record: Any): ... - def get_priority(self, record: Any): ... - def emit(self, record: Any) -> None: ... - -class LibNotifyHandler(NotificationBaseHandler): - icon: Any = ... - def __init__( - self, - application_name: Optional[Any] = ..., - icon: Optional[Any] = ..., - no_init: bool = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def set_notifier_icon(self, notifier: Any, icon: Any) -> None: ... - def get_expires(self, record: Any): ... - def get_urgency(self, record: Any): ... - def emit(self, record: Any) -> None: ... - -class BoxcarHandler(NotificationBaseHandler): - api_url: str = ... - email: Any = ... - password: Any = ... - def __init__( - self, - email: Any, - password: Any, - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def get_screen_name(self, record: Any): ... - def emit(self, record: Any) -> None: ... - -class NotifoHandler(NotificationBaseHandler): - application_name: Any = ... - username: Any = ... - secret: Any = ... - hide_level: Any = ... - def __init__( - self, - application_name: Optional[Any] = ..., - username: Optional[Any] = ..., - secret: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - hide_level: bool = ..., - ) -> None: ... - def emit(self, record: Any) -> None: ... 
- -class PushoverHandler(NotificationBaseHandler): - application_name: Any = ... - apikey: Any = ... - userkey: Any = ... - device: Any = ... - priority: Any = ... - sound: Any = ... - max_title_len: Any = ... - max_message_len: Any = ... - title: Any = ... - def __init__( - self, - application_name: Optional[Any] = ..., - apikey: Optional[Any] = ..., - userkey: Optional[Any] = ..., - device: Optional[Any] = ..., - priority: int = ..., - sound: Optional[Any] = ..., - record_limit: Optional[Any] = ..., - record_delta: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - max_title_len: int = ..., - max_message_len: int = ..., - ) -> None: ... - def emit(self, record: Any) -> None: ... diff --git a/third-party-stubs/logbook/queues.pyi b/third-party-stubs/logbook/queues.pyi deleted file mode 100644 index 96e757b2ee6..00000000000 --- a/third-party-stubs/logbook/queues.pyi +++ /dev/null @@ -1,154 +0,0 @@ -# Stubs for logbook.queues (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook.handlers import Handler, WrapperHandler -from typing import Any, Optional - -class RedisHandler(Handler): - redis: Any = ... - key: Any = ... - extra_fields: Any = ... - flush_threshold: Any = ... - queue: Any = ... - lock: Any = ... - push_method: Any = ... - def __init__( - self, - host: str = ..., - port: int = ..., - key: str = ..., - extra_fields: Optional[Any] = ..., - flush_threshold: int = ..., - flush_time: int = ..., - level: Any = ..., - filter: Optional[Any] = ..., - password: bool = ..., - bubble: bool = ..., - context: Optional[Any] = ..., - push_method: str = ..., - ) -> None: ... - def disable_buffering(self) -> None: ... - def emit(self, record: Any) -> None: ... - def close(self) -> None: ... - -class MessageQueueHandler(Handler): - queue: Any = ... - def __init__( - self, - uri: Optional[Any] = ..., - queue: str = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - ) -> None: ... - def export_record(self, record: Any): ... - def emit(self, record: Any) -> None: ... - def close(self) -> None: ... - -RabbitMQHandler = MessageQueueHandler - -class ZeroMQHandler(Handler): - context: Any = ... - socket: Any = ... - def __init__( - self, - uri: Optional[Any] = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - context: Optional[Any] = ..., - multi: bool = ..., - ) -> None: ... - def export_record(self, record: Any): ... - def emit(self, record: Any) -> None: ... - def close(self, linger: int = ...) -> None: ... - def __del__(self) -> None: ... - -class ThreadController: - setup: Any = ... - subscriber: Any = ... - running: bool = ... - def __init__(self, subscriber: Any, setup: Optional[Any] = ...) -> None: ... - def start(self) -> None: ... - def stop(self) -> None: ... - -class SubscriberBase: - def recv(self, timeout: Optional[Any] = ...) -> Any: ... - def dispatch_once(self, timeout: Optional[Any] = ...): ... - def dispatch_forever(self) -> None: ... - def dispatch_in_background(self, setup: Optional[Any] = ...): ... - -class MessageQueueSubscriber(SubscriberBase): - queue: Any = ... - def __init__(self, uri: Optional[Any] = ..., queue: str = ...) -> None: ... - def __del__(self) -> None: ... - def close(self) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - -RabbitMQSubscriber = MessageQueueSubscriber - -class ZeroMQSubscriber(SubscriberBase): - context: Any = ... - socket: Any = ... 
- def __init__( - self, uri: Optional[Any] = ..., context: Optional[Any] = ..., multi: bool = ... - ) -> None: ... - def __del__(self) -> None: ... - def close(self) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - -class MultiProcessingHandler(Handler): - queue: Any = ... - def __init__( - self, queue: Any, level: Any = ..., filter: Optional[Any] = ..., bubble: bool = ... - ) -> None: ... - def emit(self, record: Any) -> None: ... - -class MultiProcessingSubscriber(SubscriberBase): - queue: Any = ... - def __init__(self, queue: Optional[Any] = ...) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - -class ExecnetChannelHandler(Handler): - channel: Any = ... - def __init__( - self, channel: Any, level: Any = ..., filter: Optional[Any] = ..., bubble: bool = ... - ) -> None: ... - def emit(self, record: Any) -> None: ... - -class ExecnetChannelSubscriber(SubscriberBase): - channel: Any = ... - def __init__(self, channel: Any) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - -class TWHThreadController: - class Command: - stop: Any = ... - emit: Any = ... - emit_batch: Any = ... - wrapper_handler: Any = ... - running: bool = ... - def __init__(self, wrapper_handler: Any) -> None: ... - def start(self) -> None: ... - def stop(self) -> None: ... - -class ThreadedWrapperHandler(WrapperHandler): - queue: Any = ... - controller: Any = ... - def __init__(self, handler: Any, maxsize: int = ...) -> None: ... - def close(self) -> None: ... - def emit(self, record: Any) -> None: ... - def emit_batch(self, records: Any, reason: Any) -> None: ... - -class GroupMember(ThreadController): - queue: Any = ... - def __init__(self, subscriber: Any, queue: Any) -> None: ... - -class SubscriberGroup(SubscriberBase): - members: Any = ... - queue: Any = ... - def __init__(self, subscribers: Optional[Any] = ..., queue_limit: int = ...) -> None: ... - def add(self, subscriber: Any) -> None: ... - def recv(self, timeout: Optional[Any] = ...): ... - def stop(self) -> None: ... diff --git a/third-party-stubs/logbook/ticketing.pyi b/third-party-stubs/logbook/ticketing.pyi deleted file mode 100644 index 4435a206d26..00000000000 --- a/third-party-stubs/logbook/ticketing.pyi +++ /dev/null @@ -1,110 +0,0 @@ -# Stubs for logbook.ticketing (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -from logbook.base import LogRecord -from logbook.handlers import Handler, HashingHandlerMixin -from typing import Any, Optional - -class Ticket: - level_name: Any = ... - db: Any = ... - def __init__(self, db: Any, row: Any) -> None: ... - def last_occurrence(self): ... - def get_occurrences(self, order_by: str = ..., limit: int = ..., offset: int = ...): ... - solved: bool = ... - def solve(self) -> None: ... - def delete(self) -> None: ... - __hash__: Any = ... - def __eq__(self, other: Any): ... - def __ne__(self, other: Any): ... - -class Occurrence(LogRecord): - db: Any = ... - time: Any = ... - ticket_id: Any = ... - occurrence_id: Any = ... - def __init__(self, db: Any, row: Any) -> None: ... - -class BackendBase: - options: Any = ... - def __init__(self, **options: Any) -> None: ... - def setup_backend(self) -> None: ... - def record_ticket(self, record: Any, data: Any, hash: Any, app_id: Any) -> None: ... - def count_tickets(self) -> None: ... - def get_tickets(self, order_by: str = ..., limit: int = ..., offset: int = ...) -> None: ... - def solve_ticket(self, ticket_id: Any) -> None: ... 
- def delete_ticket(self, ticket_id: Any) -> None: ... - def get_ticket(self, ticket_id: Any) -> None: ... - def get_occurrences( - self, ticket: Any, order_by: str = ..., limit: int = ..., offset: int = ... - ) -> None: ... - -class SQLAlchemyBackend(BackendBase): - engine: Any = ... - session: Any = ... - table_prefix: Any = ... - metadata: Any = ... - def setup_backend(self) -> None: ... - tickets: Any = ... - occurrences: Any = ... - def create_tables(self): ... - def record_ticket(self, record: Any, data: Any, hash: Any, app_id: Any) -> None: ... - def count_tickets(self): ... - def get_tickets(self, order_by: str = ..., limit: int = ..., offset: int = ...): ... - def solve_ticket(self, ticket_id: Any) -> None: ... - def delete_ticket(self, ticket_id: Any) -> None: ... - def get_ticket(self, ticket_id: Any): ... - def get_occurrences( - self, ticket: Any, order_by: str = ..., limit: int = ..., offset: int = ... - ): ... - -class MongoDBBackend(BackendBase): - class _FixedTicketClass(Ticket): - @property - def ticket_id(self): ... - - class _FixedOccurrenceClass(Occurrence): - db: Any = ... - time: Any = ... - ticket_id: Any = ... - occurrence_id: Any = ... - def __init__(self, db: Any, row: Any) -> None: ... - database: Any = ... - def setup_backend(self) -> None: ... - def record_ticket(self, record: Any, data: Any, hash: Any, app_id: Any) -> None: ... - def count_tickets(self): ... - def get_tickets(self, order_by: str = ..., limit: int = ..., offset: int = ...): ... - def solve_ticket(self, ticket_id: Any) -> None: ... - def delete_ticket(self, ticket_id: Any) -> None: ... - def get_ticket(self, ticket_id: Any): ... - def get_occurrences( - self, ticket: Any, order_by: str = ..., limit: int = ..., offset: int = ... - ): ... - -class TicketingBaseHandler(Handler, HashingHandlerMixin): - hash_salt: Any = ... - def __init__( - self, hash_salt: Any, level: Any = ..., filter: Optional[Any] = ..., bubble: bool = ... - ) -> None: ... - def hash_record_raw(self, record: Any): ... - -class TicketingHandler(TicketingBaseHandler): - default_backend: Any = ... - app_id: Any = ... - def __init__( - self, - uri: Any, - app_id: str = ..., - level: Any = ..., - filter: Optional[Any] = ..., - bubble: bool = ..., - hash_salt: Optional[Any] = ..., - backend: Optional[Any] = ..., - **db_options: Any, - ) -> None: ... - db: Any = ... - def set_backend(self, cls: Any, **options: Any) -> None: ... - def process_record(self, record: Any, hash: Any): ... - def record_ticket(self, record: Any, data: Any, hash: Any) -> None: ... - def emit(self, record: Any) -> None: ... diff --git a/third-party-stubs/logbook/utils.pyi b/third-party-stubs/logbook/utils.pyi deleted file mode 100644 index 27f9bb5d818..00000000000 --- a/third-party-stubs/logbook/utils.pyi +++ /dev/null @@ -1,39 +0,0 @@ -# Stubs for logbook.utils (Python 3) -# -# NOTE: This dynamically typed stub was automatically generated by stubgen. - -import threading -from .base import DEBUG, Logger -from .helpers import string_types -from typing import Any, Optional - -class _SlowContextNotifier: - timer: Any = ... - def __init__(self, threshold: Any, func: Any) -> None: ... - def __enter__(self): ... - def __exit__(self, *_: Any) -> None: ... - -def logged_if_slow(*args: Any, **kwargs: Any): ... - -class _Local(threading.local): - enabled: bool = ... - -def suppressed_deprecations() -> None: ... -def forget_deprecation_locations() -> None: ... -def log_deprecation_message(message: Any, frame_correction: int = ...) -> None: ... 
- -class _DeprecatedFunction: - def __init__( - self, func: Any, message: Any, obj: Optional[Any] = ..., objtype: Optional[Any] = ... - ) -> None: ... - def __call__(self, *args: Any, **kwargs: Any): ... - def __get__(self, obj: Any, objtype: Any): ... - def bound_to(self, obj: Any, objtype: Any): ... - @property - def __name__(self): ... - @property - def __doc__(self): ... - @__doc__.setter - def __doc__(self, doc: Any) -> None: ... - -def deprecated(func: Optional[Any] = ..., message: Optional[Any] = ...): ... diff --git a/third-party-stubs/mashumaro/__init__.pyi b/third-party-stubs/mashumaro/__init__.pyi index 59b16d3d3aa..0a67966c158 100644 --- a/third-party-stubs/mashumaro/__init__.pyi +++ b/third-party-stubs/mashumaro/__init__.pyi @@ -1,5 +1,3 @@ from mashumaro.exceptions import MissingField as MissingField from mashumaro.helper import field_options as field_options, pass_through as pass_through from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin -from mashumaro.mixins.msgpack import DataClassMessagePackMixin as DataClassMessagePackMixin -from mashumaro.mixins.json import DataClassJSONMixin as DataClassJSONMixin diff --git a/third-party-stubs/mashumaro/config.pyi b/third-party-stubs/mashumaro/config.pyi index 8263ceb46fe..7b30709dd9f 100644 --- a/third-party-stubs/mashumaro/config.pyi +++ b/third-party-stubs/mashumaro/config.pyi @@ -1,31 +1,26 @@ +from mashumaro.core.const import Sentinel from mashumaro.dialect import Dialect -from mashumaro.types import SerializationStrategy +from mashumaro.types import Discriminator, SerializationStrategy from typing import Any, Callable, Dict, List, Optional, Type, Union -from mashumaro.core.const import PEP_586_COMPATIBLE - -if PEP_586_COMPATIBLE: - from typing import Literal # type: ignore -else: - from typing_extensions import Literal # type: ignore +from typing_extensions import Literal TO_DICT_ADD_BY_ALIAS_FLAG: str TO_DICT_ADD_OMIT_NONE_FLAG: str ADD_DIALECT_SUPPORT: str - -CodeGenerationOption = Literal[ - "TO_DICT_ADD_BY_ALIAS_FLAG", - "TO_DICT_ADD_OMIT_NONE_FLAG", - "ADD_DIALECT_SUPPORT", -] - +ADD_SERIALIZATION_CONTEXT: str SerializationStrategyValueType = Union[SerializationStrategy, Dict[str, Union[str, Callable]]] class BaseConfig: - debug: bool = ... - code_generation_options: List[str] = ... - serialization_strategy: Dict[Any, SerializationStrategyValueType] = ... - aliases: Dict[str, str] = ... - serialize_by_alias: bool = ... - namedtuple_as_dict: bool = ... - allow_postponed_evaluation: bool = ... - dialect: Optional[Type[Dialect]] = ... 
+ debug: bool + code_generation_options: List[str] + serialization_strategy: Dict[Any, SerializationStrategyValueType] + aliases: Dict[str, str] + serialize_by_alias: bool + namedtuple_as_dict: bool + allow_postponed_evaluation: bool + dialect: Optional[Type[Dialect]] + omit_none: Union[bool, Sentinel.MISSING] + orjson_options: Optional[int] + json_schema: Dict[str, Any] + discriminator: Optional[Discriminator] + lazy_compilation: bool diff --git a/third-party-stubs/mashumaro/core/const.pyi b/third-party-stubs/mashumaro/core/const.pyi index c76e457dfe1..dfcd13587ff 100644 --- a/third-party-stubs/mashumaro/core/const.pyi +++ b/third-party-stubs/mashumaro/core/const.pyi @@ -1,13 +1,17 @@ -from typing import Any +import enum +from _typeshed import Incomplete -PY_36: Any -PY_37: Any -PY_38: Any -PY_39: Any -PY_310: Any -PY_37_MIN: Any -PY_38_MIN: Any -PY_39_MIN: Any -PY_310_MIN = PY_310 +PY_37: Incomplete +PY_38: Incomplete +PY_39: Incomplete +PY_310: Incomplete +PY_311_MIN: Incomplete +PY_310_MIN: Incomplete +PY_39_MIN: Incomplete +PY_38_MIN: Incomplete +PY_37_MIN: Incomplete PEP_585_COMPATIBLE = PY_39_MIN PEP_586_COMPATIBLE = PY_38_MIN + +class Sentinel(enum.Enum): + MISSING: Incomplete diff --git a/third-party-stubs/mashumaro/core/helpers.pyi b/third-party-stubs/mashumaro/core/helpers.pyi index 326e5cccf4c..3470d4162f9 100644 --- a/third-party-stubs/mashumaro/core/helpers.pyi +++ b/third-party-stubs/mashumaro/core/helpers.pyi @@ -1,3 +1,10 @@ import datetime +from _typeshed import Incomplete + +UTC_OFFSET_PATTERN: str def parse_timezone(s: str) -> datetime.timezone: ... + +class ConfigValue: + name: Incomplete + def __init__(self, name: str) -> None: ... diff --git a/third-party-stubs/mashumaro/core/meta/builder.pyi b/third-party-stubs/mashumaro/core/meta/builder.pyi deleted file mode 100644 index 99bc7f98174..00000000000 --- a/third-party-stubs/mashumaro/core/meta/builder.pyi +++ /dev/null @@ -1,115 +0,0 @@ -from mashumaro.core.helpers import * -import types -import typing -from base64 import decodebytes as decodebytes, encodebytes as encodebytes -from dataclasses import Field -from mashumaro.config import ( - ADD_DIALECT_SUPPORT as ADD_DIALECT_SUPPORT, - BaseConfig as BaseConfig, - TO_DICT_ADD_BY_ALIAS_FLAG as TO_DICT_ADD_BY_ALIAS_FLAG, - TO_DICT_ADD_OMIT_NONE_FLAG as TO_DICT_ADD_OMIT_NONE_FLAG, -) -from mashumaro.core.const import PY_39_MIN as PY_39_MIN -from mashumaro.core.meta.helpers import ( - get_args as get_args, - get_class_that_defines_field as get_class_that_defines_field, - get_class_that_defines_method as get_class_that_defines_method, - get_literal_values as get_literal_values, - get_name_error_name as get_name_error_name, - get_type_origin as get_type_origin, - is_class_var as is_class_var, - is_dataclass_dict_mixin as is_dataclass_dict_mixin, - is_dataclass_dict_mixin_subclass as is_dataclass_dict_mixin_subclass, - is_dialect_subclass as is_dialect_subclass, - is_generic as is_generic, - is_init_var as is_init_var, - is_literal as is_literal, - is_named_tuple as is_named_tuple, - is_new_type as is_new_type, - is_optional as is_optional, - is_special_typing_primitive as is_special_typing_primitive, - is_type_var as is_type_var, - is_type_var_any as is_type_var_any, - is_typed_dict as is_typed_dict, - is_union as is_union, - not_none_type_arg as not_none_type_arg, - resolve_type_vars as resolve_type_vars, - type_name as type_name, -) -from mashumaro.core.meta.patch import patch_fromisoformat as patch_fromisoformat -from mashumaro.dialect import Dialect as Dialect -from 
mashumaro.exceptions import ( - BadDialect as BadDialect, - BadHookSignature as BadHookSignature, - InvalidFieldValue as InvalidFieldValue, - MissingField as MissingField, - ThirdPartyModuleNotFoundError as ThirdPartyModuleNotFoundError, - UnresolvedTypeReferenceError as UnresolvedTypeReferenceError, - UnserializableDataError as UnserializableDataError, - UnserializableField as UnserializableField, - UnsupportedDeserializationEngine as UnsupportedDeserializationEngine, - UnsupportedSerializationEngine as UnsupportedSerializationEngine, -) -from mashumaro.helper import pass_through as pass_through -from mashumaro.types import ( - GenericSerializableType as GenericSerializableType, - SerializableType as SerializableType, - SerializationStrategy as SerializationStrategy, -) -from typing import Any - -NoneType: Any -__PRE_SERIALIZE__: str -__PRE_DESERIALIZE__: str -__POST_SERIALIZE__: str -__POST_DESERIALIZE__: str - -class CodeLines: - def __init__(self) -> None: ... - def append(self, line: str) -> None: ... - def indent(self) -> typing.Generator[None, None, None]: ... - def as_text(self) -> str: ... - def reset(self) -> None: ... - -class CodeBuilder: - cls: Any = ... - lines: Any = ... - globals: Any = ... - type_vars: Any = ... - field_classes: Any = ... - initial_arg_types: Any = ... - dialect: Any = ... - allow_postponed_evaluation: Any = ... - def __init__( - self, - cls: Any, - arg_types: typing.Tuple = ..., - dialect: typing.Optional[typing.Type[Dialect]] = ..., - first_method: str = ..., - allow_postponed_evaluation: bool = ..., - ) -> None: ... - def reset(self) -> None: ... - @property - def namespace(self) -> typing.Dict[typing.Any, typing.Any]: ... - @property - def annotations(self) -> typing.Dict[str, typing.Any]: ... - @property - def field_types(self) -> typing.Dict[str, typing.Any]: ... - @property - def dataclass_fields(self) -> typing.Dict[str, Field]: ... - @property - def metadatas(self) -> typing.Dict[str, typing.Mapping[str, typing.Any]]: ... - def get_field_default(self, name: str) -> typing.Any: ... - def ensure_module_imported(self, module: types.ModuleType) -> None: ... - def add_line(self, line: str) -> None: ... - def indent(self) -> typing.Generator[None, None, None]: ... - def compile(self) -> None: ... - def get_declared_hook(self, method_name: str) -> typing.Any: ... - def add_from_dict(self) -> None: ... - def get_config(self, cls: Any = ...) -> typing.Type[BaseConfig]: ... - def get_to_dict_flags(self, cls: Any = ...) -> str: ... - def get_from_dict_flags(self, cls: Any = ...) -> str: ... - def get_to_dict_default_flag_values(self, cls: Any = ...) -> str: ... - def get_from_dict_default_flag_values(self, cls: Any = ...) -> str: ... - def is_code_generation_option_enabled(self, option: str, cls: Any = ...) -> bool: ... - def add_to_dict(self) -> None: ... 
diff --git a/third-party-stubs/mashumaro/core/meta/code/__init__.pyi b/third-party-stubs/mashumaro/core/meta/code/__init__.pyi new file mode 100644 index 00000000000..e69de29bb2d diff --git a/third-party-stubs/mashumaro/core/meta/code/builder.pyi b/third-party-stubs/mashumaro/core/meta/code/builder.pyi new file mode 100644 index 00000000000..9d575b79467 --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/code/builder.pyi @@ -0,0 +1,146 @@ +import types +import typing +from _typeshed import Incomplete +from collections.abc import Generator +from dataclasses import Field +from mashumaro.config import ( + ADD_DIALECT_SUPPORT as ADD_DIALECT_SUPPORT, + ADD_SERIALIZATION_CONTEXT as ADD_SERIALIZATION_CONTEXT, + BaseConfig as BaseConfig, + SerializationStrategyValueType as SerializationStrategyValueType, + TO_DICT_ADD_BY_ALIAS_FLAG as TO_DICT_ADD_BY_ALIAS_FLAG, + TO_DICT_ADD_OMIT_NONE_FLAG as TO_DICT_ADD_OMIT_NONE_FLAG, +) +from mashumaro.core.const import Sentinel as Sentinel +from mashumaro.core.helpers import ConfigValue as ConfigValue +from mashumaro.core.meta.code.lines import CodeLines as CodeLines +from mashumaro.core.meta.helpers import ( + get_args as get_args, + get_class_that_defines_field as get_class_that_defines_field, + get_class_that_defines_method as get_class_that_defines_method, + get_literal_values as get_literal_values, + get_name_error_name as get_name_error_name, + hash_type_args as hash_type_args, + is_class_var as is_class_var, + is_dataclass_dict_mixin as is_dataclass_dict_mixin, + is_dialect_subclass as is_dialect_subclass, + is_init_var as is_init_var, + is_literal as is_literal, + is_optional as is_optional, + is_type_var_any as is_type_var_any, + resolve_type_params as resolve_type_params, + substitute_type_params as substitute_type_params, + type_name as type_name, +) +from mashumaro.core.meta.types.common import FieldContext as FieldContext, ValueSpec as ValueSpec +from mashumaro.core.meta.types.pack import PackerRegistry as PackerRegistry +from mashumaro.core.meta.types.unpack import ( + SubtypeUnpackerBuilder as SubtypeUnpackerBuilder, + UnpackerRegistry as UnpackerRegistry, +) +from mashumaro.dialect import Dialect as Dialect +from mashumaro.exceptions import ( + BadDialect as BadDialect, + BadHookSignature as BadHookSignature, + InvalidFieldValue as InvalidFieldValue, + MissingDiscriminatorError as MissingDiscriminatorError, + MissingField as MissingField, + SuitableVariantNotFoundError as SuitableVariantNotFoundError, + ThirdPartyModuleNotFoundError as ThirdPartyModuleNotFoundError, + UnresolvedTypeReferenceError as UnresolvedTypeReferenceError, + UnserializableDataError as UnserializableDataError, + UnserializableField as UnserializableField, + UnsupportedDeserializationEngine as UnsupportedDeserializationEngine, + UnsupportedSerializationEngine as UnsupportedSerializationEngine, +) +from mashumaro.types import Discriminator as Discriminator + +__PRE_SERIALIZE__: str +__PRE_DESERIALIZE__: str +__POST_SERIALIZE__: str +__POST_DESERIALIZE__: str + +class CodeBuilder: + cls: Incomplete + lines: Incomplete + globals: Incomplete + resolved_type_params: Incomplete + field_classes: Incomplete + initial_type_args: Incomplete + dialect: Incomplete + default_dialect: Incomplete + allow_postponed_evaluation: Incomplete + format_name: Incomplete + decoder: Incomplete + encoder: Incomplete + encoder_kwargs: Incomplete + def __init__( + self, + cls: typing.Type, + type_args: typing.Tuple[typing.Type, ...] 
= ..., + dialect: typing.Optional[typing.Type[Dialect]] = ..., + first_method: str = ..., + allow_postponed_evaluation: bool = ..., + format_name: str = ..., + decoder: typing.Optional[typing.Any] = ..., + encoder: typing.Optional[typing.Any] = ..., + encoder_kwargs: typing.Optional[typing.Dict[str, typing.Any]] = ..., + default_dialect: typing.Optional[typing.Type[Dialect]] = ..., + ) -> None: ... + def reset(self) -> None: ... + @property + def namespace(self) -> typing.Mapping[typing.Any, typing.Any]: ... + @property + def annotations(self) -> typing.Dict[str, typing.Any]: ... + def get_field_resolved_type_params( + self, field_name: str + ) -> typing.Dict[typing.Type, typing.Type]: ... + def get_field_types(self, include_extras: bool = ...) -> typing.Dict[str, typing.Any]: ... + @property + def dataclass_fields(self) -> typing.Dict[str, Field]: ... + @property + def metadatas(self) -> typing.Dict[str, typing.Mapping[str, typing.Any]]: ... + def get_field_default(self, name: str) -> typing.Any: ... + def add_type_modules(self, *types_: typing.Type) -> None: ... + def ensure_module_imported(self, module: types.ModuleType) -> None: ... + def ensure_object_imported( + self, obj: typing.Any, name: typing.Optional[str] = ... + ) -> None: ... + def add_line(self, line: str) -> None: ... + def indent(self, expr: typing.Optional[str] = ...) -> typing.Generator[None, None, None]: ... + def compile(self) -> None: ... + def get_declared_hook(self, method_name: str) -> typing.Any: ... + def add_unpack_method(self) -> None: ... + def get_config(self, cls: Incomplete | None = ..., look_in_parents: bool = ...): ... + def get_discriminator(self) -> typing.Optional[Discriminator]: ... + def get_pack_method_flags( + self, cls: typing.Optional[typing.Type] = ..., pass_encoder: bool = ... + ) -> str: ... + def get_unpack_method_flags( + self, cls: typing.Optional[typing.Type] = ..., pass_decoder: bool = ... + ) -> str: ... + def get_pack_method_default_flag_values( + self, cls: typing.Optional[typing.Type] = ..., pass_encoder: bool = ... + ) -> str: ... + def get_unpack_method_default_flag_values(self, pass_decoder: bool = ...) -> str: ... + def is_code_generation_option_enabled( + self, option: str, cls: typing.Optional[typing.Type] = ... + ) -> bool: ... + @classmethod + def get_unpack_method_name( + cls, + type_args: typing.Iterable = ..., + format_name: str = ..., + decoder: typing.Optional[typing.Any] = ..., + ) -> str: ... + @classmethod + def get_pack_method_name( + cls, + type_args: typing.Tuple[typing.Type, ...] = ..., + format_name: str = ..., + encoder: typing.Optional[typing.Any] = ..., + ) -> str: ... + def add_pack_method(self) -> None: ... + def iter_serialization_strategies( + self, metadata, ftype + ) -> Generator[Incomplete, None, None]: ... diff --git a/third-party-stubs/mashumaro/core/meta/code/lines.pyi b/third-party-stubs/mashumaro/core/meta/code/lines.pyi new file mode 100644 index 00000000000..4d9bf5039c7 --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/code/lines.pyi @@ -0,0 +1,8 @@ +from typing import Generator, Optional + +class CodeLines: + def __init__(self) -> None: ... + def append(self, line: str) -> None: ... + def indent(self, expr: Optional[str] = ...) -> Generator[None, None, None]: ... + def as_text(self) -> str: ... + def reset(self) -> None: ... 
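Context for the builder stub above: it imports ADD_SERIALIZATION_CONTEXT from mashumaro.config, and the DataClassDictMixin stub later in this diff threads an Optional[Dict] context argument through __pre_serialize__/__post_serialize__. The following is a minimal, illustrative sketch of how that mashumaro option is normally used; the Node class and the "redact_names" key are invented for the example (they are not part of this PR), and code_generation_options is mashumaro's standard opt-in mechanism rather than anything defined here.

from dataclasses import dataclass
from typing import Any, Dict, Optional

from mashumaro import DataClassDictMixin
from mashumaro.config import ADD_SERIALIZATION_CONTEXT, BaseConfig


@dataclass
class Node(DataClassDictMixin):
    name: str

    class Config(BaseConfig):
        # Opting in makes the generated to_dict()/from_dict() accept a
        # `context` keyword and pass it through to the hooks below.
        code_generation_options = [ADD_SERIALIZATION_CONTEXT]

    def __post_serialize__(
        self, d: Dict[Any, Any], context: Optional[Dict] = None
    ) -> Dict[Any, Any]:
        # `context` is whatever dict the caller passed to to_dict(context=...).
        if context and context.get("redact_names"):
            d["name"] = "***"
        return d


print(Node("users").to_dict())                                # {'name': 'users'}
print(Node("users").to_dict(context={"redact_names": True}))  # {'name': '***'}

This context plumbing is why the updated dict.pyi stub further down adds the extra parameter to both serialization hooks.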
diff --git a/third-party-stubs/mashumaro/core/meta/helpers.pyi b/third-party-stubs/mashumaro/core/meta/helpers.pyi index 112a5a078ba..7ec3124e0ac 100644 --- a/third-party-stubs/mashumaro/core/meta/helpers.pyi +++ b/third-party-stubs/mashumaro/core/meta/helpers.pyi @@ -1,37 +1,58 @@ import typing -from typing import Any +from typing import Any, Dict, Optional, Sequence, Tuple, Type -def get_type_origin(t: Any): ... -def get_generic_name(t: Any, short: bool = ...) -> str: ... -def get_args(t: typing.Any) -> typing.Tuple[typing.Any, ...]: ... -def get_literal_values(t: typing.Any) -> Any: ... +def get_type_origin(typ: Type) -> Type: ... +def get_generic_name(typ: Type, short: bool = ...) -> str: ... +def get_args(typ: Optional[Type]) -> Tuple[Type, ...]: ... +def get_literal_values(typ: Type) -> Tuple[Any, ...]: ... def type_name( - t: typing.Any, + typ: Optional[Type], short: bool = ..., - type_vars: typing.Dict[str, typing.Any] = ..., + resolved_type_params: Optional[Dict[Type, Type]] = ..., is_type_origin: bool = ..., none_type_as_none: bool = ..., ) -> str: ... -def is_special_typing_primitive(t: Any) -> bool: ... -def is_generic(t: Any): ... -def is_typed_dict(t: Any) -> bool: ... -def is_named_tuple(t: Any) -> bool: ... -def is_new_type(t: Any) -> bool: ... -def is_union(t: Any): ... -def is_optional(t: Any, type_vars: typing.Dict[str, typing.Any] = ...) -> bool: ... -def is_annotated(t: Any) -> bool: ... -def is_literal(t: Any) -> bool: ... +def is_special_typing_primitive(typ: Any) -> bool: ... +def is_generic(typ: Type) -> bool: ... +def is_typed_dict(typ: Type) -> bool: ... +def is_named_tuple(typ: Type) -> bool: ... +def is_new_type(typ: Type) -> bool: ... +def is_union(typ: Type) -> bool: ... +def is_optional(typ: Type, resolved_type_params: Optional[Dict[Type, Type]] = ...) -> bool: ... +def is_annotated(typ: Type) -> bool: ... +def get_type_annotations(typ: Type) -> Sequence[Any]: ... +def is_literal(typ: Type) -> bool: ... def not_none_type_arg( - args: typing.Tuple[typing.Any, ...], type_vars: typing.Dict[str, typing.Any] = ... -) -> Any: ... -def is_type_var(t: Any) -> bool: ... -def is_type_var_any(t: Any) -> bool: ... -def is_class_var(t: Any) -> bool: ... -def is_init_var(t: Any) -> bool: ... -def get_class_that_defines_method(method_name: Any, cls: Any): ... -def get_class_that_defines_field(field_name: Any, cls: Any): ... -def is_dataclass_dict_mixin(t: Any) -> bool: ... -def is_dataclass_dict_mixin_subclass(t: Any) -> bool: ... -def resolve_type_vars(cls, arg_types: Any = ..., is_cls_created: bool = ...): ... + type_args: Tuple[Type, ...], resolved_type_params: Optional[Dict[Type, Type]] = ... +) -> Optional[Type]: ... +def is_type_var(typ: Type) -> bool: ... +def is_type_var_any(typ: Type) -> bool: ... +def is_class_var(typ: Type) -> bool: ... +def is_final(typ: Type) -> bool: ... +def is_init_var(typ: Type) -> bool: ... +def get_class_that_defines_method(method_name: str, cls: Type) -> Optional[Type]: ... +def get_class_that_defines_field(field_name: str, cls: Type) -> Optional[Type]: ... +def is_dataclass_dict_mixin(typ: Type) -> bool: ... +def is_dataclass_dict_mixin_subclass(typ: Type) -> bool: ... +def collect_type_params(typ: Type) -> Sequence[Type]: ... +def resolve_type_params( + typ: Type, type_args: Sequence[Type] = ..., include_bases: bool = ... +) -> Dict[Type, Dict[Type, Type]]: ... +def substitute_type_params(typ: Type, substitutions: Dict[Type, Type]) -> Type: ... def get_name_error_name(e: NameError) -> str: ... 
-def is_dialect_subclass(t: Any) -> bool: ... +def is_dialect_subclass(typ: Type) -> bool: ... +def is_self(typ: Type) -> bool: ... +def is_required(typ: Type) -> bool: ... +def is_not_required(typ: Type) -> bool: ... +def get_function_arg_annotation( + function: typing.Callable[[Any], Any], + arg_name: typing.Optional[str] = ..., + arg_pos: typing.Optional[int] = ..., +) -> typing.Type: ... +def get_function_return_annotation( + function: typing.Callable[[typing.Any], typing.Any] +) -> typing.Type: ... +def is_unpack(typ: Type) -> bool: ... +def is_type_var_tuple(typ: Type) -> bool: ... +def hash_type_args(type_args: typing.Iterable[typing.Type]) -> str: ... +def iter_all_subclasses(cls) -> typing.Iterator[Type]: ... diff --git a/third-party-stubs/mashumaro/core/meta/mixin.pyi b/third-party-stubs/mashumaro/core/meta/mixin.pyi new file mode 100644 index 00000000000..1d6734e0663 --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/mixin.pyi @@ -0,0 +1,13 @@ +from mashumaro.dialect import Dialect +from typing import Any, Dict, Optional, Tuple, Type + +def compile_mixin_packer( + cls, + format_name: str = ..., + dialect: Optional[Type[Dialect]] = ..., + encoder: Any = ..., + encoder_kwargs: Optional[Dict[str, Dict[str, Tuple[str, Any]]]] = ..., +) -> None: ... +def compile_mixin_unpacker( + cls, format_name: str = ..., dialect: Optional[Type[Dialect]] = ..., decoder: Any = ... +) -> None: ... diff --git a/third-party-stubs/mashumaro/core/meta/patch.pyi b/third-party-stubs/mashumaro/core/meta/patch.pyi deleted file mode 100644 index d3fa7446eb6..00000000000 --- a/third-party-stubs/mashumaro/core/meta/patch.pyi +++ /dev/null @@ -1 +0,0 @@ -def patch_fromisoformat() -> None: ... diff --git a/third-party-stubs/mashumaro/core/meta/types/__init__.pyi b/third-party-stubs/mashumaro/core/meta/types/__init__.pyi new file mode 100644 index 00000000000..e69de29bb2d diff --git a/third-party-stubs/mashumaro/core/meta/types/common.pyi b/third-party-stubs/mashumaro/core/meta/types/common.pyi new file mode 100644 index 00000000000..68ced55372e --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/types/common.pyi @@ -0,0 +1,67 @@ +from _typeshed import Incomplete +from functools import cached_property +from mashumaro.core.const import PEP_585_COMPATIBLE as PEP_585_COMPATIBLE +from mashumaro.core.meta.code.builder import CodeBuilder as CodeBuilder +from mashumaro.core.meta.helpers import ( + get_type_origin as get_type_origin, + is_annotated as is_annotated, + is_generic as is_generic, + type_name as type_name, +) +from mashumaro.exceptions import ( + UnserializableDataError as UnserializableDataError, + UnserializableField as UnserializableField, +) +from typing import Any, Dict, Mapping, Optional, Sequence, Type, TypeVar +from typing_extensions import TypeAlias + +cached_property = property +NoneType: Incomplete +Expression: TypeAlias +P: Incomplete +T = TypeVar("T") + +class ExpressionWrapper: + expression: Incomplete + def __init__(self, expression: str) -> None: ... + +PROPER_COLLECTION_TYPES: Dict[Type, str] + +class FieldContext: + name: str + metadata: Mapping + def copy(self, **changes: Any) -> FieldContext: ... + def __init__(self, name, metadata) -> None: ... + +class ValueSpec: + type: Type + origin_type: Type + expression: Expression + builder: CodeBuilder + field_ctx: FieldContext + could_be_none: bool + annotated_type: Optional[Type] + def __setattr__(self, key: str, value: Any) -> None: ... + def copy(self, **changes: Any) -> ValueSpec: ... 
+ @cached_property + def annotations(self) -> Sequence[str]: ... + def __init__( + self, type, expression, builder, field_ctx, could_be_none, annotated_type + ) -> None: ... + +ValueSpecExprCreator: TypeAlias + +class Registry: + def register(self, function: ValueSpecExprCreator) -> ValueSpecExprCreator: ... + def get(self, spec: ValueSpec) -> Expression: ... + def __init__(self, _registry) -> None: ... + +def ensure_generic_collection(spec: ValueSpec) -> bool: ... +def ensure_collection_type_args_supported( + collection_type: Type, type_args: Sequence[Type] +) -> bool: ... +def ensure_generic_collection_subclass(spec: ValueSpec, *checked_types: Type) -> bool: ... +def ensure_generic_mapping(spec: ValueSpec, args: Sequence[Type], checked_type: Type) -> bool: ... +def expr_or_maybe_none(spec: ValueSpec, new_expr: Expression) -> Expression: ... +def random_hex() -> str: ... +def clean_id(value: str) -> str: ... diff --git a/third-party-stubs/mashumaro/core/meta/types/pack.pyi b/third-party-stubs/mashumaro/core/meta/types/pack.pyi new file mode 100644 index 00000000000..3231d8873f0 --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/types/pack.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +PackerRegistry: Incomplete diff --git a/third-party-stubs/mashumaro/core/meta/types/unpack.pyi b/third-party-stubs/mashumaro/core/meta/types/unpack.pyi new file mode 100644 index 00000000000..47020521425 --- /dev/null +++ b/third-party-stubs/mashumaro/core/meta/types/unpack.pyi @@ -0,0 +1,34 @@ +import abc +from _typeshed import Incomplete +from abc import ABC, abstractmethod +from mashumaro.core.meta.types.common import ValueSpec +from mashumaro.types import Discriminator +from typing import Optional, Tuple, Type + +UnpackerRegistry: Incomplete + +class AbstractUnpackerBuilder(ABC, metaclass=abc.ABCMeta): + @abstractmethod + def get_method_prefix(self) -> str: ... + def build(self, spec: ValueSpec) -> str: ... + +class UnionUnpackerBuilder(AbstractUnpackerBuilder): + union_args: Incomplete + def __init__(self, args: Tuple[Type, ...]) -> None: ... + def get_method_prefix(self) -> str: ... + +class TypeVarUnpackerBuilder(UnionUnpackerBuilder): + def get_method_prefix(self) -> str: ... + +class LiteralUnpackerBuilder(AbstractUnpackerBuilder): + def get_method_prefix(self) -> str: ... + +class DiscriminatedUnionUnpackerBuilder(AbstractUnpackerBuilder): + discriminator: Incomplete + base_variants: Incomplete + def __init__( + self, discriminator: Discriminator, base_variants: Optional[Tuple[Type, ...]] = ... + ) -> None: ... + def get_method_prefix(self) -> str: ... + +class SubtypeUnpackerBuilder(DiscriminatedUnionUnpackerBuilder): ... diff --git a/third-party-stubs/mashumaro/dialect.pyi b/third-party-stubs/mashumaro/dialect.pyi index 330c9cfa1c1..3f93bcfa2ce 100644 --- a/third-party-stubs/mashumaro/dialect.pyi +++ b/third-party-stubs/mashumaro/dialect.pyi @@ -1,7 +1,10 @@ +from mashumaro.core.const import Sentinel from mashumaro.types import SerializationStrategy -from typing import Any, Callable, Dict, Union +from typing import Callable, Dict, Union +from typing_extensions import Literal SerializationStrategyValueType = Union[SerializationStrategy, Dict[str, Union[str, Callable]]] class Dialect: - serialization_strategy: Dict[Any, SerializationStrategyValueType] = ... 
+ serialization_strategy: Dict[str, SerializationStrategyValueType] + omit_none: Union[bool, Sentinel.MISSING] diff --git a/third-party-stubs/mashumaro/dialects/msgpack.pyi b/third-party-stubs/mashumaro/dialects/msgpack.pyi deleted file mode 100644 index f28b7c38283..00000000000 --- a/third-party-stubs/mashumaro/dialects/msgpack.pyi +++ /dev/null @@ -1,5 +0,0 @@ -from mashumaro.dialect import Dialect -from typing import Any - -class MessagePackDialect(Dialect): - serialization_strategy: Any = ... diff --git a/third-party-stubs/mashumaro/exceptions.pyi b/third-party-stubs/mashumaro/exceptions.pyi index 093eb7aa71f..d4c536a2b20 100644 --- a/third-party-stubs/mashumaro/exceptions.pyi +++ b/third-party-stubs/mashumaro/exceptions.pyi @@ -1,75 +1,91 @@ +from _typeshed import Incomplete from mashumaro.core.meta.helpers import type_name as type_name -from typing import Any, Optional +from typing import Any, Optional, Type class MissingField(LookupError): - field_name: Any = ... - field_type: Any = ... - holder_class: Any = ... - def __init__(self, field_name: Any, field_type: Any, holder_class: Any) -> None: ... + field_name: Incomplete + field_type: Incomplete + holder_class: Incomplete + def __init__(self, field_name: str, field_type: Type, holder_class: Type) -> None: ... @property - def field_type_name(self): ... + def field_type_name(self) -> str: ... @property - def holder_class_name(self): ... + def holder_class_name(self) -> str: ... class UnserializableDataError(TypeError): ... class UnserializableField(UnserializableDataError): - field_name: Any = ... - field_type: Any = ... - holder_class: Any = ... - msg: Any = ... + field_name: Incomplete + field_type: Incomplete + holder_class: Incomplete + msg: Incomplete def __init__( - self, field_name: Any, field_type: Any, holder_class: Any, msg: Optional[Any] = ... + self, field_name: str, field_type: Type, holder_class: Type, msg: Optional[str] = ... ) -> None: ... @property - def field_type_name(self): ... + def field_type_name(self) -> str: ... @property - def holder_class_name(self): ... + def holder_class_name(self) -> str: ... class UnsupportedSerializationEngine(UnserializableField): def __init__( - self, field_name: Any, field_type: Any, holder_class: Any, engine: Any + self, field_name: str, field_type: Type, holder_class: Type, engine: Any ) -> None: ... class UnsupportedDeserializationEngine(UnserializableField): def __init__( - self, field_name: Any, field_type: Any, holder_class: Any, engine: Any + self, field_name: str, field_type: Type, holder_class: Type, engine: Any ) -> None: ... class InvalidFieldValue(ValueError): - field_name: Any = ... - field_type: Any = ... - field_value: Any = ... - holder_class: Any = ... - msg: Any = ... + field_name: Incomplete + field_type: Incomplete + field_value: Incomplete + holder_class: Incomplete + msg: Incomplete def __init__( self, - field_name: Any, - field_type: Any, + field_name: str, + field_type: Type, field_value: Any, - holder_class: Any, - msg: Optional[Any] = ..., + holder_class: Type, + msg: Optional[str] = ..., ) -> None: ... @property - def field_type_name(self): ... + def field_type_name(self) -> str: ... @property - def holder_class_name(self): ... + def holder_class_name(self) -> str: ... + +class MissingDiscriminatorError(LookupError): + field_name: Incomplete + def __init__(self, field_name: str) -> None: ... 
+ +class SuitableVariantNotFoundError(ValueError): + variants_type: Incomplete + discriminator_name: Incomplete + discriminator_value: Incomplete + def __init__( + self, + variants_type: Type, + discriminator_name: Optional[str] = ..., + discriminator_value: Any = ..., + ) -> None: ... class BadHookSignature(TypeError): ... class ThirdPartyModuleNotFoundError(ModuleNotFoundError): - module_name: Any = ... - field_name: Any = ... - holder_class: Any = ... - def __init__(self, module_name: Any, field_name: Any, holder_class: Any) -> None: ... + module_name: Incomplete + field_name: Incomplete + holder_class: Incomplete + def __init__(self, module_name: str, field_name: str, holder_class: Type) -> None: ... @property - def holder_class_name(self): ... + def holder_class_name(self) -> str: ... class UnresolvedTypeReferenceError(NameError): - holder_class: Any = ... - name: Any = ... - def __init__(self, holder_class: Any, unresolved_type_name: Any) -> None: ... + holder_class: Incomplete + name: Incomplete + def __init__(self, holder_class: Type, unresolved_type_name: str) -> None: ... @property - def holder_class_name(self): ... + def holder_class_name(self) -> str: ... class BadDialect(ValueError): ... diff --git a/third-party-stubs/mashumaro/helper.pyi b/third-party-stubs/mashumaro/helper.pyi index acc7c6b50b3..0eb8254dd27 100644 --- a/third-party-stubs/mashumaro/helper.pyi +++ b/third-party-stubs/mashumaro/helper.pyi @@ -1,5 +1,5 @@ from mashumaro.types import SerializationStrategy -from typing import Any, Callable, Optional, Union +from typing import Any, Callable, Dict, Optional, TypeVar, Union from typing_extensions import Literal NamedTupleDeserializationEngine = Literal["as_dict", "as_list"] @@ -7,18 +7,21 @@ DateTimeDeserializationEngine = Literal["ciso8601", "pendulum"] AnyDeserializationEngine = Literal[NamedTupleDeserializationEngine, DateTimeDeserializationEngine] NamedTupleSerializationEngine = Literal["as_dict", "as_list"] -AnySerializationEngine = NamedTupleSerializationEngine +AnySerializationEngine = Union[NamedTupleSerializationEngine, OmitSerializationEngine] +OmitSerializationEngine = Literal["omit"] + +T = TypeVar("T") def field_options( serialize: Optional[Union[AnySerializationEngine, Callable[[Any], Any]]] = ..., deserialize: Optional[Union[AnyDeserializationEngine, Callable[[Any], Any]]] = ..., serialization_strategy: Optional[SerializationStrategy] = ..., alias: Optional[str] = ..., -) -> Any: ... +) -> Dict[str, Any]: ... class _PassThrough(SerializationStrategy): - def __call__(self, *args: Any, **kwargs: Any) -> None: ... - def serialize(self, value: Any): ... - def deserialize(self, value: Any): ... + def __call__(self, *args: Any, **kwargs: Any) -> Any: ... + def serialize(self, value: T) -> T: ... + def deserialize(self, value: T) -> T: ... 
pass_through: Any diff --git a/third-party-stubs/mashumaro/jsonschema/__init__.pyi b/third-party-stubs/mashumaro/jsonschema/__init__.pyi new file mode 100644 index 00000000000..9c6436c675e --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/__init__.pyi @@ -0,0 +1,2 @@ +from .builder import JSONSchemaBuilder as JSONSchemaBuilder, build_json_schema as build_json_schema +from .dialects import DRAFT_2020_12 as DRAFT_2020_12, OPEN_API_3_1 as OPEN_API_3_1 diff --git a/third-party-stubs/mashumaro/jsonschema/annotations.pyi b/third-party-stubs/mashumaro/jsonschema/annotations.pyi new file mode 100644 index 00000000000..f39d8003626 --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/annotations.pyi @@ -0,0 +1,80 @@ +from mashumaro.jsonschema.models import JSONSchema, Number +from typing import Dict, Set + +class Annotation: ... +class Constraint(Annotation): ... +class NumberConstraint(Constraint): ... + +class Minimum(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class Maximum(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class ExclusiveMinimum(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class ExclusiveMaximum(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class MultipleOf(NumberConstraint): + value: Number + def __init__(self, value) -> None: ... + +class StringConstraint(Constraint): ... + +class MinLength(StringConstraint): + value: int + def __init__(self, value) -> None: ... + +class MaxLength(StringConstraint): + value: int + def __init__(self, value) -> None: ... + +class Pattern(StringConstraint): + value: str + def __init__(self, value) -> None: ... + +class ArrayConstraint(Constraint): ... + +class MinItems(ArrayConstraint): + value: int + def __init__(self, value) -> None: ... + +class MaxItems(ArrayConstraint): + value: int + def __init__(self, value) -> None: ... + +class UniqueItems(ArrayConstraint): + value: bool + def __init__(self, value) -> None: ... + +class Contains(ArrayConstraint): + value: JSONSchema + def __init__(self, value) -> None: ... + +class MinContains(ArrayConstraint): + value: int + def __init__(self, value) -> None: ... + +class MaxContains(ArrayConstraint): + value: int + def __init__(self, value) -> None: ... + +class ObjectConstraint(Constraint): ... + +class MaxProperties(ObjectConstraint): + value: int + def __init__(self, value) -> None: ... + +class MinProperties(ObjectConstraint): + value: int + def __init__(self, value) -> None: ... + +class DependentRequired(ObjectConstraint): + value: Dict[str, Set[str]] + def __init__(self, value) -> None: ... diff --git a/third-party-stubs/mashumaro/jsonschema/builder.pyi b/third-party-stubs/mashumaro/jsonschema/builder.pyi new file mode 100644 index 00000000000..8f973240a85 --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/builder.pyi @@ -0,0 +1,31 @@ +from _typeshed import Incomplete +from mashumaro.jsonschema.dialects import JSONSchemaDialect +from mashumaro.jsonschema.models import Context, JSONSchema +from mashumaro.mixins.json import DataClassJSONMixin +from typing import Any, Dict, List, Optional, Type + +def build_json_schema( + instance_type: Type, + context: Optional[Context] = ..., + with_definitions: bool = ..., + all_refs: Optional[bool] = ..., + with_dialect_uri: bool = ..., + dialect: Optional[JSONSchemaDialect] = ..., + ref_prefix: Optional[str] = ..., +) -> JSONSchema: ... 
+ +class JSONSchemaDefinitions(DataClassJSONMixin): + definitions: Dict[str, JSONSchema] + def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> List[Dict[str, Any]]: ... # type: ignore + def __init__(self, definitions) -> None: ... + +class JSONSchemaBuilder: + context: Incomplete + def __init__( + self, + dialect: JSONSchemaDialect = ..., + all_refs: Optional[bool] = ..., + ref_prefix: Optional[str] = ..., + ) -> None: ... + def build(self, instance_type: Type) -> JSONSchema: ... + def get_definitions(self) -> JSONSchemaDefinitions: ... diff --git a/third-party-stubs/mashumaro/jsonschema/dialects.pyi b/third-party-stubs/mashumaro/jsonschema/dialects.pyi new file mode 100644 index 00000000000..88af0707fa0 --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/dialects.pyi @@ -0,0 +1,22 @@ +from _typeshed import Incomplete + +class JSONSchemaDialect: + uri: str + definitions_root_pointer: str + all_refs: bool + def __init__(self, uri, definitions_root_pointer, all_refs) -> None: ... + +class JSONSchemaDraft202012Dialect(JSONSchemaDialect): + uri: str + definitions_root_pointer: str + all_refs: bool + def __init__(self, uri, definitions_root_pointer, all_refs) -> None: ... + +class OpenAPISchema31Dialect(JSONSchemaDialect): + uri: str + definitions_root_pointer: str + all_refs: bool + def __init__(self, uri, definitions_root_pointer, all_refs) -> None: ... + +DRAFT_2020_12: Incomplete +OPEN_API_3_1: Incomplete diff --git a/third-party-stubs/mashumaro/jsonschema/models.pyi b/third-party-stubs/mashumaro/jsonschema/models.pyi new file mode 100644 index 00000000000..4b18bc9b3cc --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/models.pyi @@ -0,0 +1,244 @@ +from _typeshed import Incomplete +from enum import Enum +from mashumaro.config import BaseConfig as BaseConfig +from mashumaro.helper import pass_through as pass_through +from mashumaro.jsonschema.dialects import ( + DRAFT_2020_12 as DRAFT_2020_12, + JSONSchemaDialect as JSONSchemaDialect, +) +from mashumaro.mixins.json import DataClassJSONMixin as DataClassJSONMixin +from typing import Any, Dict, List, Optional, Set, Union +from typing_extensions import TypeAlias + +Number: TypeAlias = Union[int, float] +Null = object() + +class JSONSchemaInstanceType(Enum): + NULL: str + BOOLEAN: str + OBJECT: str + ARRAY: str + NUMBER: str + STRING: str + INTEGER: str + +class JSONSchemaInstanceFormat(Enum): ... 
+ +class JSONSchemaStringFormat(JSONSchemaInstanceFormat): + DATETIME: str + DATE: str + TIME: str + DURATION: str + EMAIL: str + IDN_EMAIL: str + HOSTNAME: str + IDN_HOSTNAME: str + IPV4ADDRESS: str + IPV6ADDRESS: str + URI: str + URI_REFERENCE: str + IRI: str + IRI_REFERENCE: str + UUID: str + URI_TEMPLATE: str + JSON_POINTER: str + RELATIVE_JSON_POINTER: str + REGEX: str + +class JSONSchemaInstanceFormatExtension(JSONSchemaInstanceFormat): + TIMEDELTA: str + TIME_ZONE: str + IPV4NETWORK: str + IPV6NETWORK: str + IPV4INTERFACE: str + IPV6INTERFACE: str + DECIMAL: str + FRACTION: str + BASE64: str + PATH: str + +DATETIME_FORMATS: Incomplete +IPADDRESS_FORMATS: Incomplete + +class JSONSchema(DataClassJSONMixin): + schema: Optional[str] + type: Optional[JSONSchemaInstanceType] + enum: Optional[List[Any]] + const: Optional[Any] + format: Optional[ + Union[JSONSchemaInstanceFormat, JSONSchemaStringFormat, JSONSchemaInstanceFormatExtension] + ] + title: Optional[str] + description: Optional[str] + anyOf: Optional[List["JSONSchema"]] + reference: Optional[str] + definitions: Optional[Dict[str, "JSONSchema"]] + default: Optional[Any] + deprecated: Optional[bool] + examples: Optional[List[Any]] + properties: Optional[Dict[str, "JSONSchema"]] + patternProperties: Optional[Dict[str, "JSONSchema"]] + additionalProperties: Union["JSONSchema", bool, None] + propertyNames: Optional["JSONSchema"] + prefixItems: Optional[List["JSONSchema"]] + items: Optional["JSONSchema"] + contains: Optional["JSONSchema"] + multipleOf: Optional[Number] + maximum: Optional[Number] + exclusiveMaximum: Optional[Number] + minimum: Optional[Number] + exclusiveMinimum: Optional[Number] + maxLength: Optional[int] + minLength: Optional[int] + pattern: Optional[str] + maxItems: Optional[int] + minItems: Optional[int] + uniqueItems: Optional[bool] + maxContains: Optional[int] + minContains: Optional[int] + maxProperties: Optional[int] + minProperties: Optional[int] + required: Optional[List[str]] + dependentRequired: Optional[Dict[str, Set[str]]] + + class Config(BaseConfig): + omit_none: bool + serialize_by_alias: bool + aliases: Incomplete + serialization_strategy: Incomplete + + def __pre_serialize__(self, context: Optional[Dict]) -> JSONSchema: ... + def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> Dict[Any, Any]: ... + def __init__( + self, + schema, + type, + enum, + const, + format, + title, + description, + anyOf, + reference, + definitions, + default, + deprecated, + examples, + properties, + patternProperties, + additionalProperties, + propertyNames, + prefixItems, + items, + contains, + multipleOf, + maximum, + exclusiveMaximum, + minimum, + exclusiveMinimum, + maxLength, + minLength, + pattern, + maxItems, + minItems, + uniqueItems, + maxContains, + minContains, + maxProperties, + minProperties, + required, + dependentRequired, + ) -> None: ... + +class JSONObjectSchema(JSONSchema): + type: JSONSchemaInstanceType + def __init__( + self, + schema, + type, + enum, + const, + format, + title, + description, + anyOf, + reference, + definitions, + default, + deprecated, + examples, + properties, + patternProperties, + additionalProperties, + propertyNames, + prefixItems, + items, + contains, + multipleOf, + maximum, + exclusiveMaximum, + minimum, + exclusiveMinimum, + maxLength, + minLength, + pattern, + maxItems, + minItems, + uniqueItems, + maxContains, + minContains, + maxProperties, + minProperties, + required, + dependentRequired, + ) -> None: ... 
+ +class JSONArraySchema(JSONSchema): + type: JSONSchemaInstanceType + def __init__( + self, + schema, + type, + enum, + const, + format, + title, + description, + anyOf, + reference, + definitions, + default, + deprecated, + examples, + properties, + patternProperties, + additionalProperties, + propertyNames, + prefixItems, + items, + contains, + multipleOf, + maximum, + exclusiveMaximum, + minimum, + exclusiveMinimum, + maxLength, + minLength, + pattern, + maxItems, + minItems, + uniqueItems, + maxContains, + minContains, + maxProperties, + minProperties, + required, + dependentRequired, + ) -> None: ... + +class Context: + dialect: JSONSchemaDialect + definitions: Dict[str, JSONSchema] + all_refs: Optional[bool] + ref_prefix: Optional[str] + def __init__(self, dialect, definitions, all_refs, ref_prefix) -> None: ... diff --git a/third-party-stubs/mashumaro/jsonschema/schema.pyi b/third-party-stubs/mashumaro/jsonschema/schema.pyi new file mode 100644 index 00000000000..e3cd1b5cd19 --- /dev/null +++ b/third-party-stubs/mashumaro/jsonschema/schema.pyi @@ -0,0 +1,72 @@ +from mashumaro.config import BaseConfig +from mashumaro.jsonschema.annotations import Annotation +from mashumaro.jsonschema.models import Context, JSONSchema +from typing import Any, Callable, Iterable, List, Mapping, Optional, Tuple, Type, Union + +class Instance: + type: Type + name: Optional[str] + origin_type: Type + annotations: List[Annotation] + @property + def metadata(self) -> Mapping[str, Any]: ... + @property + def alias(self) -> Optional[str]: ... + @property + def holder_class(self) -> Optional[Type]: ... + def copy(self, **changes: Any) -> Instance: ... + def __post_init__(self) -> None: ... + def update_type(self, new_type: Type) -> None: ... + def fields(self) -> Iterable[Tuple[str, Type, bool, Any]]: ... + def get_overridden_serialization_method(self) -> Optional[Union[Callable, str]]: ... + def get_config(self) -> Type[BaseConfig]: ... + def __init__(self, type, name, __builder) -> None: ... + +class InstanceSchemaCreatorRegistry: + def register(self, func: InstanceSchemaCreator) -> InstanceSchemaCreator: ... + def iter(self) -> Iterable[InstanceSchemaCreator]: ... + def __init__(self, _registry) -> None: ... + +class EmptyJSONSchema(JSONSchema): + def __init__( + self, + schema, + type, + enum, + const, + format, + title, + description, + anyOf, + reference, + definitions, + default, + deprecated, + examples, + properties, + patternProperties, + additionalProperties, + propertyNames, + prefixItems, + items, + contains, + multipleOf, + maximum, + exclusiveMaximum, + minimum, + exclusiveMinimum, + maxLength, + minLength, + pattern, + maxItems, + minItems, + uniqueItems, + maxContains, + minContains, + maxProperties, + minProperties, + required, + dependentRequired, + ) -> None: ... + +def get_schema(instance: Instance, ctx: Context, with_dialect_uri: bool = ...) -> JSONSchema: ... diff --git a/third-party-stubs/mashumaro/mixins/dict.pyi b/third-party-stubs/mashumaro/mixins/dict.pyi index fb904b0e97c..c6ec9accad1 100644 --- a/third-party-stubs/mashumaro/mixins/dict.pyi +++ b/third-party-stubs/mashumaro/mixins/dict.pyi @@ -1,15 +1,15 @@ -from typing import Any, Dict, Mapping, Type, TypeVar +from typing import Any, Dict, Mapping, Type, TypeVar, Optional T = TypeVar("T", bound="DataClassDictMixin") class DataClassDictMixin: def __init_subclass__(cls: Type[T], **kwargs: Any) -> None: ... - def to_dict(self, omit_none: bool = True, validate: bool = False) -> dict: ... 
+ def to_dict(self, **kwargs: Any) -> dict: ... @classmethod - def from_dict(cls, d: Mapping, validate=True) -> Any: ... + def from_dict(cls, d: Mapping, **kwargs: Any) -> Any: ... @classmethod def __pre_deserialize__(cls: Type[T], d: Dict[Any, Any]) -> Dict[Any, Any]: ... @classmethod def __post_deserialize__(cls: Type[T], obj: T) -> T: ... - def __pre_serialize__(self: T) -> T: ... - def __post_serialize__(self, d: Dict[Any, Any]) -> Dict[Any, Any]: ... + def __pre_serialize__(self: T, context: Optional[Dict]) -> T: ... + def __post_serialize__(self, d: Dict[Any, Any], context: Optional[Dict]) -> Dict[Any, Any]: ... diff --git a/third-party-stubs/mashumaro/mixins/json.pyi b/third-party-stubs/mashumaro/mixins/json.pyi index 14066559f31..267c277e258 100644 --- a/third-party-stubs/mashumaro/mixins/json.pyi +++ b/third-party-stubs/mashumaro/mixins/json.pyi @@ -1,6 +1,5 @@ from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin -from typing import Any, Dict, Type, TypeVar, Union -from typing_extensions import Protocol as Protocol +from typing import Any, Callable, Dict, TypeVar, Union, Type EncodedData = Union[str, bytes, bytearray] T = TypeVar("T", bound="DataClassJSONMixin") diff --git a/third-party-stubs/mashumaro/mixins/msgpack.pyi b/third-party-stubs/mashumaro/mixins/msgpack.pyi index b75b35488a5..d1467bf4a79 100644 --- a/third-party-stubs/mashumaro/mixins/msgpack.pyi +++ b/third-party-stubs/mashumaro/mixins/msgpack.pyi @@ -1,17 +1,16 @@ -from mashumaro.dialects.msgpack import MessagePackDialect as MessagePackDialect +from _typeshed import Incomplete +from mashumaro.dialect import Dialect as Dialect +from mashumaro.helper import pass_through as pass_through from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin -from typing import Any, Dict, Type, TypeVar -from typing_extensions import Protocol as Protocol +from typing import Any, Callable, Dict, TypeVar, Type -EncodedData = bytes T = TypeVar("T", bound="DataClassMessagePackMixin") -DEFAULT_DICT_PARAMS: Any - -class Encoder: - def __call__(self, o: Any, **kwargs: Any) -> EncodedData: ... +EncodedData = bytes +Encoder = Callable[[Any], EncodedData] +Decoder = Callable[[EncodedData], Dict[Any, Any]] -class Decoder: - def __call__(self, packed: EncodedData, **kwargs: Any) -> Dict[Any, Any]: ... +class MessagePackDialect(Dialect): + serialization_strategy: Incomplete def default_encoder(data: Any) -> EncodedData: ... def default_decoder(data: EncodedData) -> Dict[Any, Any]: ... diff --git a/third-party-stubs/mashumaro/mixins/orjson.pyi b/third-party-stubs/mashumaro/mixins/orjson.pyi new file mode 100644 index 00000000000..d56f063e52b --- /dev/null +++ b/third-party-stubs/mashumaro/mixins/orjson.pyi @@ -0,0 +1,23 @@ +from mashumaro.dialect import Dialect as Dialect +from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin +from typing import Any, Callable, Dict, TypeVar, Union + +T = TypeVar("T", bound="DataClassORJSONMixin") +EncodedData = Union[str, bytes, bytearray] +Encoder = Callable[[Any], EncodedData] +Decoder = Callable[[EncodedData], Dict[Any, Any]] + +class OrjsonDialect(Dialect): + serialization_strategy: Any + +class DataClassORJSONMixin(DataClassDictMixin): + def to_jsonb( + self, encoder: Encoder = ..., *, orjson_options: int = ..., **to_dict_kwargs: Any + ) -> bytes: ... + def to_json( + self, encoder: Encoder = ..., *, orjson_options: int = ..., **to_dict_kwargs: Any + ) -> bytes: ... 
+ @classmethod + def from_json( + cls, data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs: Any + ) -> T: ... diff --git a/third-party-stubs/mashumaro/mixins/toml.pyi b/third-party-stubs/mashumaro/mixins/toml.pyi new file mode 100644 index 00000000000..bb56adee966 --- /dev/null +++ b/third-party-stubs/mashumaro/mixins/toml.pyi @@ -0,0 +1,21 @@ +from _typeshed import Incomplete +from mashumaro.dialect import Dialect as Dialect +from mashumaro.helper import pass_through as pass_through +from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin +from typing import Any, Callable, Dict, TypeVar + +T = TypeVar("T", bound="DataClassTOMLMixin") +EncodedData = str +Encoder = Callable[[Any], EncodedData] +Decoder = Callable[[EncodedData], Dict[Any, Any]] + +class TOMLDialect(Dialect): + omit_none: bool + serialization_strategy: Incomplete + +class DataClassTOMLMixin(DataClassDictMixin): + def to_toml(self, encoder: Encoder = ..., **to_dict_kwargs: Any) -> EncodedData: ... + @classmethod + def from_toml( + cls, data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs: Any + ) -> T: ... diff --git a/third-party-stubs/mashumaro/mixins/yaml.pyi b/third-party-stubs/mashumaro/mixins/yaml.pyi index 4033741163b..25e3571bbbb 100644 --- a/third-party-stubs/mashumaro/mixins/yaml.pyi +++ b/third-party-stubs/mashumaro/mixins/yaml.pyi @@ -1,18 +1,13 @@ +from _typeshed import Incomplete from mashumaro.mixins.dict import DataClassDictMixin as DataClassDictMixin -from typing import Any, Dict, Type, TypeVar, Union -from typing_extensions import Protocol as Protocol +from typing import Any, Callable, Dict, TypeVar, Union -EncodedData = Union[str, bytes] T = TypeVar("T", bound="DataClassYAMLMixin") - -class Encoder: - def __call__(self, o: Any, **kwargs: Any) -> EncodedData: ... - -class Decoder: - def __call__(self, packed: EncodedData, **kwargs: Any) -> Dict[Any, Any]: ... - -DefaultLoader: Any -DefaultDumper: Any +EncodedData = Union[str, bytes] +Encoder = Callable[[Any], EncodedData] +Decoder = Callable[[EncodedData], Dict[Any, Any]] +DefaultLoader: Incomplete +DefaultDumper: Incomplete def default_encoder(data: Any) -> EncodedData: ... def default_decoder(data: EncodedData) -> Dict[Any, Any]: ... @@ -21,5 +16,5 @@ class DataClassYAMLMixin(DataClassDictMixin): def to_yaml(self, encoder: Encoder = ..., **to_dict_kwargs: Any) -> EncodedData: ... @classmethod def from_yaml( - cls: Type[T], data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs: Any + cls, data: EncodedData, decoder: Decoder = ..., **from_dict_kwargs: Any ) -> T: ... diff --git a/third-party-stubs/mashumaro/types.pyi b/third-party-stubs/mashumaro/types.pyi index ca63363c51c..536c5d2c4c5 100644 --- a/third-party-stubs/mashumaro/types.pyi +++ b/third-party-stubs/mashumaro/types.pyi @@ -1,5 +1,8 @@ import decimal -from typing import Any, Optional +from _typeshed import Incomplete +from mashumaro.core.const import Sentinel +from typing import Any, Optional, Union +from typing_extensions import Literal class SerializableType: ... class GenericSerializableType: ... @@ -9,8 +12,15 @@ class SerializationStrategy: def deserialize(self, value: Any) -> Any: ... class RoundedDecimal(SerializationStrategy): - exp: Any = ... - rounding: Any = ... - def __init__(self, places: Optional[Any] = ..., rounding: Optional[Any] = ...) -> None: ... - def serialize(self, value: Any) -> str: ... - def deserialize(self, value: str) -> Any: ... 
+ exp: Incomplete + rounding: Incomplete + def __init__(self, places: Optional[int] = ..., rounding: Optional[str] = ...) -> None: ... + def serialize(self, value: decimal.Decimal) -> str: ... + def deserialize(self, value: str) -> decimal.Decimal: ... + +class Discriminator: + field: Optional[str] + include_supertypes: bool + include_subtypes: bool + def __post_init__(self) -> None: ... + def __init__(self, field, include_supertypes, include_subtypes) -> None: ... diff --git a/tox.ini b/tox.ini index 2da444102fe..a0b0912ab6f 100644 --- a/tox.ini +++ b/tox.ini @@ -23,12 +23,14 @@ passenv = DBT_* POSTGRES_TEST_* PYTEST_ADDOPTS - DD_SERVICE + DD_CIVISIBILITY_AGENTLESS_ENABLED + DD_API_KEY + DD_SITE DD_ENV + DD_SERVICE commands = {envpython} -m pytest --cov=core --cov-append --cov-report=xml {posargs} tests/functional -k "not tests/functional/graph_selection" {envpython} -m pytest --cov=core --cov-append --cov-report=xml {posargs} tests/functional/graph_selection - {envpython} -m pytest --cov=core --cov-append --cov-report=xml {posargs} tests/adapter deps = -rdev-requirements.txt
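For readers unfamiliar with the mashumaro.jsonschema module stubbed earlier in this diff, here is a rough usage sketch of the API those stubs describe. The ColumnInfo/ModelInfo dataclasses are invented purely for illustration and do not exist in dbt-core; build_json_schema() and its return type come straight from the new builder.pyi and models.pyi stubs.

from dataclasses import dataclass, field
from typing import List, Optional

from mashumaro.jsonschema import build_json_schema


@dataclass
class ColumnInfo:
    name: str
    description: Optional[str] = None


@dataclass
class ModelInfo:
    name: str
    columns: List[ColumnInfo] = field(default_factory=list)


# Per builder.pyi, build_json_schema() returns a JSONSchema dataclass; JSONSchema
# mixes in DataClassJSONMixin (see models.pyi), so it can be dumped with to_dict().
schema = build_json_schema(ModelInfo)
print(schema.to_dict())  # plain dict; JSON Schema draft 2020-12 dialect by default, per mashumaro's docs

Having typed stubs for these entry points lets mypy resolve the module if it is ever imported, without loosening the strictness settings used elsewhere in the repo.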