From 52d64dc316696e41a50343af342e845747699a3b Mon Sep 17 00:00:00 2001
From: John Paul Alcala
Date: Thu, 12 Sep 2024 14:41:35 +0800
Subject: [PATCH] delete+insert requires incremental_predicates if there's no unique_key

This allows us to perform fine-grained optimizations that do not rely on
the unique key. This optimization is important for data warehouses like
Redshift, where there is no concept of primary/unique keys or indexes.
---
 .../materializations/models/incremental/merge.sql | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/dbt/include/global_project/macros/materializations/models/incremental/merge.sql b/dbt/include/global_project/macros/materializations/models/incremental/merge.sql
index ca972c9f..68dcc8f5 100644
--- a/dbt/include/global_project/macros/materializations/models/incremental/merge.sql
+++ b/dbt/include/global_project/macros/materializations/models/incremental/merge.sql
@@ -89,6 +89,18 @@
             {%- endif -%};
 
         {% endif %}
+    {% else %}
+        {% if not incremental_predicates %}
+            {{ exceptions.raise_compiler_error("incremental_predicates is required when there is no unique_key") }}
+        {% endif %}
+
+        delete from {{ target }}
+        where (
+            {% for predicate in incremental_predicates %}
+                {{ predicate|replace('DBT_INTERNAL_DEST', target)|replace('DBT_INTERNAL_SOURCE', source) }}
+                {{ "and " if not loop.last}}
+            {% endfor %}
+        );
     {% endif %}
 
     insert into {{ target }} ({{ dest_cols_csv }})
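
A minimal usage sketch of the new code path, for reference: the model name,
source ref, column, and predicate below are hypothetical, and it assumes a
Redshift-style dateadd() function. Because no unique_key is configured, the
patched branch requires incremental_predicates and uses them to scope the
delete; DBT_INTERNAL_DEST is replaced with the target relation before the
statement runs.

    -- models/fct_events.sql (hypothetical example model)
    {{
        config(
            materialized='incremental',
            incremental_strategy='delete+insert',
            incremental_predicates=[
                "DBT_INTERNAL_DEST.event_date >= dateadd(day, -7, current_date)"
            ]
        )
    }}

    select *
    from {{ ref('stg_events') }}
    {% if is_incremental() %}
    -- reprocess only the same window that the delete predicate covers
    where event_date >= dateadd(day, -7, current_date)
    {% endif %}

With a config like this, the generated DML deletes only the last seven days
from the target and re-inserts that window from the source relation, rather
than matching rows on a unique key.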