From d8a0badc2aff21c930d3212fb51b40a24c97946a Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Fri, 27 Dec 2024 14:06:58 -0700 Subject: [PATCH 01/46] chore!: start refactor --- src/dbt_osmosis/core/audit_macros.jinja2 | 452 ----------------------- src/dbt_osmosis/core/diff.py | 188 ---------- src/dbt_osmosis/core/macros.py | 14 - src/dbt_osmosis/core/osmosis.py | 140 ++++--- 4 files changed, 66 insertions(+), 728 deletions(-) delete mode 100644 src/dbt_osmosis/core/audit_macros.jinja2 delete mode 100644 src/dbt_osmosis/core/diff.py delete mode 100644 src/dbt_osmosis/core/macros.py diff --git a/src/dbt_osmosis/core/audit_macros.jinja2 b/src/dbt_osmosis/core/audit_macros.jinja2 deleted file mode 100644 index 3137bf95..00000000 --- a/src/dbt_osmosis/core/audit_macros.jinja2 +++ /dev/null @@ -1,452 +0,0 @@ -{% macro _dbt_osmosis_compare_queries(a_query, b_query, primary_key=None) -%} -{{ - return( - adapter.dispatch("_dbt_osmosis_compare_queries")( - a_query, b_query, primary_key - ) - ) -}} -{%- endmacro %} - -{% macro default___dbt_osmosis_compare_queries(a_query, b_query, primary_key=None) %} - -with - - a as ({{ a_query }}), - - b as ({{ b_query }}), - - a_intersect_b as (select * from a {{ dbt_utils.intersect() }} select * from b), - - a_except_b as (select * from a {{ dbt_utils.except() }} select * from b), - - b_except_a as (select * from b {{ dbt_utils.except() }} select * from a), - - all_records as ( - - select *, true as in_a, true as in_b - from a_intersect_b - - union all - - select *, true as in_a, false as in_b - from a_except_b - - union all - - select *, false as in_a, true as in_b - from b_except_a - - ) - -select * -from all_records -where not (in_a and in_b) -order by {{ primary_key ~ ", " if primary_key is not none }} in_a desc, in_b desc - -{% endmacro %} - - -- - - {% macro _dbt_osmosis_compare_queries_agg(a_query, b_query, primary_key=None) -%} - {{ - return( - adapter.dispatch("_dbt_osmosis_compare_queries_agg")( - a_query, b_query, primary_key - ) - ) - }} - {%- endmacro %} - -{% macro default___dbt_osmosis_compare_queries_agg( - a_query, b_query, primary_key=None -) %} - -with - - a as ({{ a_query }}), - - b as ({{ b_query }}), - - a_intersect_b as (select * from a {{ dbt_utils.intersect() }} select * from b), - - a_except_b as (select * from a {{ dbt_utils.except() }} select * from b), - - b_except_a as (select * from b {{ dbt_utils.except() }} select * from a), - - all_records as ( - - select *, true as in_a, true as in_b - from a_intersect_b - - union all - - select *, true as in_a, false as in_b - from a_except_b - - union all - - select *, false as in_a, true as in_b - from b_except_a - - ), - - summary_stats as ( - select in_a, in_b, count(*) as count from all_records group by 1, 2 - ) - -select *, round(100.0 * count / sum(count) over (), 2) as percent_of_total - -from summary_stats -order by in_a desc, in_b desc - -{% endmacro %} - - -- - - {% macro _dbt_osmosis_pop_columns(columns, columns_to_pop) %} - {% set popped_columns = [] %} - - {% for column in columns %} - {% if column.name | lower not in columns_to_pop | lower %} - {% do popped_columns.append(column) %} - {% endif %} - {% endfor %} - - {{ return(popped_columns) }} - {% endmacro %} - - -- - - {% macro _dbt_osmosis_compare_relations( - a_relation, b_relation, exclude_columns=[], primary_key=none -) %} - - {%- set a_columns = adapter.get_columns_in_relation(a_relation) -%} - - {% set check_columns = _dbt_osmosis_pop_columns(a_columns, exclude_columns) %} - - {% set check_cols_csv = check_columns | map(attribute="quoted") | join(", ") %} - -{% set a_query %} -select - {{ check_cols_csv }} - {% if primary_key is none %}, {{ hash(check_cols_csv) }} as _pk{% endif %} - -from {{ a_relation }} -{% endset %} - -{% set b_query %} -select - {{ check_cols_csv }} - {% if primary_key is none %}, {{ hash(check_cols_csv) }} as _pk{% endif %} - -from {{ b_relation }} -{% endset %} - - {{ _dbt_osmosis_compare_queries(a_query, b_query, primary_key or "_pk") }} - - {% endmacro %} - - -- - - {% macro _dbt_osmosis_compare_relations_agg( - a_relation, b_relation, exclude_columns=[], primary_key=none -) %} - - {%- set a_columns = adapter.get_columns_in_relation(a_relation) -%} - - {% set check_columns = _dbt_osmosis_pop_columns(a_columns, exclude_columns) %} - - {% set check_cols_csv = check_columns | map(attribute="quoted") | join(", ") %} - -{% set a_query %} -select - {{ check_cols_csv }} - {% if primary_key is none %}, {{ hash(check_cols_csv) }} as _pk{% endif %} - -from {{ a_relation }} -{% endset %} - -{% set b_query %} -select - {{ check_cols_csv }} - {% if primary_key is none %}, {{ hash(check_cols_csv) }} as _pk{% endif %} - -from {{ b_relation }} -{% endset %} - - {{ _dbt_osmosis_compare_queries_agg(a_query, b_query, primary_key or "_pk") }} - - {% endmacro %} - - -- - - {% macro _dbt_osmosis_compare_relation_columns(a_relation, b_relation) %} - {{ - return( - adapter.dispatch("_dbt_osmosis_compare_relation_columns")( - a_relation, b_relation - ) - ) - }} - {% endmacro %} - -{% macro default___dbt_osmosis_compare_relation_columns(a_relation, b_relation) %} - -with - - a_cols as ({{ get_columns_in_relation_sql_dosmo(a_relation) }}), - - b_cols as ({{ get_columns_in_relation_sql_dosmo(b_relation) }}) - -select - column_name, - a_cols.ordinal_position as a_ordinal_position, - b_cols.ordinal_position as b_ordinal_position, - a_cols.data_type as a_data_type, - b_cols.data_type as b_data_type, - coalesce( - a_cols.ordinal_position = b_cols.ordinal_position, false - ) as has_ordinal_position_match, - coalesce(a_cols.data_type = b_cols.data_type, false) as has_data_type_match -from a_cols -full outer join b_cols using (column_name) -order by coalesce(a_cols.ordinal_position, b_cols.ordinal_position) - -{% endmacro %} - - -- - - {% macro get_columns_in_relation_sql_dosmo(relation) %} - - {{ adapter.dispatch("get_columns_in_relation_sql_dosmo")(relation) }} - - {% endmacro %} - -{% macro redshift__get_columns_in_relation_sql_dosmo(relation) %} -{#- -See https://github.com/dbt-labs/dbt/blob/23484b18b71010f701b5312f920f04529ceaa6b2/plugins/redshift/dbt/include/redshift/macros/adapters.sql#L71 -Edited to include ordinal_position --#} -with - - bound_views as ( - select - ordinal_position, - table_schema, - column_name, - data_type, - character_maximum_length, - numeric_precision, - numeric_scale - - from information_schema."columns" - where table_name = '{{ relation.identifier }}' - ), - - unbound_views as ( - select - ordinal_position, - view_schema, - col_name, - case - when col_type ilike 'character varying%' - then 'character varying' - when col_type ilike 'numeric%' - then 'numeric' - else col_type - end as col_type, - case - when col_type like 'character%' - then nullif(regexp_substr(col_type, '[0-9]+'), '')::int - else null - end as character_maximum_length, - case - when col_type like 'numeric%' - then - nullif( - split_part(regexp_substr(col_type, '[0-9,]+'), ',', 1), '' - )::int - else null - end as numeric_precision, - case - when col_type like 'numeric%' - then - nullif( - split_part(regexp_substr(col_type, '[0-9,]+'), ',', 2), '' - )::int - else null - end as numeric_scale - - from - pg_get_late_binding_view_cols() - cols( - view_schema name, - view_name name, - col_name name, - col_type varchar, - ordinal_position int - ) - where view_name = '{{ relation.identifier }}' - ), - - unioned as ( - select * - from bound_views - union all - select * - from unbound_views - ) - -select * - -from unioned -{% if relation.schema %} where table_schema = '{{ relation.schema }}' {% endif %} -order by ordinal_position - -{% endmacro %} - -{% macro snowflake__get_columns_in_relation_sql_dosmo(relation) %} -{#- -From: https://github.com/dbt-labs/dbt/blob/dev/louisa-may-alcott/plugins/snowflake/dbt/include/snowflake/macros/adapters.sql#L48 -Edited to include ordinal_position --#} -select - ordinal_position, - column_name, - data_type, - character_maximum_length, - numeric_precision, - numeric_scale - -from {{ relation.information_schema("columns") }} - -where - table_name ilike '{{ relation.identifier }}' - {% if relation.schema %} and table_schema ilike '{{ relation.schema }}' {% endif %} - {% if relation.database %} - and table_catalog ilike '{{ relation.database }}' - {% endif %} -order by ordinal_position -{% endmacro %} - -{% macro postgres__get_columns_in_relation_sql_dosmo(relation) %} -{#- -From: https://github.com/dbt-labs/dbt/blob/23484b18b71010f701b5312f920f04529ceaa6b2/plugins/postgres/dbt/include/postgres/macros/adapters.sql#L32 -Edited to include ordinal_position --#} -select - ordinal_position, - column_name, - data_type, - character_maximum_length, - numeric_precision, - numeric_scale - -from {{ relation.information_schema("columns") }} -where - table_name = '{{ relation.identifier }}' - {% if relation.schema %} and table_schema = '{{ relation.schema }}' {% endif %} -order by ordinal_position -{% endmacro %} - - -{% macro bigquery__get_columns_in_relation_sql_dosmo(relation) %} - -select ordinal_position, column_name, data_type - -from `{{ relation.database }}`.`{{ relation.schema }}`.information_schema.columns -where table_name = '{{ relation.identifier }}' - -{% endmacro %} - - -- - - {% macro _dbt_osmosis_compare_column_values( - a_query, b_query, primary_key, column_to_compare -) -%} - {{ - return( - adapter.dispatch("_dbt_osmosis_compare_column_values")( - a_query, b_query, primary_key, column_to_compare - ) - ) - }} - {%- endmacro %} - -{% macro default___dbt_osmosis_compare_column_values( - a_query, b_query, primary_key, column_to_compare -) -%} -with - - a_query as ({{ a_query }}), - - b_query as ({{ b_query }}), - - joined as ( - select - coalesce( - a_query.{{ primary_key }}, b_query.{{ primary_key }} - ) as {{ primary_key }}, - a_query.{{ column_to_compare }} as a_query_value, - b_query.{{ column_to_compare }} as b_query_value, - case - when a_query.{{ column_to_compare }} = b_query.{{ column_to_compare }} - then '✅: perfect match' - when - a_query.{{ column_to_compare }} is null - and b_query.{{ column_to_compare }} is null - then '✅: both are null' - when a_query.{{ primary_key }} is null - then '🤷: ‍missing from a' - when b_query.{{ primary_key }} is null - then '🤷: missing from b' - when a_query.{{ column_to_compare }} is null - then '🤷: value is null in a only' - when b_query.{{ column_to_compare }} is null - then '🤷: value is null in b only' - when a_query.{{ column_to_compare }} != b_query.{{ column_to_compare }} - then '🙅: ‍values do not match' - else 'unknown' -- this should never happen - end as match_status, - case - when a_query.{{ column_to_compare }} = b_query.{{ column_to_compare }} - then 0 - when - a_query.{{ column_to_compare }} is null - and b_query.{{ column_to_compare }} is null - then 1 - when a_query.{{ primary_key }} is null - then 2 - when b_query.{{ primary_key }} is null - then 3 - when a_query.{{ column_to_compare }} is null - then 4 - when b_query.{{ column_to_compare }} is null - then 5 - when a_query.{{ column_to_compare }} != b_query.{{ column_to_compare }} - then 6 - else 7 -- this should never happen - end as match_order - - from a_query - - full outer join b_query on a_query.{{ primary_key }} = b_query.{{ primary_key }} - ), - - aggregated as ( - select - '{{ column_to_compare }}' as column_name, - match_status, - match_order, - count(*) as count_records - from joined - - group by column_name, match_status, match_order - ) - -select - column_name, - match_status, - count_records, - round(100.0 * count_records / sum(count_records) over (), 2) as percent_of_total - -from aggregated - -order by match_order - -{% endmacro %} diff --git a/src/dbt_osmosis/core/diff.py b/src/dbt_osmosis/core/diff.py deleted file mode 100644 index 7ce4f0f3..00000000 --- a/src/dbt_osmosis/core/diff.py +++ /dev/null @@ -1,188 +0,0 @@ -import hashlib -from pathlib import Path -from typing import Tuple - -import agate -from dbt.adapters.base.relation import BaseRelation -from git import Repo - -from dbt_osmosis.core.log_controller import logger -from dbt_osmosis.vendored.dbt_core_interface.project import DbtProject - - -def build_diff_queries(model: str, runner: DbtProject) -> Tuple[str, str]: - """Leverage git to build two temporary tables for diffing the results of a query - throughout a change - """ - # Resolve git node - node = runner.get_ref_node(model) - dbt_path = Path(node.root_path) - repo = Repo(dbt_path, search_parent_directories=True) - t = next(Path(repo.working_dir).rglob(node.original_file_path)).relative_to(repo.working_dir) - sha = repo.head.object.hexsha - target = repo.head.object.tree[str(t)] - - # Create original node - git_node_name = "z_" + sha[-7:] - original_node = runner.get_server_node(target.data_stream.read().decode("utf-8"), git_node_name) - - # Alias changed node - changed_node = node - - # Compile models - original_node = runner.compile_node(original_node) - changed_node = runner.compile_node(changed_node) - - return original_node.compiled_sql, changed_node.compiled_sql - - -def build_diff_tables(model: str, runner: DbtProject) -> Tuple[BaseRelation, BaseRelation]: - """Leverage git to build two temporary tables for diffing the results of a query throughout a change""" - # Resolve git node - node = runner.get_ref_node(model) - dbt_path = Path(node.root_path) - repo = Repo(dbt_path, search_parent_directories=True) - t = next(Path(repo.working_dir).rglob(node.original_file_path)).relative_to(repo.working_dir) - sha = repo.head.object.hexsha - target = repo.head.object.tree[str(t)] - - # Create original node - git_node_name = "z_" + sha[-7:] - original_node = runner.get_server_node(target.data_stream.read().decode("utf-8"), git_node_name) - - # Alias changed node - changed_node = node - - # Compile models - original_node = runner.compile_node(original_node).node - changed_node = runner.compile_node(changed_node).node - - # Lookup and resolve original ref based on git sha - git_node_parts = original_node.database, "dbt_diff", git_node_name - ref_A, did_exist = runner.get_or_create_relation(*git_node_parts) - if not did_exist: - logger().info("Creating new relation for %s", ref_A) - with runner.adapter.connection_named("dbt-osmosis"): - runner.execute_macro( - "create_schema", - kwargs={"relation": ref_A}, - ) - runner.execute_macro( - "create_table_as", - kwargs={ - "sql": original_node.compiled_sql, - "relation": ref_A, - "temporary": True, - }, - run_compiled_sql=True, - ) - - # Resolve modified fake ref based on hash of it compiled SQL - temp_node_name = "z_" + hashlib.md5(changed_node.compiled_sql.encode("utf-8")).hexdigest()[-7:] - git_node_parts = original_node.database, "dbt_diff", temp_node_name - ref_B, did_exist = runner.get_or_create_relation(*git_node_parts) - if not did_exist: - ref_B = runner.adapter.Relation.create(*git_node_parts) - logger().info("Creating new relation for %s", ref_B) - with runner.adapter.connection_named("dbt-osmosis"): - runner.execute_macro( - "create_schema", - kwargs={"relation": ref_B}, - ) - runner.execute_macro( - "create_table_as", - kwargs={ - "sql": original_node.compiled_sql, - "relation": ref_B, - "temporary": True, - }, - run_compiled_sql=True, - ) - - return ref_A, ref_B - - -def diff_tables( - ref_A: BaseRelation, - ref_B: BaseRelation, - pk: str, - runner: DbtProject, - aggregate: bool = True, -) -> agate.Table: - logger().info("Running diff") - _, table = runner.adapter_execute( - runner.execute_macro( - "_dbt_osmosis_compare_relations_agg" if aggregate else "_dbt_osmosis_compare_relations", - kwargs={ - "a_relation": ref_A, - "b_relation": ref_B, - "primary_key": pk, - }, - ), - auto_begin=True, - fetch=True, - ) - return table - - -def diff_queries( - sql_A: str, sql_B: str, pk: str, runner: DbtProject, aggregate: bool = True -) -> agate.Table: - logger().info("Running diff") - _, table = runner.adapter_execute( - runner.execute_macro( - "_dbt_osmosis_compare_queries_agg" if aggregate else "_dbt_osmosis_compare_queries", - kwargs={ - "a_query": sql_A, - "b_query": sql_B, - "primary_key": pk, - }, - ), - auto_begin=True, - fetch=True, - ) - return table - - -def diff_and_print_to_console( - model: str, - pk: str, - runner: DbtProject, - make_temp_tables: bool = False, - agg: bool = True, - output: str = "table", -) -> None: - """ - Compare two tables and print the results to the console - """ - if make_temp_tables: - table = diff_tables(*build_diff_tables(model, runner), pk, runner, agg) - else: - table = diff_queries(*build_diff_queries(model, runner), pk, runner, agg) - print("") - output = output.lower() - if output == "table": - table.print_table() - elif output in ("chart", "bar"): - if not agg: - logger().warn( - "Cannot render output format chart with --no-agg option, defaulting to table" - ) - table.print_table() - else: - _table = table.compute( - [ - ( - "in_original, in_changed", - agate.Formula(agate.Text(), lambda r: "%(in_a)s, %(in_b)s" % r), - ) - ] - ) - _table.print_bars( - label_column_name="in_original, in_changed", value_column_name="count" - ) - elif output == "csv": - table.to_csv("dbt-osmosis-diff.csv") - else: - logger().warn("No such output format %s, defaulting to table", output) - table.print_table() diff --git a/src/dbt_osmosis/core/macros.py b/src/dbt_osmosis/core/macros.py deleted file mode 100644 index ba788b4b..00000000 --- a/src/dbt_osmosis/core/macros.py +++ /dev/null @@ -1,14 +0,0 @@ -from pathlib import Path - -from dbt_osmosis.core.log_controller import logger -from dbt_osmosis.core.osmosis import DbtProject - - -def inject_macros(dbt: DbtProject) -> None: - logger().info("Injecting macros, please wait...") - macro_overrides = {} - for node in dbt.macro_parser.parse_remote( - (Path(__file__).parent / "audit_macros.jinja2").read_text() - ): - macro_overrides[node.unique_id] = node - dbt.dbt.macros.update(macro_overrides) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index b5fa875b..a4d5c05b 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -1,25 +1,16 @@ import json import os import re +import sys +import typing as t from collections import OrderedDict +from collections.abc import Iterable, Iterator, MutableMapping, Sequence from concurrent.futures import ThreadPoolExecutor, wait from dataclasses import dataclass, field from functools import lru_cache from itertools import chain from pathlib import Path from threading import Lock -from typing import ( - Any, - Dict, - Iterable, - Iterator, - List, - MutableMapping, - Optional, - Sequence, - Set, - Tuple, -) import ruamel.yaml from dbt.adapters.base.column import Column @@ -53,7 +44,7 @@ def __init__(self, **kwargs) -> None: @dataclass class SchemaFileLocation: target: Path - current: Optional[Path] = None + current: Path | None = None node_type: NodeType = NodeType.Model @property @@ -63,15 +54,16 @@ def is_valid(self) -> bool: @dataclass class SchemaFileMigration: - output: Dict[str, Any] = field(default_factory=dict) - supersede: Dict[Path, List[str]] = field(default_factory=dict) + output: dict[str, t.Any] = field(default_factory=dict) + supersede: dict[Path, list[str]] = field(default_factory=dict) +@(t.final if sys.version_info >= (3, 8) else lambda f: f) class DbtYamlManager(DbtProject): """The DbtYamlManager class handles developer automation tasks surrounding schema yaml files organziation, documentation, and coverage.""" - audit_report = """ + audit_report: t.ClassVar[str] = """ :white_check_mark: [bold]Audit Report[/bold] ------------------------------- @@ -91,7 +83,7 @@ class DbtYamlManager(DbtProject): # TODO: Let user supply a custom arg / config file / csv of strings which we # consider placeholders which are not valid documentation, these are just my own # We may well drop the placeholder concept too. It is just a convenience for refactors - placeholders = [ + placeholders: t.ClassVar[list[str]] = [ "Pending further documentation", "Pending further documentation.", "No description for this column", @@ -105,18 +97,18 @@ class DbtYamlManager(DbtProject): # NOTE: we use an arbitrarily large TTL since the YAML manager is not # a long-running service which needs to periodically invalidate and refresh - ADAPTER_TTL = 1e9 + ADAPTER_TTL: t.ClassVar[float] = 1e9 def __init__( self, - target: Optional[str] = None, - profiles_dir: Optional[str] = None, - project_dir: Optional[str] = None, - catalog_file: Optional[str] = None, - threads: Optional[int] = 1, - fqn: Optional[str] = None, + target: str | None = None, + profiles_dir: str | None = None, + project_dir: str | None = None, + catalog_file: str | None = None, + threads: int | None = 1, + fqn: str | None = None, dry_run: bool = False, - models: Optional[List[str]] = None, + models: list[str] | None = None, skip_add_columns: bool = False, skip_add_tags: bool = False, skip_add_data_types: bool = False, @@ -124,19 +116,19 @@ def __init__( char_length: bool = False, skip_merge_meta: bool = False, add_progenitor_to_meta: bool = False, - vars: Optional[str] = None, + vars: str | None = None, use_unrendered_descriptions: bool = False, - profile: Optional[str] = None, - add_inheritance_for_specified_keys: Optional[List[str]] = None, + profile: str | None = None, + add_inheritance_for_specified_keys: list[str] | None = None, output_to_lower: bool = False, ): """Initializes the DbtYamlManager class.""" - super().__init__(target, profiles_dir, project_dir, threads, vars=vars, profile=profile) + super().__init__(target, profiles_dir, project_dir, threads, vars=vars, profile=profile) # pyright: ignore[reportArgumentType] self.fqn = fqn self.models = models or [] self.dry_run = dry_run self.catalog_file = catalog_file - self._catalog: Optional[CatalogArtifact] = None + self._catalog: CatalogArtifact | None = None self.skip_add_columns = skip_add_columns self.skip_add_tags = skip_add_tags self.skip_add_data_types = skip_add_data_types @@ -156,7 +148,7 @@ def __init__( ) logger().info( "Please supply a valid fqn segment if using --fqn or a valid model name, path, or" - " subpath if using positional arguments" + + " subpath if using positional arguments" ) exit(0) @@ -240,14 +232,14 @@ def _filter_model(self, node: ManifestNode) -> bool: ) @staticmethod - def get_patch_path(node: ManifestNode) -> Optional[Path]: + def get_patch_path(node: ManifestNode) -> Path | None: """Returns the patch path for a node if it exists""" if node is not None and node.patch_path: return as_path(node.patch_path.split("://")[-1]) def filtered_models( - self, subset: Optional[MutableMapping[str, ManifestNode]] = None - ) -> Iterator[Tuple[str, ManifestNode]]: + self, subset: MutableMapping[str, ManifestNode] | None = None + ) -> Iterator[tuple[str, ManifestNode]]: """Generates an iterator of valid models""" for unique_id, dbt_node in ( subset.items() @@ -257,7 +249,7 @@ def filtered_models( if self._filter_model(dbt_node): yield unique_id, dbt_node - def get_osmosis_path_spec(self, node: ManifestNode) -> Optional[str]: + def get_osmosis_path_spec(self, node: ManifestNode) -> str | None: """Validates a config string. If input is a source, we return the resource type str instead @@ -286,7 +278,7 @@ def get_node_path(self, node: ManifestNode): """Resolve absolute file path for a manifest node""" return as_path(self.config.project_root, node.original_file_path).resolve() - def get_schema_path(self, node: ManifestNode) -> Optional[Path]: + def get_schema_path(self, node: ManifestNode) -> Path | None: """Resolve absolute schema file path for a manifest node""" schema_path = None if node.resource_type == NodeType.Model and node.patch_path: @@ -327,7 +319,7 @@ def get_catalog_key(node: ManifestNode) -> CatalogKey: return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) - def get_base_model(self, node: ManifestNode, output_to_lower: bool) -> Dict[str, Any]: + def get_base_model(self, node: ManifestNode, output_to_lower: bool) -> dict[str, t.Any]: """Construct a base model object with model name, column names populated from database""" columns = self.get_columns(self.get_catalog_key(node), output_to_lower) return { @@ -336,10 +328,10 @@ def get_base_model(self, node: ManifestNode, output_to_lower: bool) -> Dict[str, } def augment_existing_model( - self, documentation: Dict[str, Any], node: ManifestNode, output_to_lower: bool - ) -> Dict[str, Any]: + self, documentation: dict[str, t.Any], node: ManifestNode, output_to_lower: bool + ) -> dict[str, t.Any]: """Injects columns from database into existing model if not found""" - model_columns: List[str] = [c["name"] for c in documentation.get("columns", [])] + model_columns: list[str] = [c["name"] for c in documentation.get("columns", [])] database_columns = self.get_columns(self.get_catalog_key(node), output_to_lower) for column in ( c for c in database_columns if not any(c.lower() == m.lower() for m in model_columns) @@ -357,13 +349,13 @@ def augment_existing_model( ) return documentation - def get_columns(self, catalog_key: CatalogKey, output_to_lower: bool) -> List[str]: + def get_columns(self, catalog_key: CatalogKey, output_to_lower: bool) -> list[str]: """Get all columns in a list for a model""" return list(self.get_columns_meta(catalog_key, output_to_lower).keys()) @property - def catalog(self) -> Optional[CatalogArtifact]: + def catalog(self) -> CatalogArtifact | None: """Get the catalog data from the catalog file Catalog data is cached in memory to avoid reading and parsing the file multiple times @@ -391,13 +383,13 @@ def _get_column_type(self, column: Column) -> str: @lru_cache(maxsize=5000) def get_columns_meta( self, catalog_key: CatalogKey, output_to_lower: bool = False - ) -> Dict[str, ColumnMetadata]: + ) -> dict[str, ColumnMetadata]: """Get all columns in a list for a model""" columns = OrderedDict() blacklist = self.config.vars.vars.get("dbt-osmosis", {}).get("_blacklist", []) # If we provide a catalog, we read from it if self.catalog: - matching_models_or_sources: List[CatalogTable] = [ + matching_models_or_sources: list[CatalogTable] = [ model_or_source_values for model_or_source, model_or_source_values in dict( **self.catalog.nodes, **self.catalog.sources @@ -548,7 +540,7 @@ def bootstrap_sources(self, output_to_lower: bool = False) -> None: logger().info("...reloading project to pick up new sources") self.safe_parse_project(reinit=True) - def build_schema_folder_mapping(self, output_to_lower: bool) -> Dict[str, SchemaFileLocation]: + def build_schema_folder_mapping(self, output_to_lower: bool) -> dict[str, SchemaFileLocation]: """Builds a mapping of models or sources to their existing and target schema file paths""" # Resolve target nodes @@ -605,8 +597,8 @@ def _draft( # Model/Source Is Documented but Must be Migrated with self.mutex: schema = self.yaml_handler.load(schema_file.current) - models_in_file: Sequence[Dict[str, Any]] = schema.get("models", []) - sources_in_file: Sequence[Dict[str, Any]] = schema.get("sources", []) + models_in_file: Sequence[dict[str, t.Any]] = schema.get("models", []) + sources_in_file: Sequence[dict[str, t.Any]] = schema.get("sources", []) for documented_model in ( model for model in models_in_file if model["name"] == node.name ): @@ -669,7 +661,7 @@ def _draft( def draft_project_structure_update_plan( self, output_to_lower: bool = False - ) -> Dict[Path, SchemaFileMigration]: + ) -> dict[Path, SchemaFileMigration]: """Build project structure update plan based on `dbt-osmosis:` configs set across dbt_project.yml and model files. The update plan includes injection of undocumented models. Unless this plan is constructed and executed by the `commit_project_restructure` function, @@ -682,7 +674,7 @@ def draft_project_structure_update_plan( """ # Container for output - blueprint: Dict[Path, SchemaFileMigration] = {} + blueprint: dict[Path, SchemaFileMigration] = {} logger().info( ":chart_increasing: Searching project stucture for required updates and building action" " plan" @@ -711,7 +703,7 @@ def cleanup_blueprint(self, blueprint: dict) -> None: def commit_project_restructure_to_disk( self, - blueprint: Optional[Dict[Path, SchemaFileMigration]] = None, + blueprint: dict[Path, SchemaFileMigration] | None = None, output_to_lower: bool = False, ) -> bool: """Given a project restrucure plan of pathlib Paths to a mapping of output and supersedes @@ -720,9 +712,9 @@ def commit_project_restructure_to_disk( as needed. Args: - blueprint (Dict[Path, SchemaFileMigration]): Project restructure plan as typically + blueprint (dict[Path, SchemaFileMigration]): Project restructure plan as typically created by `build_project_structure_update_plan` - output_to_lower (bool): Set column casing to lowercase. + output_to_lower (bool): set column casing to lowercase. Returns: bool: True if the project was restructured, False if no action was required @@ -759,7 +751,7 @@ def commit_project_restructure_to_disk( else: # Update File logger().info(":toolbox: Updating schema file %s", target.name) - target_schema: Optional[Dict[str, Any]] = self.yaml_handler.load(target) + target_schema: dict[str, t.Any] | None = self.yaml_handler.load(target) # Add version if not present if not target_schema: target_schema = {"version": 2} @@ -776,7 +768,7 @@ def commit_project_restructure_to_disk( # Clean superseded schema files for dir, nodes in structure.supersede.items(): - raw_schema: Dict[str, Any] = self.yaml_handler.load(dir) + raw_schema: dict[str, t.Any] = self.yaml_handler.load(dir) # Gather models and sources marked for superseding models_marked_for_superseding = set( node.name for node in nodes if node.resource_type == NodeType.Model @@ -793,7 +785,7 @@ def commit_project_restructure_to_disk( for s in raw_schema.get("sources", []) for t in s.get("tables", []) ) - # Set difference to determine non-superseded models and sources + # set difference to determine non-superseded models and sources non_superseded_models = models_in_schema - models_marked_for_superseding non_superseded_sources = sources_in_schema - sources_marked_for_superseding if len(non_superseded_models) + len(non_superseded_sources) == 0: @@ -835,7 +827,7 @@ def commit_project_restructure_to_disk( return True @staticmethod - def pretty_print_restructure_plan(blueprint: Dict[Path, SchemaFileMigration]) -> None: + def pretty_print_restructure_plan(blueprint: dict[Path, SchemaFileMigration]) -> None: logger().info( list( map( @@ -854,7 +846,7 @@ def get_column_sets( database_columns: Iterable[str], yaml_columns: Iterable[str], documented_columns: Iterable[str], - ) -> Tuple[List[str], List[str], List[str]]: + ) -> tuple[list[str], list[str], list[str]]: """Returns: missing_columns: Columns in database not in dbt -- will be injected into schema file undocumented_columns: Columns missing documentation -- descriptions will be inherited and @@ -876,7 +868,7 @@ def _run( self, unique_id: str, node: ManifestNode, - schema_map: Dict[str, SchemaFileLocation], + schema_map: dict[str, SchemaFileLocation], force_inheritance: bool = False, output_to_lower: bool = False, ): @@ -884,7 +876,7 @@ def _run( with self.mutex: logger().info(":point_right: Processing model: [bold]%s[/bold]", unique_id) # Get schema file path, must exist to propagate documentation - schema_path: Optional[SchemaFileLocation] = schema_map.get(unique_id) + schema_path: SchemaFileLocation | None = schema_map.get(unique_id) if schema_path is None or schema_path.current is None: with self.mutex: logger().info( @@ -892,13 +884,13 @@ def _run( ) # We can't take action return - # Build Sets + # Build sets logger().info(":mag: Resolving columns in database") database_columns_ordered = self.get_columns(self.get_catalog_key(node), output_to_lower) columns_db_meta = self.get_columns_meta(self.get_catalog_key(node), output_to_lower) - database_columns: Set[str] = set(database_columns_ordered) + database_columns: set[str] = set(database_columns_ordered) yaml_columns_ordered = [column for column in node.columns] - yaml_columns: Set[str] = set(yaml_columns_ordered) + yaml_columns: set[str] = set(yaml_columns_ordered) if not database_columns: with self.mutex: @@ -911,7 +903,7 @@ def _run( database_columns = yaml_columns # Get documentated columns - documented_columns: Set[str] = set( + documented_columns: set[str] = set( column for column, info in node.columns.items() if info.description and info.description not in self.placeholders @@ -1056,7 +1048,7 @@ def propagate_documentation_downstream( def remove_columns_not_in_database( extra_columns: Iterable[str], node: ManifestNode, - yaml_file_model_section: Dict[str, Any], + yaml_file_model_section: dict[str, t.Any], ) -> int: """Removes columns found in dbt model that do not exist in database from both node and model simultaneously @@ -1076,11 +1068,11 @@ def remove_columns_not_in_database( def update_columns_attribute( self, node: ManifestNode, - yaml_file_model_section: Dict[str, Any], - columns_db_meta: Dict[str, ColumnMetadata], + yaml_file_model_section: dict[str, t.Any], + columns_db_meta: dict[str, ColumnMetadata], attribute_name: str, meta_key: str, - skip_attribute_update: Any, + skip_attribute_update: t.Any, output_to_lower: bool = False, ) -> int: changes_committed = 0 @@ -1114,8 +1106,8 @@ def update_columns_attribute( def add_missing_cols_to_node_and_model( missing_columns: Iterable, node: ManifestNode, - yaml_file_model_section: Dict[str, Any], - columns_db_meta: Dict[str, ColumnMetadata], + yaml_file_model_section: dict[str, t.Any], + columns_db_meta: dict[str, ColumnMetadata], output_to_lower: bool, ) -> int: """Add missing columns to node and model simultaneously @@ -1164,10 +1156,10 @@ def update_schema_file_and_node( undocumented_columns: Iterable[str], extra_columns: Iterable[str], node: ManifestNode, - section: Dict[str, Any], - columns_db_meta: Dict[str, ColumnMetadata], + section: dict[str, t.Any], + columns_db_meta: dict[str, ColumnMetadata], output_to_lower: bool, - ) -> Tuple[int, int, int, int, int]: + ) -> tuple[int, int, int, int, int]: """Take action on a schema file mirroring changes in the node.""" logger().info(":microscope: Looking for actions for %s", node.unique_id) n_cols_added = 0 @@ -1224,8 +1216,8 @@ def update_schema_file_and_node( @staticmethod def maybe_get_section_from_schema_file( - yaml_file: Dict[str, Any], node: ManifestNode - ) -> Optional[Dict[str, Any]]: + yaml_file: dict[str, t.Any], node: ManifestNode + ) -> dict[str, t.Any] | None: """Get the section of a schema file that corresponds to a node.""" if node.resource_type == NodeType.Source: section = next( From d0c69655a75e87841df742cea62ddc69c40a5f46 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Sat, 28 Dec 2024 21:08:10 -0700 Subject: [PATCH 02/46] chore: small update --- src/dbt_osmosis/core/osmosis.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index a4d5c05b..1624a1dc 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -1,7 +1,6 @@ import json import os import re -import sys import typing as t from collections import OrderedDict from collections.abc import Iterable, Iterator, MutableMapping, Sequence @@ -58,7 +57,7 @@ class SchemaFileMigration: supersede: dict[Path, list[str]] = field(default_factory=dict) -@(t.final if sys.version_info >= (3, 8) else lambda f: f) +@t.final class DbtYamlManager(DbtProject): """The DbtYamlManager class handles developer automation tasks surrounding schema yaml files organziation, documentation, and coverage.""" @@ -153,9 +152,12 @@ def __init__( exit(0) self.mutex = Lock() - self.tpe = ThreadPoolExecutor(max_workers=os.cpu_count() * 2) + self.tpe = ThreadPoolExecutor( + max_workers=os.cpu_count() * 2 + ) # TODO: configurable via env var, dont use cpu count x 2 also... use default in stdlib self.mutations = 0 + # TODO: use cachedproperty @property def yaml_handler(self): """Returns a cached instance of the YAML handler.""" From 0e76577a73c8d4f54805d35e7435aac46c5f7376 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Sat, 28 Dec 2024 22:00:32 -0700 Subject: [PATCH 03/46] wip: commit updates so far --- src/dbt_osmosis/core/osmosis.py | 1985 ++++++++++++++++--------------- 1 file changed, 1025 insertions(+), 960 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 1624a1dc..89f00df8 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -1,47 +1,80 @@ +# pyright: reportUnknownVariableType=false, reportPrivateImportUsage=false, reportAny=false, reportUnknownMemberType=false import json +import logging import os import re +import sys +import threading +import time import typing as t -from collections import OrderedDict -from collections.abc import Iterable, Iterator, MutableMapping, Sequence +import uuid +from argparse import Namespace +from collections import OrderedDict, UserDict +from collections.abc import Iterable, Iterator, MutableMapping from concurrent.futures import ThreadPoolExecutor, wait +from contextlib import contextmanager +from copy import copy from dataclasses import dataclass, field from functools import lru_cache from itertools import chain from pathlib import Path -from threading import Lock import ruamel.yaml -from dbt.adapters.base.column import Column +from dbt.adapters.factory import get_adapter_class_by_name +from dbt.config.runtime import RuntimeConfig +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import ColumnInfo, ManifestNode from dbt.contracts.results import CatalogArtifact, CatalogKey, CatalogTable, ColumnMetadata +from dbt.flags import set_from_args +from dbt.node_types import NodeType +from dbt.parser.manifest import ManifestLoader, process_node +from dbt.parser.sql import SqlBlockParser, SqlMacroParser +from dbt.task.sql import SqlCompileRunner +from dbt.tracking import disable_tracking -from dbt_osmosis.core.column_level_knowledge_propagator import ColumnLevelKnowledgePropagator -from dbt_osmosis.core.exceptions import InvalidOsmosisConfig, MissingOsmosisConfig -from dbt_osmosis.core.log_controller import logger -from dbt_osmosis.vendored.dbt_core_interface.project import ( - ColumnInfo, - DbtProject, - ManifestNode, - NodeType, -) +# Disabling dbt tracking for non-standard usage +disable_tracking() -as_path = Path + +def logger() -> logging.Logger: + """Get the log handle for dbt-osmosis""" + return logging.getLogger("dbt-osmosis") + + +def has_jinja(code: str) -> bool: + """Check if code contains Jinja tokens""" + return any(token in code for token in ("{{", "}}", "{%", "%}", "{#", "#}")) + + +def column_casing(column: str, credentials_type: str, to_lower: bool) -> str: + """Utility to handle column name casing based on dbt adapter & user flag.""" + # If quoted in snowflake, pass verbatim + if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): + return column + # Otherwise apply user-specified transformations + if to_lower: + return column.lower() + if credentials_type == "snowflake": + return column.upper() + return column class YamlHandler(ruamel.yaml.YAML): - """A `ruamel.yaml` wrapper to handle dbt YAML files with sane defaults""" + """A ruamel.yaml wrapper to handle dbt YAML files with sane defaults.""" - def __init__(self, **kwargs) -> None: + def __init__(self, **kwargs: t.Any) -> None: super().__init__(**kwargs) self.indent(mapping=2, sequence=4, offset=2) - self.width = 800 - self.preserve_quotes = True - self.default_flow_style = False - self.encoding = os.getenv("DBT_OSMOSIS_ENCODING", "utf-8") + self.width: int = 800 + self.preserve_quotes: bool = True + self.default_flow_style: bool = False + self.encoding: str = os.getenv("DBT_OSMOSIS_ENCODING", "utf-8") @dataclass class SchemaFileLocation: + """Dataclass to store schema file location details.""" + target: Path current: Path | None = None node_type: NodeType = NodeType.Model @@ -53,155 +86,486 @@ def is_valid(self) -> bool: @dataclass class SchemaFileMigration: - output: dict[str, t.Any] = field(default_factory=dict) + """Dataclass to store schema file migration details.""" + + output: dict[str, t.Any] = field( + default_factory=lambda: {"version": 2, "models": [], "sources": []} + ) supersede: dict[Path, list[str]] = field(default_factory=dict) -@t.final -class DbtYamlManager(DbtProject): - """The DbtYamlManager class handles developer automation tasks surrounding - schema yaml files organziation, documentation, and coverage.""" +@dataclass +class DbtConfiguration: + """Stores dbt project configuration in a namespace""" - audit_report: t.ClassVar[str] = """ - :white_check_mark: [bold]Audit Report[/bold] - ------------------------------- + project_dir: str + profiles_dir: str + threads: int = 1 + single_threaded: bool = True + which: str = "" + target: str | None = None + profile: str | None = None - Database: [bold green]{database}[/bold green] - Schema: [bold green]{schema}[/bold green] - Table: [bold green]{table}[/bold green] + DEBUG: bool = False - Total Columns in Database: {total_columns} - Total Documentation Coverage: {coverage}% + _vars: str | dict[str, t.Any] = field(default_factory=dict) - Action Log: - Columns Added to dbt: {n_cols_added} - Column Knowledge Inherited: {n_cols_doc_inherited} - Extra Columns Removed: {n_cols_removed} - """ + def __post_init__(self) -> None: + if self.threads != 1: + self.single_threaded = False - # TODO: Let user supply a custom arg / config file / csv of strings which we - # consider placeholders which are not valid documentation, these are just my own - # We may well drop the placeholder concept too. It is just a convenience for refactors - placeholders: t.ClassVar[list[str]] = [ - "Pending further documentation", - "Pending further documentation.", - "No description for this column", - "No description for this column.", - "Not documented", - "Not documented.", - "Undefined", - "Undefined.", - "", # This is the important one - ] + @property + def vars(self) -> str: + if isinstance(self._vars, dict): + return json.dumps(self._vars) + return self._vars + + @vars.setter + def vars(self, v: t.Any) -> None: + if not isinstance(v, (str, dict)): + raise ValueError("vars must be a string or dict") + self._vars = v + + +class DbtManifestProxy(UserDict[str, t.Any]): + """Proxy for the manifest's flat_graph, read-only by design.""" - # NOTE: we use an arbitrarily large TTL since the YAML manager is not - # a long-running service which needs to periodically invalidate and refresh - ADAPTER_TTL: t.ClassVar[float] = 1e9 + def _readonly(self, *args: t.Any, **kwargs: t.Any) -> t.Never: + _ = args, kwargs + raise RuntimeError("Cannot modify DbtManifestProxy") + + __setitem__: t.Callable[..., None] = _readonly + __delitem__: t.Callable[..., None] = _readonly + pop: t.Callable[..., None] = _readonly + popitem: t.Callable[..., t.Any] = _readonly + clear: t.Callable[..., None] = _readonly + update: t.Callable[..., None] = _readonly + setdefault: t.Callable[..., None] = _readonly + + +@dataclass +class DbtAdapterExecutionResult: + adapter_response: t.Any + table: t.Any + raw_code: str + compiled_code: str + + +@dataclass +class DbtAdapterCompilationResult: + raw_code: str + compiled_code: str + node: ManifestNode + injected_code: str | None = None + + +def find_default_project_dir() -> str: + cwd = Path.cwd() + # Walk up if needed + for p in [cwd] + list(cwd.parents): + if (p / "dbt_project.yml").exists(): + return str(p.resolve()) + return str(cwd.resolve()) + + +def find_default_profiles_dir() -> str: + # Common fallback for DBT_PROFILES_DIR + if (Path.cwd() / "profiles.yml").exists(): + return str(Path.cwd().resolve()) + return str(Path.home() / ".dbt") + + +class DbtProject: + """Wraps dbt's in-memory project & adapter, enabling queries, compilation, etc.""" + + ADAPTER_TTL: float = 3600.0 def __init__( self, target: str | None = None, profiles_dir: str | None = None, project_dir: str | None = None, - catalog_file: str | None = None, - threads: int | None = 1, - fqn: str | None = None, - dry_run: bool = False, - models: list[str] | None = None, - skip_add_columns: bool = False, - skip_add_tags: bool = False, - skip_add_data_types: bool = False, - numeric_precision: bool = False, - char_length: bool = False, - skip_merge_meta: bool = False, - add_progenitor_to_meta: bool = False, - vars: str | None = None, - use_unrendered_descriptions: bool = False, + threads: int = 1, + vars: str | dict[str, t.Any] | None = None, profile: str | None = None, - add_inheritance_for_specified_keys: list[str] | None = None, - output_to_lower: bool = False, ): - """Initializes the DbtYamlManager class.""" - super().__init__(target, profiles_dir, project_dir, threads, vars=vars, profile=profile) # pyright: ignore[reportArgumentType] - self.fqn = fqn - self.models = models or [] - self.dry_run = dry_run - self.catalog_file = catalog_file - self._catalog: CatalogArtifact | None = None - self.skip_add_columns = skip_add_columns - self.skip_add_tags = skip_add_tags - self.skip_add_data_types = skip_add_data_types - self.numeric_precision = numeric_precision - self.char_length = char_length - self.skip_merge_meta = skip_merge_meta - self.add_progenitor_to_meta = add_progenitor_to_meta - self.use_unrendered_descriptions = use_unrendered_descriptions - self.add_inheritance_for_specified_keys = add_inheritance_for_specified_keys or [] - self.output_to_lower = output_to_lower - - if len(list(self.filtered_models())) == 0: - logger().warning( - "No models found to process. Check your filters: --fqn='%s', pos args %s", - fqn, - models, + if not profiles_dir: + profiles_dir = find_default_profiles_dir() + if not project_dir: + project_dir = find_default_project_dir() + + self.base_config: DbtConfiguration = DbtConfiguration( + project_dir=project_dir, + profiles_dir=profiles_dir, + target=target, + threads=threads, + profile=profile, + ) + if vars: + self.base_config.vars = vars + + self.adapter_mutex: threading.Lock = threading.Lock() + self.parsing_mutex: threading.Lock = threading.Lock() + self.manifest_mutation_mutex: threading.Lock = threading.Lock() + + self._config: RuntimeConfig | None = None + self._manifest: Manifest | None = None + self.parse_project(init=True) + + self._sql_parser: SqlBlockParser | None = None + self._macro_parser: SqlMacroParser | None = None + self._adapter_created_at: float = 0.0 + + @property + def config(self) -> RuntimeConfig: + """Get the dbt project configuration.""" + if self._config is None: + raise RuntimeError("DbtProject not initialized. parse_project() must be called first.") + return self._config + + @property + def manifest(self) -> Manifest: + """Get the dbt project manifest.""" + if self._manifest is None: + raise RuntimeError("DbtProject not initialized. parse_project() must be called first.") + return self._manifest + + def parse_project(self, init: bool = False) -> None: + """Parse the dbt project configuration and manifest.""" + with self.parsing_mutex: + if init: + ns = Namespace( + **self.base_config.__dict__ + ) # TODO: replace with method call to handle _vars prop + set_from_args(ns, ns) + self._config = RuntimeConfig.from_args(ns) + self.initialize_adapter() + loader = ManifestLoader( + self.config, + self.config.load_dependencies(), + self.adapter.connections.set_query_header, ) - logger().info( - "Please supply a valid fqn segment if using --fqn or a valid model name, path, or" - + " subpath if using positional arguments" + self._manifest = loader.load() + self._manifest.build_flat_graph() + loader.save_macros_to_adapter(self.adapter) + self._sql_parser = None + self._macro_parser = None + + def safe_parse_project(self, init: bool = False) -> None: + """Safely re-parse the dbt project configuration and manifest preserving internal state on error.""" + old_config = copy(getattr(self, "config", None)) + try: + self.parse_project(init=init) + except Exception as exc: + if old_config: + self._config = old_config + raise exc + # Write manifest to disk here + self.write_manifest_artifact() + + def initialize_adapter(self) -> None: + """Initialize the dbt adapter.""" + if hasattr(self, "_adapter"): + try: + self.adapter.connections.cleanup_all() + except Exception: + pass + try: + adapter_cls = get_adapter_class_by_name( + self.base_config.target or self.base_config.profile or "" ) - exit(0) + except Exception: + # fallback if none found (dbt should raise if invalid type) + raise RuntimeError("Could not find an adapter class by name.") + if not adapter_cls: + raise RuntimeError("No valid adapter class found.") + + # NOTE: this smooths over an API change upstream + try: + self.adapter = adapter_cls(self.config) + except TypeError: + from dbt.mp_context import get_mp_context + + self.adapter = adapter_cls(self.config, get_mp_context()) # pyright: ignore[reportCallIssue] - self.mutex = Lock() - self.tpe = ThreadPoolExecutor( - max_workers=os.cpu_count() * 2 - ) # TODO: configurable via env var, dont use cpu count x 2 also... use default in stdlib - self.mutations = 0 + self.adapter.connections.set_connection_name() + self._adapter_created_at = time.time() + setattr(self.config, "adapter", self.adapter) - # TODO: use cachedproperty @property - def yaml_handler(self): - """Returns a cached instance of the YAML handler.""" + def adapter(self) -> t.Any: + """Get the dbt adapter. Automatically refreshes if TTL exceeded.""" + if (time.time() - getattr(self, "_adapter_created_at", 0)) > self.ADAPTER_TTL: + self.initialize_adapter() + return self._adapter # FIXME: add to init + + @adapter.setter + def adapter(self, v: t.Any) -> None: + """Set the dbt adapter. Thread-safe.""" + if self.adapter_mutex.acquire(blocking=False): + try: + setattr(self, "_adapter", v) + v.debug_query() # Verify connection + self._adapter_created_at = time.time() + setattr(self.config, "adapter", v) + finally: + self.adapter_mutex.release() + + @property + def manifest_dict(self) -> DbtManifestProxy: + """Get a read-only proxy for the manifest's flat_graph.""" + return DbtManifestProxy(self.manifest.flat_graph) + + def write_manifest_artifact(self) -> None: + """Convenience method to write the manifest to disk.""" + artifact_path = Path(self.config.project_root) / self.config.target_path / "manifest.json" + self.manifest.write(str(artifact_path)) + + def clear_internal_caches(self) -> None: + """Clear internal lru caches for the project instance.""" + self.compile_code.cache_clear() + self.unsafe_compile_code.cache_clear() + + def get_relation(self, database: str, schema: str, name: str) -> t.Any: + """Get a relation from the adapter.""" + return self.adapter.get_relation(database, schema, name) + + def adapter_execute( + self, sql: str, auto_begin: bool = False, fetch: bool = False + ) -> tuple[t.Any, t.Any]: + """Convenience method to execute a query via the adapter.""" + return self.adapter.execute(sql, auto_begin, fetch) + + def execute_code(self, raw_code: str) -> DbtAdapterExecutionResult: + """Execute SQL, compiling jinja if necessary and wrapping result in a consistent interface.""" + compiled = raw_code + if has_jinja(raw_code): + compiled = self.compile_code(raw_code).compiled_code + resp, table = self.adapter_execute(compiled, fetch=True) + return DbtAdapterExecutionResult(resp, table, raw_code, compiled) + + @contextmanager + def generate_server_node(self, sql: str, node_name: str = "anonymous_node"): + """Generate a server node, process it, and clear it after use. Mutates manifest during context.""" + with self.manifest_mutation_mutex: + self._clear_node(node_name) + sql_node = self.sql_parser.parse_remote(sql, node_name) + process_node(self.config, self.manifest, sql_node) + yield sql_node + self._clear_node(node_name) + + def unsafe_generate_server_node( + self, sql: str, node_name: str = "anonymous_node" + ) -> ManifestNode: + """Generate a server node without context, mutating manifest.""" + self._clear_node(node_name) + sql_node = self.sql_parser.parse_remote(sql, node_name) + process_node(self.config, self.manifest, sql_node) + return sql_node + + def _clear_node(self, name: str) -> None: + """Clear a node from the manifest.""" + _ = self.manifest.nodes.pop( + f"{NodeType.SqlOperation}.{self.config.project_name}.{name}", None + ) + + @property + def sql_parser(self) -> SqlBlockParser: + """Lazy handle to the dbt SQL parser for the project.""" + if not self._sql_parser: + self._sql_parser = SqlBlockParser(self.config, self.manifest, self._config) + return self._sql_parser + + @property + def macro_parser(self) -> SqlMacroParser: + """Lazy handle to the dbt SQL macro parser for the project.""" + if not self._macro_parser: + self._macro_parser = SqlMacroParser(self.config, self.manifest) + return self._macro_parser + + def compile_from_node(self, node: ManifestNode) -> DbtAdapterCompilationResult: + """Compile a node and wrap the result in a consistent interface.""" + compiled_node = SqlCompileRunner( + self._config, self.adapter, node=node, node_index=1, num_nodes=1 + ).compile(self.manifest) + return DbtAdapterCompilationResult( + raw_code=getattr(compiled_node, "raw_code"), + compiled_code=getattr(compiled_node, "compiled_code"), + node=compiled_node, + ) + + @lru_cache(maxsize=100) + def compile_code(self, raw_code: str) -> DbtAdapterCompilationResult: + """Compile raw SQL and wrap the result in a consistent interface, leveraging lru cache.""" + tmp_id = str(uuid.uuid4()) + with self.generate_server_node(raw_code, tmp_id) as node: + return self.compile_from_node(node) + + @lru_cache(maxsize=100) + def unsafe_compile_code(self, raw_code: str, retry: int = 3) -> DbtAdapterCompilationResult: + """Compile raw SQL and wrap the result in a consistent interface, leveraging lru cache. Technically less thread-safe than compile_code but faster in a high throughput server scenario""" + tmp_id = str(uuid.uuid4()) + try: + node = self.unsafe_generate_server_node(raw_code, tmp_id) + return self.compile_from_node(node) + except Exception as e: + if retry > 0: + return self.compile_code(raw_code) + raise e + finally: + self._clear_node(tmp_id) + + +# TODO: we will collapse this from the file it is in currently +class ColumnLevelKnowledgePropagator: + """Stub for doc-propagation logic. For brevity, only the relevant part is included.""" + + @staticmethod + def get_node_columns_with_inherited_knowledge( + manifest: t.Any, + node: ManifestNode, + placeholders: list[str], + project_dir: str, + use_unrendered_descriptions: bool, + ) -> dict[str, dict[str, t.Any]]: + """ + Return known doc/metadata from related lineage. + In real usage, you would gather from multiple upstream nodes. + """ + # This is a stub. + # For now, returning an empty dict or minimal placeholders + _ = manifest, node, placeholders, project_dir, use_unrendered_descriptions + return {} + + @staticmethod + def update_undocumented_columns_with_prior_knowledge( + columns_to_update: Iterable[str], + node: ManifestNode, + yaml_section: dict[str, t.Any], + known_knowledge: dict[str, dict[str, t.Any]], + skip_add_tags: bool, + skip_merge_meta: bool, + add_progenitor_to_meta: bool, + add_inheritance_keys: list[str], + ) -> int: + """ + Propagate docs from known_knowledge onto columns in node + yaml_section. + Return count of columns that changed. + """ + _ = skip_add_tags, skip_merge_meta, add_progenitor_to_meta, add_inheritance_keys + n = 0 + for col in columns_to_update: + if col not in node.columns: + continue + cinfo = node.columns[col] + old_desc = getattr(cinfo, "description", "") + # If we have prior knowledge, do something + # (for example, update cinfo.description if old_desc is blank). + new_desc = old_desc + if col in known_knowledge and not old_desc: + new_desc = known_knowledge[col].get("description", "") + if new_desc and new_desc != old_desc: + setattr(cinfo, "description", new_desc) + # Mirror in yaml + for c in yaml_section.get("columns", []): + if c["name"].lower() == col.lower(): + c["description"] = new_desc + n += 1 + return n + + +class MissingOsmosisConfig(Exception): + pass + + +class InvalidOsmosisConfig(Exception): + pass + + +@dataclass +class DbtYamlManager(DbtProject): + """Automates tasks around schema yml files, organization, coverage, etc. + + Inherits from DbtProject to access manifest and adapter. + """ + + fqn: str | None = None + models: list[str] = field(default_factory=list) + dry_run: bool = False + catalog_file: str | None = None + skip_add_columns: bool = False + skip_add_tags: bool = False + skip_add_data_types: bool = False + numeric_precision: bool = False + char_length: bool = False + skip_merge_meta: bool = False + add_progenitor_to_meta: bool = False + use_unrendered_descriptions: bool = False + add_inheritance_for_specified_keys: list[str] = field(default_factory=list) + output_to_lower: bool = False + + _mutex: threading.Lock = threading.Lock() + _pool: ThreadPoolExecutor = ThreadPoolExecutor(max_workers=(os.cpu_count() or 1) * 2) + _catalog: CatalogArtifact | None = field(default=None, init=False, repr=False) + _mutations: int = 0 + + placeholders: tuple[str, ...] = ( + "Pending further documentation", + "Pending further documentation.", + "No description for this column", + "No description for this column.", + "Not documented", + "Not documented.", + "Undefined", + "Undefined.", + "", + ) + + def __post_init__(self) -> None: + super(DbtProject, self).__init__() # FIXME: this is not right + + # Re-parse to ensure our newly added attributes (like skip_add_columns) are recognized + if not list(self.filtered_models()): + logger().warning("No models found to process given fqn/models arguments") + logger().info("Check your filters or supply a valid model name/fqn.") + sys.exit(0) + + @property + def yaml_handler(self) -> YamlHandler: + """Get a canonical YAML handler for dbt project files""" if not hasattr(self, "_yaml_handler"): - self._yaml_handler = YamlHandler() + self._yaml_handler = YamlHandler() # FIXME: do like DbtProject return self._yaml_handler - def column_casing(self, column: str, output_to_lower: bool) -> str: - """Converts a column name to the correct casing for the target database.""" - # leave column name as is if encapsulated by quotes. - if self.config.credentials.type == "snowflake" and '"' == column[0] and '"' == column[-1]: - return column - elif output_to_lower: - return column.lower() - elif self.config.credentials.type == "snowflake": - return column.upper() - return column + @property + def catalog(self) -> CatalogArtifact | None: + """Get the catalog artifact, loading from disk if needed.""" + if self._catalog: + return self._catalog + if not self.catalog_file: + return None + fp = Path(self.catalog_file) + if not fp.exists(): + return None + self._catalog = CatalogArtifact.from_dict(json.loads(fp.read_text())) + return self._catalog def _filter_model_by_fqn(self, node: ManifestNode) -> bool: - """Validates a node as being selected. - - Check FQN length - Check FQN matches parts - """ + """Filter a model node by its fqn.""" if not self.fqn: return True - fqn = self.fqn or ".".join(node.fqn[1:]) - fqn_parts = fqn.split(".") + fqn_parts = self.fqn.split(".") return len(node.fqn[1:]) >= len(fqn_parts) and all( left == right for left, right in zip(fqn_parts, node.fqn[1:]) ) def _filter_model_by_models(self, node: ManifestNode) -> bool: - """Validates a node as being selected. - - Check if the node name matches a model name - Check if the node path matches a model path - Check if the node path is a child of a model path - """ - for model in self.models: - if node.name == model: + """Filter a model node by its name.""" + for m in self.models: + if node.name == m: return True node_path = self.get_node_path(node) - inp_path = as_path(model).resolve() + inp_path = Path(m).resolve() if inp_path.is_dir(): if node_path and inp_path in node_path.parents: return True @@ -211,19 +575,18 @@ def _filter_model_by_models(self, node: ManifestNode) -> bool: return False def _filter_model(self, node: ManifestNode) -> bool: - """Validates a node as being actionable. - - Check if the node is a model - Check if the node is a source - Check if the node is a model and not ephemeral - Check if the node is a model and matches the fqn or models filter if supplied - """ + """Filter a model node by fqn or models depending on input.""" if self.models: filter_method = self._filter_model_by_models elif self.fqn: filter_method = self._filter_model_by_fqn else: - filter_method = lambda _: True # noqa: E731 + # FIXME: make this more concise + def _filter_method(_): + return True + + filter_method = _filter_method + return ( node.resource_type in (NodeType.Model, NodeType.Source) and node.package_name == self.project_name @@ -233,293 +596,202 @@ def _filter_model(self, node: ManifestNode) -> bool: and filter_method(node) ) - @staticmethod - def get_patch_path(node: ManifestNode) -> Path | None: - """Returns the patch path for a node if it exists""" - if node is not None and node.patch_path: - return as_path(node.patch_path.split("://")[-1]) - def filtered_models( self, subset: MutableMapping[str, ManifestNode] | None = None ) -> Iterator[tuple[str, ManifestNode]]: - """Generates an iterator of valid models""" - for unique_id, dbt_node in ( + """Iterate over models in the manifest, applying filters.""" + items = ( subset.items() if subset else chain(self.manifest.nodes.items(), self.manifest.sources.items()) - ): + ) + for unique_id, dbt_node in items: if self._filter_model(dbt_node): yield unique_id, dbt_node - def get_osmosis_path_spec(self, node: ManifestNode) -> str | None: - """Validates a config string. - - If input is a source, we return the resource type str instead - """ - if node.resource_type == NodeType.Source: - source_specs = self.config.vars.vars.get("dbt-osmosis", {}) - source_spec = source_specs.get(node.source_name) - if isinstance(source_spec, dict): - return source_spec.get("path") - else: - return source_spec - osmosis_spec = node.unrendered_config.get("dbt-osmosis") - if not osmosis_spec: - raise MissingOsmosisConfig( - f"Config not set for model {node.name}, we recommend setting the config at a" - " directory level through the `dbt_project.yml`" - ) - try: - return osmosis_spec - except ValueError as exc: - raise InvalidOsmosisConfig( - f"Invalid config for model {node.name}: {osmosis_spec}" - ) from exc - - def get_node_path(self, node: ManifestNode): - """Resolve absolute file path for a manifest node""" - return as_path(self.config.project_root, node.original_file_path).resolve() - - def get_schema_path(self, node: ManifestNode) -> Path | None: - """Resolve absolute schema file path for a manifest node""" - schema_path = None - if node.resource_type == NodeType.Model and node.patch_path: - schema_path: str = node.patch_path.partition("://")[-1] - elif node.resource_type == NodeType.Source: - if hasattr(node, "source_name"): - schema_path: str = node.path - if schema_path: - return as_path(self.project_root).joinpath(schema_path) - - def get_target_schema_path(self, node: ManifestNode) -> Path: - """Resolve the correct schema yml target based on the dbt-osmosis - config for the model / directory - """ - osmosis_path_spec = self.get_osmosis_path_spec(node) - if not osmosis_path_spec: - # If no config is set, it is a no-op essentially - return as_path(self.config.project_root, node.original_file_path) - schema = osmosis_path_spec.format(node=node, model=node.name, parent=node.fqn[-2]) - parts = [] - - # Part 1: path from project root to base model directory - if node.resource_type == NodeType.Source: - parts += [self.config.model_paths[0]] - else: - parts += [as_path(node.original_file_path).parent] - - # Part 2: path from base model directory to file - parts += [schema if schema.endswith((".yml", ".yaml")) else f"{schema}.yml"] - - # Part 3: join parts relative to project root - return as_path(self.config.project_root).joinpath(*parts) - @staticmethod - def get_catalog_key(node: ManifestNode) -> CatalogKey: - """Returns CatalogKey for a given node.""" - if node.resource_type == NodeType.Source: - return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) - return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) - - def get_base_model(self, node: ManifestNode, output_to_lower: bool) -> dict[str, t.Any]: - """Construct a base model object with model name, column names populated from database""" - columns = self.get_columns(self.get_catalog_key(node), output_to_lower) - return { - "name": node.name, - "columns": [{"name": column_name, "description": ""} for column_name in columns], - } - - def augment_existing_model( - self, documentation: dict[str, t.Any], node: ManifestNode, output_to_lower: bool - ) -> dict[str, t.Any]: - """Injects columns from database into existing model if not found""" - model_columns: list[str] = [c["name"] for c in documentation.get("columns", [])] - database_columns = self.get_columns(self.get_catalog_key(node), output_to_lower) - for column in ( - c for c in database_columns if not any(c.lower() == m.lower() for m in model_columns) - ): - logger().info( - ":syringe: Injecting column %s into dbt schema for %s", - self.column_casing(column, output_to_lower), - node.unique_id, - ) - documentation.setdefault("columns", []).append( - { - "name": self.column_casing(column, output_to_lower), - "description": getattr(column, "description", ""), - } - ) - return documentation - - def get_columns(self, catalog_key: CatalogKey, output_to_lower: bool) -> list[str]: - """Get all columns in a list for a model""" - - return list(self.get_columns_meta(catalog_key, output_to_lower).keys()) + def get_node_path(node: ManifestNode) -> Path | None: + """Get the resolved path for a node.""" + if node.original_file_path: + return Path(node.root_path, node.original_file_path).resolve() + return None - @property - def catalog(self) -> CatalogArtifact | None: - """Get the catalog data from the catalog file - - Catalog data is cached in memory to avoid reading and parsing the file multiple times - """ - if self._catalog: - return self._catalog - if not self.catalog_file: - return None - file_path = Path(self.catalog_file) - if not file_path.exists(): - return None - self._catalog = CatalogArtifact.from_dict(json.loads(file_path.read_text())) - return self._catalog + @staticmethod + def get_patch_path(node: ManifestNode) -> Path | None: + """Get the resolved path for a node's patch (YAML) file.""" + if node.patch_path: + return Path(node.patch_path.split("://")[-1]) + return None - def _get_column_type(self, column: Column) -> str: - if ( - column.is_numeric() - and self.numeric_precision - or column.is_string() - and self.char_length - ): - return column.data_type - return column.dtype - - @lru_cache(maxsize=5000) def get_columns_meta( self, catalog_key: CatalogKey, output_to_lower: bool = False ) -> dict[str, ColumnMetadata]: - """Get all columns in a list for a model""" + """ + Resolve columns metadata (type, comment, etc.) either from an external CatalogArtifact + or from a live introspection query with the adapter. + """ columns = OrderedDict() - blacklist = self.config.vars.vars.get("dbt-osmosis", {}).get("_blacklist", []) - # If we provide a catalog, we read from it + blacklist = self._config.vars.get("dbt-osmosis", {}).get("_blacklist", []) + # if catalog is loaded: if self.catalog: - matching_models_or_sources: list[CatalogTable] = [ - model_or_source_values - for model_or_source, model_or_source_values in dict( - **self.catalog.nodes, **self.catalog.sources - ).items() - if model_or_source.split(".")[-1] == catalog_key.name + # Attempt to match node in catalog + cat_objs = {**self.catalog.nodes, **self.catalog.sources} + matched = [ + obj for key, obj in cat_objs.items() if key.split(".")[-1] == catalog_key.name ] - if matching_models_or_sources: - for col in matching_models_or_sources[0].columns.values(): - if any(re.match(pattern, col.name) for pattern in blacklist): + if matched: + for col in matched[0].columns.values(): + if any(re.match(pat, col.name) for pat in blacklist): continue - columns[self.column_casing(col.name, output_to_lower)] = ColumnMetadata( - name=self.column_casing(col.name, output_to_lower), + columns[ + column_casing(col.name, self._config.credentials.type, output_to_lower) + ] = ColumnMetadata( + name=column_casing( + col.name, self._config.credentials.type, output_to_lower + ), type=col.type, index=col.index, comment=col.comment, ) - else: return columns - # If we don't provide a catalog we query the warehouse to get the columns - else: - with self.adapter.connection_named("dbt-osmosis"): - table = self.adapter.get_relation(*catalog_key) - - if not table: - logger().info( - ":cross_mark: Relation %s.%s.%s does not exist in target database," - " cannot resolve columns", - *catalog_key, + # fallback to adapter-based introspection + with self.adapter.connection_named("dbt-osmosis"): + table = self.adapter.get_relation( + catalog_key.database, catalog_key.schema, catalog_key.name + ) + if not table: + return columns + try: + for c in self.adapter.get_columns_in_relation(table): + if any(re.match(p, c.name) for p in blacklist): + continue + col_cased = column_casing( + c.name, self._config.credentials.type, output_to_lower ) - return columns - try: - for c in self.adapter.get_columns_in_relation(table): - if any(re.match(pattern, c.name) for pattern in blacklist): - continue - columns[self.column_casing(c.name, output_to_lower)] = ColumnMetadata( - name=self.column_casing(c.name, output_to_lower), - type=self._get_column_type(c), - index=None, # type: ignore - comment=getattr(c, "comment", None), + columns[col_cased] = ColumnMetadata( + name=col_cased, + type=c.dtype + if not ( + c.is_numeric() + and self.numeric_precision + or c.is_string() + and self.char_length ) - if hasattr(c, "flatten"): - for exp in c.flatten(): - if any(re.match(pattern, exp.name) for pattern in blacklist): - continue - columns[self.column_casing(exp.name, output_to_lower)] = ( - ColumnMetadata( - name=self.column_casing(exp.name, output_to_lower), - type=self._get_column_type(exp), - index=None, # type: ignore - comment=getattr(exp, "comment", None), - ) - ) - except Exception as error: - logger().info( - ":cross_mark: Could not resolve relation %s.%s.%s against database" - " active tables during introspective query: %s", - *catalog_key, - str(error), + else c.data_type, + index=None, + comment=getattr(c, "comment", None), ) + if hasattr(c, "flatten"): + for exp in c.flatten(): + if any(re.match(p, exp.name) for p in blacklist): + continue + col_exp_cased = column_casing( + exp.name, self._config.credentials.type, output_to_lower + ) + columns[col_exp_cased] = ColumnMetadata( + name=col_exp_cased, + type=exp.dtype + if not ( + exp.is_numeric() + and self.numeric_precision + or exp.is_string() + and self.char_length + ) + else exp.data_type, + index=None, + comment=getattr(exp, "comment", None), + ) + except Exception as e: + logger().info(f"Could not resolve columns for {catalog_key}: {e}") return columns + def get_catalog_key(self, node: ManifestNode) -> CatalogKey: + if node.resource_type == NodeType.Source: + return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) + return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) + + def propagate_documentation_downstream( + self, force_inheritance: bool = False, output_to_lower: bool = False + ) -> None: + schema_map = self.build_schema_folder_mapping(output_to_lower) + futures = [] + with self.adapter.connection_named("dbt-osmosis"): + for unique_id, node in self.filtered_models(): + futures.append( + self._pool.submit( + self._run, unique_id, node, schema_map, force_inheritance, output_to_lower + ) + ) + wait(futures) + + def build_schema_folder_mapping(self, output_to_lower: bool) -> dict[str, SchemaFileLocation]: + """ + Build a mapping of model unique_id -> (target schema yml path, existing path) + """ + self.bootstrap_sources(output_to_lower) + out = {} + for uid, node in self.filtered_models(): + sc_path = self.get_schema_path(node) + target_sc_path = self.get_target_schema_path(node) + out[uid] = SchemaFileLocation( + target=target_sc_path.resolve(), + current=sc_path.resolve() if sc_path else None, + node_type=node.resource_type, + ) + return out + def bootstrap_sources(self, output_to_lower: bool = False) -> None: - """Bootstrap sources from the dbt-osmosis vars config""" + """ + Quick approach: if the user has declared sources in 'dbt-osmosis' vars, + create or augment the schema files for them. For brevity, direct approach only. + """ performed_disk_mutation = False - blacklist = self.config.vars.vars.get("dbt-osmosis", {}).get("_blacklist", []) - for source, spec in self.config.vars.vars.get("dbt-osmosis", {}).items(): - # Skip blacklist + spec_dict = self._config.vars.get("dbt-osmosis", {}) + blacklist = spec_dict.get("_blacklist", []) + + for source, spec in spec_dict.items(): if source == "_blacklist": continue - - # Parse source config if isinstance(spec, str): schema = source - database = self.config.credentials.database + database = self._config.credentials.database path = spec elif isinstance(spec, dict): schema = spec.get("schema", source) - database = spec.get("database", self.config.credentials.database) + database = spec.get("database", self._config.credentials.database) path = spec["path"] else: - raise TypeError( - f"Invalid dbt-osmosis var config for source {source}, must be a string or dict" - ) + continue - # Check if source exists in manifest + # Check if source in manifest dbt_node = next( (s for s in self.manifest.sources.values() if s.source_name == source), None ) - if not dbt_node: - # Create a source file if it doesn't exist - osmosis_schema_path = as_path(self.config.project_root).joinpath( - self.config.model_paths[0], path.lstrip(os.sep) + # create file with tables from introspection + sc_file = ( + Path(self._config.project_root) + / self._config.model_paths[0] + / path.lstrip(os.sep) ) - relations = self.adapter.list_relations( - database=database, - schema=schema, - ) - tables = [ - { - "name": relation.identifier, - "description": "", - "columns": ( - [ - { - "name": self.column_casing(exp.name, output_to_lower), - "description": getattr( - exp, "description", getattr(c, "description", "") - ), - "data_type": getattr(exp, "dtype", getattr(c, "dtype", "")), - } - for c in self.adapter.get_columns_in_relation(relation) - for exp in [c] + getattr(c, "flatten", lambda: [])() - if not any(re.match(pattern, exp.name) for pattern in blacklist) - ] - if not self.skip_add_columns - else [] - ), - } - for relation in relations - ] - osmosis_schema_path.parent.mkdir(parents=True, exist_ok=True) - with open(osmosis_schema_path, "w") as schema_file: - logger().info( - ":syringe: Injecting source %s into dbt project", - source, - ) + relations = self.adapter.list_relations(database=database, schema=schema) + tables_data = [] + for rel in relations: + cols = [] + for c in self.adapter.get_columns_in_relation(rel): + if any(re.match(p, c.name) for p in blacklist): + continue + col_cased = column_casing( + c.name, self._config.credentials.type, output_to_lower + ) + dt = c.dtype.lower() if output_to_lower else c.dtype + cols.append({"name": col_cased, "description": "", "data_type": dt}) + tables_data.append({"name": rel.identifier, "description": "", "columns": cols}) + + sc_file.parent.mkdir(parents=True, exist_ok=True) + with open(sc_file, "w") as f: + logger().info(f"Injecting source {source} => {sc_file}") self.yaml_handler.dump( { "version": 2, @@ -528,660 +800,481 @@ def bootstrap_sources(self, output_to_lower: bool = False) -> None: "name": source, "database": database, "schema": schema, - "tables": tables, + "tables": tables_data, } ], }, - schema_file, + f, ) - self.mutations += 1 + self._mutations += 1 performed_disk_mutation = True if performed_disk_mutation: - # Reload project to pick up new sources - logger().info("...reloading project to pick up new sources") - self.safe_parse_project(reinit=True) + logger().info("Reloading project to pick up new sources.") + self.safe_parse_project(init=True) - def build_schema_folder_mapping(self, output_to_lower: bool) -> dict[str, SchemaFileLocation]: - """Builds a mapping of models or sources to their existing and target schema file paths""" + def get_schema_path(self, node: ManifestNode) -> Optional[Path]: + if node.resource_type == NodeType.Model and node.patch_path: + return Path(self._config.project_root).joinpath(node.patch_path.partition("://")[-1]) + if node.resource_type == NodeType.Source and hasattr(node, "source_name"): + return Path(self._config.project_root).joinpath(node.path) + return None - # Resolve target nodes - self.bootstrap_sources(output_to_lower) + def get_target_schema_path(self, node: ManifestNode) -> Path: + path_spec = self.get_osmosis_path_spec(node) + if not path_spec: + return Path(self._config.project_root, node.original_file_path) + sc = path_spec.format(node=node, model=node.name, parent=node.fqn[-2]) + parts = [] + if node.resource_type == NodeType.Source: + parts.append(self._config.model_paths[0]) + else: + parts.append(Path(node.original_file_path).parent) + if not (sc.endswith(".yml") or sc.endswith(".yaml")): + sc += ".yml" + parts.append(sc) + return Path(self._config.project_root, *parts) - # Container for output - schema_map = {} - logger().info("...building project structure mapping in memory") - - # Iterate over models and resolve current path vs declarative target path - for unique_id, dbt_node in self.filtered_models(): - schema_path = self.get_schema_path(dbt_node) - osmosis_schema_path = self.get_target_schema_path(dbt_node) - schema_map[unique_id] = SchemaFileLocation( - target=osmosis_schema_path.resolve(), - current=schema_path.resolve() if schema_path else None, - node_type=dbt_node.resource_type, - ) + def get_osmosis_path_spec(self, node: ManifestNode) -> Optional[str]: + if node.resource_type == NodeType.Source: + source_specs = self._config.vars.get("dbt-osmosis", {}) + source_spec = source_specs.get(node.source_name) + if isinstance(source_spec, dict): + return source_spec.get("path") + return source_spec + osm_spec = node.unrendered_config.get("dbt-osmosis") + if not osm_spec: + raise MissingOsmosisConfig(f"Config not set for model {node.name}") + return osm_spec - return schema_map + def get_columns(self, key: CatalogKey, to_lower: bool) -> list[str]: + return list(self.get_columns_meta(key, to_lower).keys()) - def _draft( - self, - schema_file: SchemaFileLocation, - unique_id: str, - blueprint: dict, - output_to_lower: bool, - ) -> None: - try: - with self.mutex: - blueprint.setdefault( - schema_file.target, - SchemaFileMigration( - output={"version": 2, "models": [], "sources": []}, supersede={} - ), - ) - if schema_file.node_type == NodeType.Model: - node = self.manifest.nodes[unique_id] - elif schema_file.node_type == NodeType.Source: - node = self.manifest.sources[unique_id] - else: - return - if schema_file.current is None: - # Bootstrapping undocumented NodeType.Model - # NodeType.Source files are guaranteed to exist by this point - with self.mutex: - assert schema_file.node_type == NodeType.Model - blueprint[schema_file.target].output["models"].append( - self.get_base_model(node, output_to_lower) - ) - else: - # Sanity check that the file exists before we try to load it, this should never be false - assert schema_file.current.exists(), f"File {schema_file.current} does not exist" - # Model/Source Is Documented but Must be Migrated - with self.mutex: - schema = self.yaml_handler.load(schema_file.current) - models_in_file: Sequence[dict[str, t.Any]] = schema.get("models", []) - sources_in_file: Sequence[dict[str, t.Any]] = schema.get("sources", []) - for documented_model in ( - model for model in models_in_file if model["name"] == node.name - ): - # Augment Documented Model - augmented_model = self.augment_existing_model( - documented_model, node, output_to_lower - ) - with self.mutex: - blueprint[schema_file.target].output["models"].append(augmented_model) - # Target to supersede current - blueprint[schema_file.target].supersede.setdefault( - schema_file.current, [] - ).append(node) - break - for documented_model, i in ( - (table, j) - for j, source in enumerate(sources_in_file) - if source["name"] == node.source_name - for table in source["tables"] - if table["name"] == node.name - ): - # Augment Documented Source - augmented_model = self.augment_existing_model( - documented_model, node, output_to_lower - ) - with self.mutex: - if not any( - s["name"] == node.source_name - for s in blueprint[schema_file.target].output["sources"] - ): - # Add the source if it doesn't exist in the blueprint - blueprint[schema_file.target].output["sources"].append( - sources_in_file[i] - ) - # Find source in blueprint - for src in blueprint[schema_file.target].output["sources"]: - if src["name"] == node.source_name: - # Find table in blueprint - for tbl in src["tables"]: - if tbl["name"] == node.name: - # Augment table - tbl = augmented_model - break - break - else: - # This should never happen - raise RuntimeError(f"Source {node.source_name} not found in blueprint?") - # Target to supersede current - blueprint[schema_file.target].supersede.setdefault( - schema_file.current, [] - ).append(node) - break + def get_base_model(self, node: ManifestNode, to_lower: bool) -> dict[str, t.Any]: + cols = self.get_columns(self.get_catalog_key(node), to_lower) + return { + "name": node.name, + "columns": [{"name": c, "description": ""} for c in cols], + } - except Exception as e: - with self.mutex: - logger().error( - "Failed to draft project structure update plan for %s: %s", unique_id, e - ) - raise e + def augment_existing_model( + self, doc: dict[str, t.Any], node: ManifestNode, to_lower: bool + ) -> dict[str, t.Any]: + existing_cols = [c["name"] for c in doc.get("columns", [])] + db_cols = self.get_columns(self.get_catalog_key(node), to_lower) + new_cols = [c for c in db_cols if not any(c.lower() == e.lower() for e in existing_cols)] + for col in new_cols: + doc.setdefault("columns", []).append({"name": col, "description": ""}) + logger().info(f"Injecting column {col} into {node.unique_id}") + return doc def draft_project_structure_update_plan( self, output_to_lower: bool = False ) -> dict[Path, SchemaFileMigration]: - """Build project structure update plan based on `dbt-osmosis:` configs set across - dbt_project.yml and model files. The update plan includes injection of undocumented models. - Unless this plan is constructed and executed by the `commit_project_restructure` function, - dbt-osmosis will only operate on models it is aware of through the existing documentation. - - Returns: - MutableMapping: Update plan where dict keys consist of targets and contents consist of - outputs which match the contents of the `models` to be output in the - target file and supersede lists of what files are superseded by a migration - """ - - # Container for output - blueprint: dict[Path, SchemaFileMigration] = {} - logger().info( - ":chart_increasing: Searching project stucture for required updates and building action" - " plan" - ) + blueprint = {} + logger().info("Building structure update plan.") futs = [] with self.adapter.connection_named("dbt-osmosis"): - for unique_id, schema_file in self.build_schema_folder_mapping(output_to_lower).items(): - if not schema_file.is_valid: + for uid, sf_loc in self.build_schema_folder_mapping(output_to_lower).items(): + if not sf_loc.is_valid: futs.append( - self.tpe.submit( - self._draft, schema_file, unique_id, blueprint, output_to_lower - ) + self._pool.submit(self._draft, sf_loc, uid, blueprint, output_to_lower) ) wait(futs) return blueprint - def cleanup_blueprint(self, blueprint: dict) -> None: - with self.mutex: - for k in list(blueprint.keys()): - # Remove if sources or models are empty - if blueprint[k].output.get("sources", None) == []: - del blueprint[k].output["sources"] - if blueprint[k].output.get("models", None) == []: - del blueprint[k].output["models"] + def _draft( + self, + sf_loc: SchemaFileLocation, + uid: str, + blueprint: dict[Path, SchemaFileMigration], + to_lower: bool, + ): + try: + with self._mutex: + if sf_loc.target not in blueprint: + blueprint[sf_loc.target] = SchemaFileMigration() + if sf_loc.node_type == NodeType.Model: + node = self.manifest.nodes[uid] + else: + node = self.manifest.sources[uid] + + if sf_loc.current is None: + # model not documented yet + with self._mutex: + if sf_loc.node_type == NodeType.Model: + blueprint[sf_loc.target].output["models"].append( + self.get_base_model(node, to_lower) + ) + else: + # We have existing doc, but we want to unify it into the new location + with self._mutex: + doc = self.yaml_handler.load(sf_loc.current) + if sf_loc.node_type == NodeType.Model: + for m in doc.get("models", []): + if m["name"] == node.name: + newm = self.augment_existing_model(m, node, to_lower) + with self._mutex: + blueprint[sf_loc.target].output["models"].append(newm) + blueprint[sf_loc.target].supersede.setdefault( + sf_loc.current, [] + ).append(node) + break + else: + for source in doc.get("sources", []): + if source["name"] == node.source_name: + for table in source["tables"]: + if table["name"] == node.name: + newt = self.augment_existing_model(table, node, to_lower) + with self._mutex: + if not any( + s["name"] == node.source_name + for s in blueprint[sf_loc.target].output["sources"] + ): + blueprint[sf_loc.target].output["sources"].append( + source + ) + for s in blueprint[sf_loc.target].output["sources"]: + if s["name"] == node.source_name: + for t2 in s["tables"]: + if t2["name"] == node.name: + t2.update(newt) + break + blueprint[sf_loc.target].supersede.setdefault( + sf_loc.current, [] + ).append(node) + break + except Exception as e: + logger().error(f"Drafting structure plan for {uid} failed: {e}") + raise e + + def cleanup_blueprint( + self, blueprint: dict[Path, SchemaFileMigration] + ) -> dict[Path, SchemaFileMigration]: + for k in list(blueprint.keys()): + out = blueprint[k].output + # remove empty models/sources + if "models" in out and not out["models"]: + del out["models"] + if "sources" in out and not out["sources"]: + del out["sources"] + if not out.get("models") and not out.get("sources"): + del blueprint[k] return blueprint def commit_project_restructure_to_disk( self, - blueprint: dict[Path, SchemaFileMigration] | None = None, + blueprint: Optional[dict[Path, SchemaFileMigration]] = None, output_to_lower: bool = False, ) -> bool: - """Given a project restrucure plan of pathlib Paths to a mapping of output and supersedes - which is in itself a mapping of Paths to model names, commit changes to filesystem to - conform project to defined structure as code fully or partially superseding existing models - as needed. - - Args: - blueprint (dict[Path, SchemaFileMigration]): Project restructure plan as typically - created by `build_project_structure_update_plan` - output_to_lower (bool): set column casing to lowercase. - - Returns: - bool: True if the project was restructured, False if no action was required - """ - - # Build blueprint if not user supplied if not blueprint: blueprint = self.draft_project_structure_update_plan(output_to_lower) - blueprint = self.cleanup_blueprint(blueprint) - - # Verify we have actions in the plan if not blueprint: - logger().info(":1st_place_medal: Project structure approved") + logger().info("Project structure is already conformed.") return False - - # Print plan for user auditability self.pretty_print_restructure_plan(blueprint) - logger().info( - ":construction_worker: Executing action plan and conforming projecting schemas to" - " defined structure" - ) - for target, structure in blueprint.items(): + for target, struct in blueprint.items(): if not target.exists(): - # Build File - logger().info(":construction: Building schema file %s", target.name) + logger().info(f"Creating schema file {target}") if not self.dry_run: - target.parent.mkdir(exist_ok=True, parents=True) + target.parent.mkdir(parents=True, exist_ok=True) target.touch() - self.yaml_handler.dump(structure.output, target) - self.mutations += 1 - + self.yaml_handler.dump(struct.output, target) + self._mutations += 1 else: - # Update File - logger().info(":toolbox: Updating schema file %s", target.name) - target_schema: dict[str, t.Any] | None = self.yaml_handler.load(target) - # Add version if not present - if not target_schema: - target_schema = {"version": 2} - elif "version" not in target_schema: - target_schema["version"] = 2 - # Add models and sources (if available) to target schema - if structure.output["models"]: - target_schema.setdefault("models", []).extend(structure.output["models"]) - if structure.output.get("sources") is not None: - target_schema.setdefault("sources", []).extend(structure.output["sources"]) + logger().info(f"Updating schema file {target}") + existing = self.yaml_handler.load(target) + if not existing: + existing = {"version": 2} + if "version" not in existing: + existing["version"] = 2 + + if "models" in struct.output: + existing.setdefault("models", []).extend(struct.output["models"]) + if "sources" in struct.output: + existing.setdefault("sources", []).extend(struct.output["sources"]) if not self.dry_run: - self.yaml_handler.dump(target_schema, target) - self.mutations += 1 - - # Clean superseded schema files - for dir, nodes in structure.supersede.items(): - raw_schema: dict[str, t.Any] = self.yaml_handler.load(dir) - # Gather models and sources marked for superseding - models_marked_for_superseding = set( - node.name for node in nodes if node.resource_type == NodeType.Model - ) - sources_marked_for_superseding = set( - (node.source_name, node.name) - for node in nodes - if node.resource_type == NodeType.Source - ) - # Gather models and sources in schema file - models_in_schema = set(m["name"] for m in raw_schema.get("models", [])) - sources_in_schema = set( - (s["name"], t["name"]) - for s in raw_schema.get("sources", []) - for t in s.get("tables", []) - ) - # set difference to determine non-superseded models and sources - non_superseded_models = models_in_schema - models_marked_for_superseding - non_superseded_sources = sources_in_schema - sources_marked_for_superseding - if len(non_superseded_models) + len(non_superseded_sources) == 0: - logger().info(":rocket: Superseded schema file %s", dir.name) + self.yaml_handler.dump(existing, target) + self._mutations += 1 + + # handle superseded + for sup_path, nodes in struct.supersede.items(): + raw_sc = self.yaml_handler.load(sup_path) + # figure out which ones to remove + to_remove_models = {n.name for n in nodes if n.resource_type == NodeType.Model} + to_remove_sources = { + (n.source_name, n.name) for n in nodes if n.resource_type == NodeType.Source + } + + keep_models = [] + for m in raw_sc.get("models", []): + if m["name"] not in to_remove_models: + keep_models.append(m) + raw_sc["models"] = keep_models + + # remove relevant source tables + keep_src = [] + for s in raw_sc.get("sources", []): + keep_tables = [] + for t_ in s.get("tables", []): + if (s["name"], t_["name"]) not in to_remove_sources: + keep_tables.append(t_) + if keep_tables: + s["tables"] = keep_tables + keep_src.append(s) + raw_sc["sources"] = keep_src + + # if file is empty => remove it + if (not raw_sc.get("models")) and (not raw_sc.get("sources")): + logger().info(f"Superseding entire file {sup_path}") if not self.dry_run: - dir.unlink(missing_ok=True) - if len(list(dir.parent.iterdir())) == 0: - dir.parent.rmdir() + sup_path.unlink(missing_ok=True) + if sup_path.parent.exists() and not any(sup_path.parent.iterdir()): + sup_path.parent.rmdir() else: - # Preserve non-superseded models - preserved_models = [] - for model in raw_schema.get("models", []): - if model["name"] in non_superseded_models: - preserved_models.append(model) - raw_schema["models"] = preserved_models - # Preserve non-superseded sources - ix = [] - for i, source in enumerate(raw_schema.get("sources", [])): - for j, table in enumerate(source.get("tables", [])): - if (source["name"], table["name"]) not in non_superseded_sources: - ix.append((i, j)) - for i, j in reversed(ix): - raw_schema["sources"][i]["tables"].pop(j) - ix = [] - for i, source in enumerate(raw_schema.get("sources", [])): - if not source["tables"]: - ix.append(i) - for i in reversed(ix): - if not raw_schema["sources"][i]["tables"]: - raw_schema["sources"].pop(i) if not self.dry_run: - self.yaml_handler.dump(raw_schema, dir) - self.mutations += 1 - logger().info( - ":satellite: Model documentation migrated from %s to %s", - dir.name, - target.name, - ) + self.yaml_handler.dump(raw_sc, sup_path) + self._mutations += 1 + logger().info(f"Migrated doc from {sup_path} -> {target}") return True @staticmethod def pretty_print_restructure_plan(blueprint: dict[Path, SchemaFileMigration]) -> None: - logger().info( - list( - map( - lambda plan: ( - [s.name for s in blueprint[plan].supersede] or "CREATE", - "->", - plan, - ), - blueprint.keys(), - ) - ) - ) - + summary = [] + for plan in blueprint.keys(): + files_superseded = [s.name for s in blueprint[plan].supersede] or ["CREATE"] + summary.append((files_superseded, "->", plan.name)) + logger().info(summary) + + ############################################################################ + # Column Sync + ############################################################################ @staticmethod def get_column_sets( - database_columns: Iterable[str], - yaml_columns: Iterable[str], - documented_columns: Iterable[str], - ) -> tuple[list[str], list[str], list[str]]: - """Returns: - missing_columns: Columns in database not in dbt -- will be injected into schema file - undocumented_columns: Columns missing documentation -- descriptions will be inherited and - injected into schema file where prior knowledge exists - extra_columns: Columns in schema file not in database -- will be removed from schema file + database_cols: Iterable[str], + yaml_cols: Iterable[str], + documented_cols: Iterable[str], + ) -> t.tuple[list[str], list[str], list[str]]: """ - missing_columns = [ - x for x in database_columns if x.lower() not in (y.lower() for y in yaml_columns) - ] - undocumented_columns = [ - x for x in database_columns if x.lower() not in (y.lower() for y in documented_columns) - ] - extra_columns = [ - x for x in yaml_columns if x.lower() not in (y.lower() for y in database_columns) + Return: (missing_in_yaml, undocumented_in_yaml, extra_in_yaml) + """ + missing = [x for x in database_cols if x.lower() not in (y.lower() for y in yaml_cols)] + undocumented = [ + x for x in database_cols if x.lower() not in (y.lower() for y in documented_cols) ] - return missing_columns, undocumented_columns, extra_columns + extra = [x for x in yaml_cols if x.lower() not in (y.lower() for y in database_cols)] + return missing, undocumented, extra def _run( self, - unique_id: str, + uid: str, node: ManifestNode, schema_map: dict[str, SchemaFileLocation], - force_inheritance: bool = False, - output_to_lower: bool = False, + force_inheritance: bool, + output_to_lower: bool, ): try: - with self.mutex: - logger().info(":point_right: Processing model: [bold]%s[/bold]", unique_id) - # Get schema file path, must exist to propagate documentation - schema_path: SchemaFileLocation | None = schema_map.get(unique_id) - if schema_path is None or schema_path.current is None: - with self.mutex: - logger().info( - ":bow: No valid schema file found for model %s", unique_id - ) # We can't take action - return - - # Build sets - logger().info(":mag: Resolving columns in database") - database_columns_ordered = self.get_columns(self.get_catalog_key(node), output_to_lower) - columns_db_meta = self.get_columns_meta(self.get_catalog_key(node), output_to_lower) - database_columns: set[str] = set(database_columns_ordered) - yaml_columns_ordered = [column for column in node.columns] - yaml_columns: set[str] = set(yaml_columns_ordered) - - if not database_columns: - with self.mutex: + with self._mutex: + logger().info(f"Processing model: {uid}") + sf_loc = schema_map.get(uid) + if not sf_loc or not sf_loc.current: + with self._mutex: + logger().info(f"No schema file for {uid}, skipping.") + return + db_cols_list = self.get_columns(self.get_catalog_key(node), output_to_lower) + if not db_cols_list: + with self._mutex: logger().info( - ":safety_vest: Unable to resolve columns in database, falling back to" - " using yaml columns as base column set for model %s", - unique_id, + f"No database columns found for {uid}, falling back to yaml columns." ) - database_columns_ordered = yaml_columns_ordered - database_columns = yaml_columns + db_cols_list = list(node.columns.keys()) - # Get documentated columns - documented_columns: set[str] = set( - column - for column, info in node.columns.items() + db_cols_set = set(db_cols_list) + yaml_cols_list = list(node.columns.keys()) + documented_cols_set = { + c + for c, info in node.columns.items() if info.description and info.description not in self.placeholders - ) + } - # Queue - missing_columns, undocumented_columns, extra_columns = self.get_column_sets( - database_columns, yaml_columns, documented_columns + missing, undocumented, extra = self.get_column_sets( + db_cols_list, yaml_cols_list, documented_cols_set ) if force_inheritance: - # Consider all columns "undocumented" so that inheritance is not selective - undocumented_columns = database_columns - - # Engage - n_cols_added = 0 - n_cols_doc_inherited = 0 - n_cols_removed = 0 - n_cols_data_type_changed = 0 - n_cols_description_changed = 0 - - with self.mutex: - schema_file = self.yaml_handler.load(schema_path.current) - section = self.maybe_get_section_from_schema_file(schema_file, node) - if section is None: # If we can't find the section, we can't take action - logger().info(":thumbs_up: No actions needed for %s", node.unique_id) - return + undocumented = list(db_cols_set) # treat all as needing doc - should_dump = False - ( - n_cols_added, - n_cols_doc_inherited, - n_cols_removed, - n_cols_data_type_changed, - n_cols_description_changed, - ) = (0, 0, 0, 0, 0) - if len(missing_columns) > 0 or len(undocumented_columns) or len(extra_columns) > 0: - # Update schema file + with self._mutex: + sc_data = self.yaml_handler.load(sf_loc.current) + section = self.maybe_get_section_from_schema_file(sc_data, node) + if not section: + logger().info(f"No section in {sf_loc.current} for {uid}") + return + # Perform updates + n_added = n_doc_inh = n_removed = n_type_changed = n_desc_changed = 0 + if any([missing, undocumented, extra]): ( - n_cols_added, - n_cols_doc_inherited, - n_cols_removed, - n_cols_data_type_changed, - n_cols_description_changed, + n_added, + n_doc_inh, + n_removed, + n_type_changed, + n_desc_changed, ) = self.update_schema_file_and_node( - missing_columns, - undocumented_columns, - extra_columns, + missing, + undocumented, + extra, node, section, - columns_db_meta, + self.get_columns_meta(self.get_catalog_key(node), output_to_lower), output_to_lower, ) - if ( - n_cols_added - + n_cols_doc_inherited - + n_cols_removed - + n_cols_data_type_changed - + n_cols_description_changed - > 0 - ): - should_dump = True - if tuple(database_columns_ordered) != tuple(yaml_columns_ordered): - # Sort columns in schema file to match database - logger().info( - ":wrench: Reordering columns in schema file for model %s", unique_id - ) - last_ix: int = int( - 1e6 - ) # Arbitrary starting value which increments, ensuring sort order + reorder = tuple(db_cols_list) != tuple(yaml_cols_list) + if reorder: - def _sort_columns(column_info: dict) -> int: - nonlocal last_ix + def _sort(c: dict[str, t.Any]) -> int: try: - normalized_name = self.column_casing( - column_info["name"], output_to_lower + return db_cols_list.index( + column_casing( + c["name"], self._config.credentials.type, output_to_lower + ) ) - return database_columns_ordered.index(normalized_name) - except IndexError: - last_ix += 1 - return last_ix - - section["columns"].sort(key=_sort_columns) - should_dump = True - if should_dump and not self.dry_run: - # Dump the mutated schema file back to the disk - self.yaml_handler.dump(schema_file, schema_path.current) - self.mutations += 1 - logger().info( - ":sparkles: Schema file %s updated", - schema_path.current, - ) + except ValueError: + return 999999 + + section["columns"].sort(key=_sort) + + if ( + n_added + n_doc_inh + n_removed + n_type_changed + n_desc_changed or reorder + ) and not self.dry_run: + self.yaml_handler.dump(sc_data, sf_loc.current) + self._mutations += 1 + logger().info(f"Updated {sf_loc.current}") else: - logger().info( - ":sparkles: Schema file is up to date for model %s", - unique_id, - ) + logger().info(f"{sf_loc.current} is up to date") - # Print Audit Report - n_cols = float(len(database_columns)) - n_cols_documented = float(len(documented_columns)) + n_cols_doc_inherited - perc_coverage = ( - min(100.0 * round(n_cols_documented / n_cols, 3), 100.0) - if n_cols > 0 - else "Unable to Determine" - ) - if logger().level <= 10: - with self.mutex: - logger().debug( - self.audit_report.format( - database=node.database, - schema=node.schema, - table=node.name, - total_columns=n_cols, - n_cols_added=n_cols_added, - n_cols_doc_inherited=n_cols_doc_inherited, - n_cols_removed=n_cols_removed, - coverage=perc_coverage, - ) - ) except Exception as e: - with self.mutex: - logger().error("Error occurred while processing model %s: %s", unique_id, e) + logger().error(f"Error while processing {uid}: {e}") raise e - def propagate_documentation_downstream( - self, force_inheritance: bool = False, output_to_lower: bool = False - ) -> None: - schema_map = self.build_schema_folder_mapping(output_to_lower) - futs = [] - with self.adapter.connection_named("dbt-osmosis"): - for unique_id, node in self.filtered_models(): - futs.append( - self.tpe.submit( - self._run, unique_id, node, schema_map, force_inheritance, output_to_lower - ) - ) - wait(futs) + @staticmethod + def maybe_get_section_from_schema_file( + yaml_data: dict[str, t.Any], node: ManifestNode + ) -> Optional[dict[str, t.Any]]: + if node.resource_type == NodeType.Source: + for s in yaml_data.get("sources", []): + for t_ in s.get("tables", []): + if s["name"] == node.source_name and t_["name"] == node.name: + return t_ + else: + for m in yaml_data.get("models", []): + if m["name"] == node.name: + return m + return None @staticmethod def remove_columns_not_in_database( extra_columns: Iterable[str], node: ManifestNode, - yaml_file_model_section: dict[str, t.Any], + yaml_section: dict[str, t.Any], ) -> int: - """Removes columns found in dbt model that do not exist in database from both node - and model simultaneously - THIS MUTATES THE NODE AND MODEL OBJECTS so that state is always accurate""" - changes_committed = 0 - for column in extra_columns: - node.columns.pop(column, None) - yaml_file_model_section["columns"] = [ - c for c in yaml_file_model_section["columns"] if c["name"] != column - ] - changes_committed += 1 - logger().info( - ":wrench: Removing column %s from dbt schema for model %s", column, node.unique_id - ) - return changes_committed + c = 0 + for e in extra_columns: + node.columns.pop(e, None) + yaml_section["columns"] = [col for col in yaml_section["columns"] if col["name"] != e] + c += 1 + return c def update_columns_attribute( self, node: ManifestNode, - yaml_file_model_section: dict[str, t.Any], - columns_db_meta: dict[str, ColumnMetadata], - attribute_name: str, + yaml_section: dict[str, t.Any], + db_meta: dict[str, ColumnMetadata], + attr: str, meta_key: str, - skip_attribute_update: t.Any, - output_to_lower: bool = False, + skip_flag: bool, + output_to_lower: bool, ) -> int: - changes_committed = 0 - if (skip_attribute_update is True) or (skip_attribute_update is None): - return changes_committed - for column in columns_db_meta: - cased_column_name = self.column_casing(column, output_to_lower) - if cased_column_name in node.columns: - column_meta_obj = columns_db_meta.get(cased_column_name) - if column_meta_obj: - column_meta = getattr(column_meta_obj, meta_key, "") - if column_meta is None: - column_meta = "" - current_value = getattr(node.columns[cased_column_name], attribute_name, "") - if current_value == column_meta: - continue - setattr(node.columns[cased_column_name], attribute_name, column_meta) - for model_column in yaml_file_model_section["columns"]: + if skip_flag: + return 0 + changed = 0 + for col_name, col_meta in db_meta.items(): + if col_name in node.columns: + new_val = getattr(col_meta, meta_key, "") or "" + old_val = getattr(node.columns[col_name], attr, "") + if new_val and old_val != new_val: + setattr(node.columns[col_name], attr, new_val) + for c in yaml_section["columns"]: if ( - self.column_casing(model_column["name"], output_to_lower) - == cased_column_name + column_casing(c["name"], self._config.credentials.type, output_to_lower) + == col_name ): - if output_to_lower: - model_column.update({attribute_name: column_meta.lower()}) - else: - model_column.update({attribute_name: column_meta}) - changes_committed += 1 - return changes_committed + if output_to_lower and isinstance(new_val, str): + new_val = new_val.lower() + c[attr] = new_val + changed += 1 + return changed - @staticmethod def add_missing_cols_to_node_and_model( - missing_columns: Iterable, + self, + missing_cols: Iterable[str], node: ManifestNode, - yaml_file_model_section: dict[str, t.Any], - columns_db_meta: dict[str, ColumnMetadata], + yaml_section: dict[str, t.Any], + db_meta: dict[str, ColumnMetadata], output_to_lower: bool, ) -> int: - """Add missing columns to node and model simultaneously - THIS MUTATES THE NODE AND MODEL OBJECTS so that state is always accurate""" - changes_committed = 0 - for column in missing_columns: - if output_to_lower: - node.columns[column] = ColumnInfo.from_dict( - { - "name": column.lower(), - "description": columns_db_meta[column].comment or "", - "data_type": columns_db_meta[column].type.lower(), - } - ) - yaml_file_model_section.setdefault("columns", []).append( - { - "name": column.lower(), - "description": columns_db_meta[column].comment or "", - "data_type": columns_db_meta[column].type.lower(), - } - ) - else: - node.columns[column] = ColumnInfo.from_dict( - { - "name": column, - "description": columns_db_meta[column].comment or "", - "data_type": columns_db_meta[column].type, - } - ) - yaml_file_model_section.setdefault("columns", []).append( - { - "name": column, - "description": columns_db_meta[column].comment or "", - "data_type": columns_db_meta[column].type, - } - ) - changes_committed += 1 - logger().info( - ":syringe: Injecting column %s into dbt schema for model %s", column, node.unique_id + c = 0 + for col in missing_cols: + if col not in db_meta: + continue + dtype = db_meta[col].type or "" + desc = db_meta[col].comment or "" + meta_name = col.lower() if output_to_lower else col + meta_type = dtype.lower() if output_to_lower else dtype + node.columns[col] = ColumnInfo.from_dict( + {"name": meta_name, "description": desc, "data_type": meta_type} + ) + yaml_section.setdefault("columns", []).append( + {"name": meta_name, "description": desc, "data_type": meta_type} ) - return changes_committed + c += 1 + return c def update_schema_file_and_node( self, - missing_columns: Iterable[str], - undocumented_columns: Iterable[str], - extra_columns: Iterable[str], + missing_cols: Iterable[str], + undocumented_cols: Iterable[str], + extra_cols: Iterable[str], node: ManifestNode, - section: dict[str, t.Any], - columns_db_meta: dict[str, ColumnMetadata], + yaml_section: dict[str, t.Any], + db_meta: dict[str, ColumnMetadata], output_to_lower: bool, - ) -> tuple[int, int, int, int, int]: - """Take action on a schema file mirroring changes in the node.""" - logger().info(":microscope: Looking for actions for %s", node.unique_id) - n_cols_added = 0 + ) -> t.tuple[int, int, int, int, int]: + n_added = 0 + n_doc_inherited = 0 + n_removed = 0 + n_type_updated = 0 + n_desc_updated = 0 + if not self.skip_add_columns: - n_cols_added = self.add_missing_cols_to_node_and_model( - missing_columns, node, section, columns_db_meta, output_to_lower + n_added = self.add_missing_cols_to_node_and_model( + missing_cols, node, yaml_section, db_meta, output_to_lower ) knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( self.manifest, node, self.placeholders, - self.base_config.project_dir, + self._config.project_root, self.use_unrendered_descriptions, ) - n_cols_doc_inherited = ( + n_doc_inherited = ( ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, + undocumented_cols, node, - section, + yaml_section, knowledge, self.skip_add_tags, self.skip_merge_meta, @@ -1189,52 +1282,24 @@ def update_schema_file_and_node( self.add_inheritance_for_specified_keys, ) ) - n_cols_data_type_updated = self.update_columns_attribute( + n_type_updated = self.update_columns_attribute( node, - section, - columns_db_meta, - "data_type", - "type", - self.skip_add_data_types, - self.output_to_lower, + yaml_section, + db_meta, + attr="data_type", + meta_key="type", + skip_flag=self.skip_add_data_types, + output_to_lower=output_to_lower, ) - n_cols_description_updated = self.update_columns_attribute( + # We piggyback the "catalog_file" presence as "update description?" flag in original code + n_desc_updated = self.update_columns_attribute( node, - section, - columns_db_meta, - "description", - "comment", - self.catalog_file, - self.output_to_lower, - ) - n_cols_removed = self.remove_columns_not_in_database(extra_columns, node, section) - return ( - n_cols_added, - n_cols_doc_inherited, - n_cols_removed, - n_cols_data_type_updated, - n_cols_description_updated, + yaml_section, + db_meta, + attr="description", + meta_key="comment", + skip_flag=(self.catalog_file is None), + output_to_lower=output_to_lower, ) - - @staticmethod - def maybe_get_section_from_schema_file( - yaml_file: dict[str, t.Any], node: ManifestNode - ) -> dict[str, t.Any] | None: - """Get the section of a schema file that corresponds to a node.""" - if node.resource_type == NodeType.Source: - section = next( - ( - table - for source in yaml_file["sources"] - if node.source_name == source["name"] - for table in source["tables"] - if table["name"] == node.name - ), - None, - ) - else: - section = next( - (s for s in yaml_file["models"] if s["name"] == node.name), - None, - ) - return section + n_removed = self.remove_columns_not_in_database(extra_cols, node, yaml_section) + return n_added, n_doc_inherited, n_removed, n_type_updated, n_desc_updated From 690f04ed0dfbf74a8152ff7d550e38b1676d7626 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Sun, 29 Dec 2024 00:31:26 -0700 Subject: [PATCH 04/46] wip: further refactoring into functional style module --- src/dbt_osmosis/core/osmosis.py | 1537 ++++++++--------------------- src/dbt_osmosis/core/osmosis_o.py | 1305 ++++++++++++++++++++++++ src/dbt_osmosis/main.py | 8 +- 3 files changed, 1709 insertions(+), 1141 deletions(-) create mode 100644 src/dbt_osmosis/core/osmosis_o.py diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 89f00df8..d22c4cf7 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -1,57 +1,52 @@ # pyright: reportUnknownVariableType=false, reportPrivateImportUsage=false, reportAny=false, reportUnknownMemberType=false + +import argparse import json import logging -import os import re -import sys import threading import time import typing as t import uuid -from argparse import Namespace -from collections import OrderedDict, UserDict -from collections.abc import Iterable, Iterator, MutableMapping +from collections import OrderedDict +from collections.abc import Iterable, Iterator from concurrent.futures import ThreadPoolExecutor, wait from contextlib import contextmanager -from copy import copy from dataclasses import dataclass, field from functools import lru_cache from itertools import chain from pathlib import Path +import dbt.flags as dbt_flags import ruamel.yaml -from dbt.adapters.factory import get_adapter_class_by_name +from dbt.adapters.contracts.connection import AdapterResponse +from dbt.adapters.factory import Adapter, get_adapter_class_by_name from dbt.config.runtime import RuntimeConfig from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import ColumnInfo, ManifestNode +from dbt.contracts.graph.nodes import ColumnInfo, ManifestNode, ManifestSQLNode, SourceDefinition from dbt.contracts.results import CatalogArtifact, CatalogKey, CatalogTable, ColumnMetadata -from dbt.flags import set_from_args from dbt.node_types import NodeType from dbt.parser.manifest import ManifestLoader, process_node from dbt.parser.sql import SqlBlockParser, SqlMacroParser from dbt.task.sql import SqlCompileRunner from dbt.tracking import disable_tracking -# Disabling dbt tracking for non-standard usage disable_tracking() +EMPTY_STRING = "" -def logger() -> logging.Logger: - """Get the log handle for dbt-osmosis""" - return logging.getLogger("dbt-osmosis") +logger = logging.getLogger("dbt-osmosis") def has_jinja(code: str) -> bool: - """Check if code contains Jinja tokens""" + """Check if a code string contains jinja tokens.""" return any(token in code for token in ("{{", "}}", "{%", "%}", "{#", "#}")) def column_casing(column: str, credentials_type: str, to_lower: bool) -> str: - """Utility to handle column name casing based on dbt adapter & user flag.""" - # If quoted in snowflake, pass verbatim + """Apply case normalization to a column name based on the credentials type.""" if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): return column - # Otherwise apply user-specified transformations if to_lower: return column.lower() if credentials_type == "snowflake": @@ -59,55 +54,19 @@ def column_casing(column: str, credentials_type: str, to_lower: bool) -> str: return column -class YamlHandler(ruamel.yaml.YAML): - """A ruamel.yaml wrapper to handle dbt YAML files with sane defaults.""" - - def __init__(self, **kwargs: t.Any) -> None: - super().__init__(**kwargs) - self.indent(mapping=2, sequence=4, offset=2) - self.width: int = 800 - self.preserve_quotes: bool = True - self.default_flow_style: bool = False - self.encoding: str = os.getenv("DBT_OSMOSIS_ENCODING", "utf-8") - - -@dataclass -class SchemaFileLocation: - """Dataclass to store schema file location details.""" - - target: Path - current: Path | None = None - node_type: NodeType = NodeType.Model - - @property - def is_valid(self) -> bool: - return self.current == self.target - - -@dataclass -class SchemaFileMigration: - """Dataclass to store schema file migration details.""" - - output: dict[str, t.Any] = field( - default_factory=lambda: {"version": 2, "models": [], "sources": []} - ) - supersede: dict[Path, list[str]] = field(default_factory=dict) - - @dataclass class DbtConfiguration: - """Stores dbt project configuration in a namespace""" + """Configuration for a dbt project.""" project_dir: str profiles_dir: str + target: str | None = None + profile: str | None = None threads: int = 1 single_threaded: bool = True which: str = "" - target: str | None = None - profile: str | None = None - - DEBUG: bool = False + debug: bool = False _vars: str | dict[str, t.Any] = field(default_factory=dict) def __post_init__(self) -> None: @@ -121,320 +80,95 @@ def vars(self) -> str: return self._vars @vars.setter - def vars(self, v: t.Any) -> None: - if not isinstance(v, (str, dict)): + def vars(self, value: t.Any) -> None: + if not isinstance(value, (str, dict)): raise ValueError("vars must be a string or dict") - self._vars = v - - -class DbtManifestProxy(UserDict[str, t.Any]): - """Proxy for the manifest's flat_graph, read-only by design.""" - - def _readonly(self, *args: t.Any, **kwargs: t.Any) -> t.Never: - _ = args, kwargs - raise RuntimeError("Cannot modify DbtManifestProxy") - - __setitem__: t.Callable[..., None] = _readonly - __delitem__: t.Callable[..., None] = _readonly - pop: t.Callable[..., None] = _readonly - popitem: t.Callable[..., t.Any] = _readonly - clear: t.Callable[..., None] = _readonly - update: t.Callable[..., None] = _readonly - setdefault: t.Callable[..., None] = _readonly + self._vars = value + + +def config_to_namespace(cfg: DbtConfiguration) -> argparse.Namespace: + """Convert a DbtConfiguration into a dbt-friendly argparse.Namespace.""" + return argparse.Namespace( + project_dir=cfg.project_dir, + profiles_dir=cfg.profiles_dir, + target=cfg.target, + profile=cfg.profile, + threads=cfg.threads, + single_threaded=cfg.single_threaded, + which=cfg.which, + vars=cfg.vars, + DEBUG=cfg.debug, + ) -@dataclass -class DbtAdapterExecutionResult: - adapter_response: t.Any - table: t.Any - raw_code: str - compiled_code: str +def create_yaml_instance( + indent_mapping: int = 2, + indent_sequence: int = 4, + indent_offset: int = 2, + width: int = 800, + preserve_quotes: bool = True, + default_flow_style: bool = False, + encoding: str = "utf-8", +) -> ruamel.yaml.YAML: + """Returns a ruamel.yaml.YAML instance configured with the provided settings.""" + y = ruamel.yaml.YAML() + y.indent(mapping=indent_mapping, sequence=indent_sequence, offset=indent_offset) + y.width = width + y.preserve_quotes = preserve_quotes + y.default_flow_style = default_flow_style + y.encoding = encoding + return y @dataclass -class DbtAdapterCompilationResult: - raw_code: str - compiled_code: str - node: ManifestNode - injected_code: str | None = None - - -def find_default_project_dir() -> str: - cwd = Path.cwd() - # Walk up if needed - for p in [cwd] + list(cwd.parents): - if (p / "dbt_project.yml").exists(): - return str(p.resolve()) - return str(cwd.resolve()) - - -def find_default_profiles_dir() -> str: - # Common fallback for DBT_PROFILES_DIR - if (Path.cwd() / "profiles.yml").exists(): - return str(Path.cwd().resolve()) - return str(Path.home() / ".dbt") - - -class DbtProject: - """Wraps dbt's in-memory project & adapter, enabling queries, compilation, etc.""" - - ADAPTER_TTL: float = 3600.0 - - def __init__( - self, - target: str | None = None, - profiles_dir: str | None = None, - project_dir: str | None = None, - threads: int = 1, - vars: str | dict[str, t.Any] | None = None, - profile: str | None = None, - ): - if not profiles_dir: - profiles_dir = find_default_profiles_dir() - if not project_dir: - project_dir = find_default_project_dir() - - self.base_config: DbtConfiguration = DbtConfiguration( - project_dir=project_dir, - profiles_dir=profiles_dir, - target=target, - threads=threads, - profile=profile, - ) - if vars: - self.base_config.vars = vars - - self.adapter_mutex: threading.Lock = threading.Lock() - self.parsing_mutex: threading.Lock = threading.Lock() - self.manifest_mutation_mutex: threading.Lock = threading.Lock() - - self._config: RuntimeConfig | None = None - self._manifest: Manifest | None = None - self.parse_project(init=True) - - self._sql_parser: SqlBlockParser | None = None - self._macro_parser: SqlMacroParser | None = None - self._adapter_created_at: float = 0.0 - - @property - def config(self) -> RuntimeConfig: - """Get the dbt project configuration.""" - if self._config is None: - raise RuntimeError("DbtProject not initialized. parse_project() must be called first.") - return self._config - - @property - def manifest(self) -> Manifest: - """Get the dbt project manifest.""" - if self._manifest is None: - raise RuntimeError("DbtProject not initialized. parse_project() must be called first.") - return self._manifest - - def parse_project(self, init: bool = False) -> None: - """Parse the dbt project configuration and manifest.""" - with self.parsing_mutex: - if init: - ns = Namespace( - **self.base_config.__dict__ - ) # TODO: replace with method call to handle _vars prop - set_from_args(ns, ns) - self._config = RuntimeConfig.from_args(ns) - self.initialize_adapter() - loader = ManifestLoader( - self.config, - self.config.load_dependencies(), - self.adapter.connections.set_query_header, - ) - self._manifest = loader.load() - self._manifest.build_flat_graph() - loader.save_macros_to_adapter(self.adapter) - self._sql_parser = None - self._macro_parser = None - - def safe_parse_project(self, init: bool = False) -> None: - """Safely re-parse the dbt project configuration and manifest preserving internal state on error.""" - old_config = copy(getattr(self, "config", None)) - try: - self.parse_project(init=init) - except Exception as exc: - if old_config: - self._config = old_config - raise exc - # Write manifest to disk here - self.write_manifest_artifact() - - def initialize_adapter(self) -> None: - """Initialize the dbt adapter.""" - if hasattr(self, "_adapter"): - try: - self.adapter.connections.cleanup_all() - except Exception: - pass - try: - adapter_cls = get_adapter_class_by_name( - self.base_config.target or self.base_config.profile or "" - ) - except Exception: - # fallback if none found (dbt should raise if invalid type) - raise RuntimeError("Could not find an adapter class by name.") - if not adapter_cls: - raise RuntimeError("No valid adapter class found.") - - # NOTE: this smooths over an API change upstream - try: - self.adapter = adapter_cls(self.config) - except TypeError: - from dbt.mp_context import get_mp_context - - self.adapter = adapter_cls(self.config, get_mp_context()) # pyright: ignore[reportCallIssue] - - self.adapter.connections.set_connection_name() - self._adapter_created_at = time.time() - setattr(self.config, "adapter", self.adapter) +class SchemaFileLocation: + """Describes the current and target location of a schema file.""" - @property - def adapter(self) -> t.Any: - """Get the dbt adapter. Automatically refreshes if TTL exceeded.""" - if (time.time() - getattr(self, "_adapter_created_at", 0)) > self.ADAPTER_TTL: - self.initialize_adapter() - return self._adapter # FIXME: add to init - - @adapter.setter - def adapter(self, v: t.Any) -> None: - """Set the dbt adapter. Thread-safe.""" - if self.adapter_mutex.acquire(blocking=False): - try: - setattr(self, "_adapter", v) - v.debug_query() # Verify connection - self._adapter_created_at = time.time() - setattr(self.config, "adapter", v) - finally: - self.adapter_mutex.release() + target: Path + current: Path | None = None + node_type: NodeType = NodeType.Model @property - def manifest_dict(self) -> DbtManifestProxy: - """Get a read-only proxy for the manifest's flat_graph.""" - return DbtManifestProxy(self.manifest.flat_graph) - - def write_manifest_artifact(self) -> None: - """Convenience method to write the manifest to disk.""" - artifact_path = Path(self.config.project_root) / self.config.target_path / "manifest.json" - self.manifest.write(str(artifact_path)) - - def clear_internal_caches(self) -> None: - """Clear internal lru caches for the project instance.""" - self.compile_code.cache_clear() - self.unsafe_compile_code.cache_clear() - - def get_relation(self, database: str, schema: str, name: str) -> t.Any: - """Get a relation from the adapter.""" - return self.adapter.get_relation(database, schema, name) - - def adapter_execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False - ) -> tuple[t.Any, t.Any]: - """Convenience method to execute a query via the adapter.""" - return self.adapter.execute(sql, auto_begin, fetch) - - def execute_code(self, raw_code: str) -> DbtAdapterExecutionResult: - """Execute SQL, compiling jinja if necessary and wrapping result in a consistent interface.""" - compiled = raw_code - if has_jinja(raw_code): - compiled = self.compile_code(raw_code).compiled_code - resp, table = self.adapter_execute(compiled, fetch=True) - return DbtAdapterExecutionResult(resp, table, raw_code, compiled) - - @contextmanager - def generate_server_node(self, sql: str, node_name: str = "anonymous_node"): - """Generate a server node, process it, and clear it after use. Mutates manifest during context.""" - with self.manifest_mutation_mutex: - self._clear_node(node_name) - sql_node = self.sql_parser.parse_remote(sql, node_name) - process_node(self.config, self.manifest, sql_node) - yield sql_node - self._clear_node(node_name) - - def unsafe_generate_server_node( - self, sql: str, node_name: str = "anonymous_node" - ) -> ManifestNode: - """Generate a server node without context, mutating manifest.""" - self._clear_node(node_name) - sql_node = self.sql_parser.parse_remote(sql, node_name) - process_node(self.config, self.manifest, sql_node) - return sql_node - - def _clear_node(self, name: str) -> None: - """Clear a node from the manifest.""" - _ = self.manifest.nodes.pop( - f"{NodeType.SqlOperation}.{self.config.project_name}.{name}", None - ) + def is_valid(self) -> bool: + """Check if the current and target locations are valid.""" + return self.current == self.target - @property - def sql_parser(self) -> SqlBlockParser: - """Lazy handle to the dbt SQL parser for the project.""" - if not self._sql_parser: - self._sql_parser = SqlBlockParser(self.config, self.manifest, self._config) - return self._sql_parser - @property - def macro_parser(self) -> SqlMacroParser: - """Lazy handle to the dbt SQL macro parser for the project.""" - if not self._macro_parser: - self._macro_parser = SqlMacroParser(self.config, self.manifest) - return self._macro_parser - - def compile_from_node(self, node: ManifestNode) -> DbtAdapterCompilationResult: - """Compile a node and wrap the result in a consistent interface.""" - compiled_node = SqlCompileRunner( - self._config, self.adapter, node=node, node_index=1, num_nodes=1 - ).compile(self.manifest) - return DbtAdapterCompilationResult( - raw_code=getattr(compiled_node, "raw_code"), - compiled_code=getattr(compiled_node, "compiled_code"), - node=compiled_node, - ) +@dataclass +class SchemaFileMigration: + """Describes a schema file migration operation.""" - @lru_cache(maxsize=100) - def compile_code(self, raw_code: str) -> DbtAdapterCompilationResult: - """Compile raw SQL and wrap the result in a consistent interface, leveraging lru cache.""" - tmp_id = str(uuid.uuid4()) - with self.generate_server_node(raw_code, tmp_id) as node: - return self.compile_from_node(node) - - @lru_cache(maxsize=100) - def unsafe_compile_code(self, raw_code: str, retry: int = 3) -> DbtAdapterCompilationResult: - """Compile raw SQL and wrap the result in a consistent interface, leveraging lru cache. Technically less thread-safe than compile_code but faster in a high throughput server scenario""" - tmp_id = str(uuid.uuid4()) - try: - node = self.unsafe_generate_server_node(raw_code, tmp_id) - return self.compile_from_node(node) - except Exception as e: - if retry > 0: - return self.compile_code(raw_code) - raise e - finally: - self._clear_node(tmp_id) + output: dict[str, t.Any] = field( + default_factory=lambda: {"version": 2, "models": [], "sources": []} + ) + supersede: dict[Path, list[str]] = field(default_factory=dict) -# TODO: we will collapse this from the file it is in currently +# FIXME: fold this in from the other file +@dataclass class ColumnLevelKnowledgePropagator: - """Stub for doc-propagation logic. For brevity, only the relevant part is included.""" + """Example usage for doc-propagation logic. placeholders is a tuple to avoid accidental mutation.""" + + placeholders: tuple[str, ...] = ( + EMPTY_STRING, + "Pending further documentation", + "Pending further documentation.", + "No description for this column", + "No description for this column.", + "Not documented", + "Not documented.", + "Undefined", + "Undefined.", + ) @staticmethod def get_node_columns_with_inherited_knowledge( - manifest: t.Any, + manifest: Manifest, node: ManifestNode, placeholders: list[str], - project_dir: str, use_unrendered_descriptions: bool, ) -> dict[str, dict[str, t.Any]]: - """ - Return known doc/metadata from related lineage. - In real usage, you would gather from multiple upstream nodes. - """ - # This is a stub. - # For now, returning an empty dict or minimal placeholders - _ = manifest, node, placeholders, project_dir, use_unrendered_descriptions + _ = manifest, node, placeholders, use_unrendered_descriptions return {} @staticmethod @@ -443,51 +177,40 @@ def update_undocumented_columns_with_prior_knowledge( node: ManifestNode, yaml_section: dict[str, t.Any], known_knowledge: dict[str, dict[str, t.Any]], - skip_add_tags: bool, - skip_merge_meta: bool, - add_progenitor_to_meta: bool, - add_inheritance_keys: list[str], ) -> int: - """ - Propagate docs from known_knowledge onto columns in node + yaml_section. - Return count of columns that changed. - """ - _ = skip_add_tags, skip_merge_meta, add_progenitor_to_meta, add_inheritance_keys - n = 0 + changed_count = 0 for col in columns_to_update: if col not in node.columns: continue cinfo = node.columns[col] old_desc = getattr(cinfo, "description", "") - # If we have prior knowledge, do something - # (for example, update cinfo.description if old_desc is blank). new_desc = old_desc if col in known_knowledge and not old_desc: new_desc = known_knowledge[col].get("description", "") if new_desc and new_desc != old_desc: setattr(cinfo, "description", new_desc) - # Mirror in yaml for c in yaml_section.get("columns", []): if c["name"].lower() == col.lower(): c["description"] = new_desc - n += 1 - return n + changed_count += 1 + return changed_count class MissingOsmosisConfig(Exception): + """Raised when an osmosis configuration is missing.""" + pass class InvalidOsmosisConfig(Exception): + """Raised when an osmosis configuration is invalid.""" + pass @dataclass -class DbtYamlManager(DbtProject): - """Automates tasks around schema yml files, organization, coverage, etc. - - Inherits from DbtProject to access manifest and adapter. - """ +class YamlRefactorSettings: + """Settings for yaml based refactoring operations.""" fqn: str | None = None models: list[str] = field(default_factory=list) @@ -504,802 +227,342 @@ class DbtYamlManager(DbtProject): add_inheritance_for_specified_keys: list[str] = field(default_factory=list) output_to_lower: bool = False - _mutex: threading.Lock = threading.Lock() - _pool: ThreadPoolExecutor = ThreadPoolExecutor(max_workers=(os.cpu_count() or 1) * 2) - _catalog: CatalogArtifact | None = field(default=None, init=False, repr=False) - _mutations: int = 0 + +@dataclass +class DbtProjectContext: + """A data object that includes references to: + + - The loaded dbt config + - The runtime adapter + - The manifest + - The sql/macro parsers + """ + + config: RuntimeConfig + adapter: Adapter + manifest: Manifest + sql_parser: SqlBlockParser + macro_parser: SqlMacroParser + adapter_mutex: threading.Lock = field(default_factory=threading.Lock) + manifest_mutex: threading.Lock = field(default_factory=threading.Lock) + adapter_created_at: float = time.time() + + +def discover_project_dir() -> str: + """Return the directory containing a dbt_project.yml if found, else the current dir.""" + cwd = Path.cwd() + for p in [cwd] + list(cwd.parents): + if (p / "dbt_project.yml").exists(): + return str(p.resolve()) + return str(cwd.resolve()) + + +def discover_profiles_dir() -> str: + """Return the directory containing a profiles.yml if found, else ~/.dbt.""" + if (Path.cwd() / "profiles.yml").exists(): + return str(Path.cwd().resolve()) + return str(Path.home() / ".dbt") + + +def instantiate_adapter(runtime_config: RuntimeConfig) -> t.Any: + """Instantiate a dbt adapter based on the runtime configuration.""" + adapter_cls = get_adapter_class_by_name(runtime_config.credentials.type) + if not adapter_cls: + raise RuntimeError( + f"No valid adapter class found for credentials type: {runtime_config.credentials.type}" + ) + + # NOTE: this exists to patch over an API change in dbt core at some point I don't remember + try: + adapter = adapter_cls(runtime_config) + except TypeError: + from dbt.mp_context import get_mp_context + + adapter = adapter_cls(runtime_config, get_mp_context()) # pyright: ignore[reportCallIssue] + + adapter.connections.set_connection_name() + return adapter + + +def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: + """Build a DbtProjectContext from a DbtConfiguration.""" + args = config_to_namespace(config) + dbt_flags.set_from_args(args, args) + runtime_cfg = RuntimeConfig.from_args(args) + + adapter = instantiate_adapter(runtime_cfg) + loader = ManifestLoader( + runtime_cfg, runtime_cfg.load_dependencies(), adapter.connections.set_query_header + ) + manifest = loader.load() + manifest.build_flat_graph() + + loader.save_macros_to_adapter(adapter) + + sql_parser = SqlBlockParser(runtime_cfg, manifest, runtime_cfg) + macro_parser = SqlMacroParser(runtime_cfg, manifest) + + return DbtProjectContext( + config=runtime_cfg, + adapter=adapter, + manifest=manifest, + sql_parser=sql_parser, + macro_parser=macro_parser, + adapter_mutex=threading.Lock(), + manifest_mutex=threading.Lock(), + adapter_created_at=time.time(), + ) + + +@dataclass +class YamlRefactorContext: + """A data object that includes references to: + + - The dbt project context + - The yaml refactor settings + - A thread pool executor + - A ruamel.yaml instance + - A tuple of placeholder strings + - The mutation count incremented during refactoring operations + """ + + project: DbtProjectContext + settings: YamlRefactorSettings + + pool: ThreadPoolExecutor = field(default_factory=ThreadPoolExecutor) + + yaml_handler: ruamel.yaml.YAML = field(default_factory=create_yaml_instance) placeholders: tuple[str, ...] = ( + EMPTY_STRING, "Pending further documentation", - "Pending further documentation.", "No description for this column", - "No description for this column.", "Not documented", - "Not documented.", "Undefined", - "Undefined.", - "", ) + mutation_count: int = 0 + + def register_mutations(self, count: int) -> None: + """Increment the mutation count by a specified amount.""" + self.mutation_count += count + def __post_init__(self) -> None: - super(DbtProject, self).__init__() # FIXME: this is not right + if EMPTY_STRING not in self.placeholders: + self.placeholders = (EMPTY_STRING, *self.placeholders) - # Re-parse to ensure our newly added attributes (like skip_add_columns) are recognized - if not list(self.filtered_models()): - logger().warning("No models found to process given fqn/models arguments") - logger().info("Check your filters or supply a valid model name/fqn.") - sys.exit(0) - @property - def yaml_handler(self) -> YamlHandler: - """Get a canonical YAML handler for dbt project files""" - if not hasattr(self, "_yaml_handler"): - self._yaml_handler = YamlHandler() # FIXME: do like DbtProject - return self._yaml_handler +def compile_sql_code(context: DbtProjectContext, raw_sql: str) -> ManifestSQLNode: + """Compile jinja SQL using the context's manifest and adapter.""" + tmp_id = str(uuid.uuid4()) + with context.manifest_mutex: + key = f"{NodeType.SqlOperation}.{context.config.project_name}.{tmp_id}" + _ = context.manifest.nodes.pop(key, None) - @property - def catalog(self) -> CatalogArtifact | None: - """Get the catalog artifact, loading from disk if needed.""" - if self._catalog: - return self._catalog - if not self.catalog_file: - return None - fp = Path(self.catalog_file) - if not fp.exists(): - return None - self._catalog = CatalogArtifact.from_dict(json.loads(fp.read_text())) - return self._catalog - - def _filter_model_by_fqn(self, node: ManifestNode) -> bool: - """Filter a model node by its fqn.""" - if not self.fqn: - return True - fqn_parts = self.fqn.split(".") - return len(node.fqn[1:]) >= len(fqn_parts) and all( - left == right for left, right in zip(fqn_parts, node.fqn[1:]) - ) + node = context.sql_parser.parse_remote(raw_sql, tmp_id) + if not has_jinja(raw_sql): + return node + process_node(context.config, context.manifest, node) + compiled_node = SqlCompileRunner( + context.config, + context.adapter, + node=node, + node_index=1, + num_nodes=1, + ).compile(context.manifest) + + _ = context.manifest.nodes.pop(key, None) + + return compiled_node + + +def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> AdapterResponse: + """Execute jinja SQL using the context's manifest and adapter.""" + if has_jinja(raw_sql): + comp = compile_sql_code(context, raw_sql) + sql_to_exec = comp.compiled_code or comp.raw_code + else: + sql_to_exec = raw_sql - def _filter_model_by_models(self, node: ManifestNode) -> bool: - """Filter a model node by its name.""" - for m in self.models: - if node.name == m: + with context.adapter_mutex: + resp, _ = context.adapter.execute(sql_to_exec, auto_begin=False, fetch=True) + + return resp + + +def filter_models( + context: YamlRefactorContext, +) -> Iterator[tuple[str, ManifestNode | SourceDefinition]]: + """Iterate over the models in the dbt project manifest applying the filter settings.""" + + def f(node: ManifestNode | SourceDefinition) -> bool: + """Closure to filter models based on the context settings.""" + if node.resource_type not in (NodeType.Model, NodeType.Source): + return False + if node.package_name != context.project.config.project_name: + return False + if node.resource_type == NodeType.Model and node.config.materialized == "ephemeral": + return False + if context.settings.models: + if not _is_file_match(node, context.settings.models): + return False + elif context.settings.fqn: + if not _is_fqn_match(node, context.settings.fqn): + return False + return True + + items = chain(context.project.manifest.nodes.items(), context.project.manifest.sources.items()) + for uid, dbt_node in items: + if f(dbt_node): + yield uid, dbt_node + + +def _is_fqn_match(node: ManifestNode | SourceDefinition, fqn_str: str) -> bool: + """Filter models based on the provided fully qualified name matching on partial segments.""" + if not fqn_str: + return True + parts = fqn_str.split(".") + return len(node.fqn[1:]) >= len(parts) and all( + left == right for left, right in zip(parts, node.fqn[1:]) + ) + + +def _is_file_match(node: ManifestNode | SourceDefinition, paths: list[str]) -> bool: + """Check if a node's file path matches any of the provided file paths or names.""" + node_path = _get_node_path(node) + for model in paths: + if node.name == model: + return True + try_path = Path(model).resolve() + if try_path.is_dir(): + if node_path and try_path in node_path.parents: return True - node_path = self.get_node_path(node) - inp_path = Path(m).resolve() - if inp_path.is_dir(): - if node_path and inp_path in node_path.parents: - return True - elif inp_path.is_file(): - if node_path and inp_path == node_path: - return True - return False - - def _filter_model(self, node: ManifestNode) -> bool: - """Filter a model node by fqn or models depending on input.""" - if self.models: - filter_method = self._filter_model_by_models - elif self.fqn: - filter_method = self._filter_model_by_fqn - else: - # FIXME: make this more concise - def _filter_method(_): + elif try_path.is_file(): + if node_path and try_path == node_path: return True + return False - filter_method = _filter_method - return ( - node.resource_type in (NodeType.Model, NodeType.Source) - and node.package_name == self.project_name - and not ( - node.resource_type == NodeType.Model and node.config.materialized == "ephemeral" - ) - and filter_method(node) - ) +def _get_node_path(node: ManifestNode | SourceDefinition) -> Path | None: + """Return the path to the node's original file if available.""" + if node.original_file_path and hasattr(node, "root_path"): + return Path(getattr(node, "root_path"), node.original_file_path).resolve() + return None - def filtered_models( - self, subset: MutableMapping[str, ManifestNode] | None = None - ) -> Iterator[tuple[str, ManifestNode]]: - """Iterate over models in the manifest, applying filters.""" - items = ( - subset.items() - if subset - else chain(self.manifest.nodes.items(), self.manifest.sources.items()) - ) - for unique_id, dbt_node in items: - if self._filter_model(dbt_node): - yield unique_id, dbt_node - @staticmethod - def get_node_path(node: ManifestNode) -> Path | None: - """Get the resolved path for a node.""" - if node.original_file_path: - return Path(node.root_path, node.original_file_path).resolve() +def load_catalog(settings: YamlRefactorSettings) -> CatalogArtifact | None: + """Load the catalog file if it exists and return a CatalogArtifact instance.""" + if not settings.catalog_file: return None - - @staticmethod - def get_patch_path(node: ManifestNode) -> Path | None: - """Get the resolved path for a node's patch (YAML) file.""" - if node.patch_path: - return Path(node.patch_path.split("://")[-1]) + fp = Path(settings.catalog_file) + if not fp.exists(): return None - - def get_columns_meta( - self, catalog_key: CatalogKey, output_to_lower: bool = False - ) -> dict[str, ColumnMetadata]: - """ - Resolve columns metadata (type, comment, etc.) either from an external CatalogArtifact - or from a live introspection query with the adapter. - """ - columns = OrderedDict() - blacklist = self._config.vars.get("dbt-osmosis", {}).get("_blacklist", []) - # if catalog is loaded: - if self.catalog: - # Attempt to match node in catalog - cat_objs = {**self.catalog.nodes, **self.catalog.sources} - matched = [ - obj for key, obj in cat_objs.items() if key.split(".")[-1] == catalog_key.name - ] - if matched: - for col in matched[0].columns.values(): - if any(re.match(pat, col.name) for pat in blacklist): - continue - columns[ - column_casing(col.name, self._config.credentials.type, output_to_lower) - ] = ColumnMetadata( - name=column_casing( - col.name, self._config.credentials.type, output_to_lower - ), - type=col.type, - index=col.index, - comment=col.comment, - ) - return columns - - # fallback to adapter-based introspection - with self.adapter.connection_named("dbt-osmosis"): - table = self.adapter.get_relation( - catalog_key.database, catalog_key.schema, catalog_key.name - ) - if not table: - return columns - try: - for c in self.adapter.get_columns_in_relation(table): - if any(re.match(p, c.name) for p in blacklist): - continue - col_cased = column_casing( - c.name, self._config.credentials.type, output_to_lower - ) - columns[col_cased] = ColumnMetadata( - name=col_cased, - type=c.dtype - if not ( - c.is_numeric() - and self.numeric_precision - or c.is_string() - and self.char_length - ) - else c.data_type, - index=None, - comment=getattr(c, "comment", None), - ) - if hasattr(c, "flatten"): - for exp in c.flatten(): - if any(re.match(p, exp.name) for p in blacklist): - continue - col_exp_cased = column_casing( - exp.name, self._config.credentials.type, output_to_lower - ) - columns[col_exp_cased] = ColumnMetadata( - name=col_exp_cased, - type=exp.dtype - if not ( - exp.is_numeric() - and self.numeric_precision - or exp.is_string() - and self.char_length - ) - else exp.data_type, - index=None, - comment=getattr(exp, "comment", None), - ) - except Exception as e: - logger().info(f"Could not resolve columns for {catalog_key}: {e}") - return columns - - def get_catalog_key(self, node: ManifestNode) -> CatalogKey: - if node.resource_type == NodeType.Source: - return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) - return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) - - def propagate_documentation_downstream( - self, force_inheritance: bool = False, output_to_lower: bool = False - ) -> None: - schema_map = self.build_schema_folder_mapping(output_to_lower) - futures = [] - with self.adapter.connection_named("dbt-osmosis"): - for unique_id, node in self.filtered_models(): - futures.append( - self._pool.submit( - self._run, unique_id, node, schema_map, force_inheritance, output_to_lower - ) + return CatalogArtifact.from_dict(json.loads(fp.read_text())) + + +def get_columns_meta( + context: YamlRefactorContext, + node: ManifestNode, + catalog: CatalogArtifact | None, +) -> dict[str, ColumnMetadata]: + """Get the column metadata for a node from the catalog or the adapter.""" + cased_cols = OrderedDict() + blacklist = context.project.config.vars.get("dbt-osmosis", {}).get("_blacklist", []) + + key = _catalog_key_for_node(node) + if catalog: + cat_objs = {**catalog.nodes, **catalog.sources} + matched = [v for k, v in cat_objs.items() if k.split(".")[-1] == key.name] + if matched: + for col in matched[0].columns.values(): + if any(re.match(p, col.name) for p in blacklist): + continue + cased = column_casing( + col.name, + context.project.config.credentials.type, + context.settings.output_to_lower, ) - wait(futures) - - def build_schema_folder_mapping(self, output_to_lower: bool) -> dict[str, SchemaFileLocation]: - """ - Build a mapping of model unique_id -> (target schema yml path, existing path) - """ - self.bootstrap_sources(output_to_lower) - out = {} - for uid, node in self.filtered_models(): - sc_path = self.get_schema_path(node) - target_sc_path = self.get_target_schema_path(node) - out[uid] = SchemaFileLocation( - target=target_sc_path.resolve(), - current=sc_path.resolve() if sc_path else None, - node_type=node.resource_type, - ) - return out - - def bootstrap_sources(self, output_to_lower: bool = False) -> None: - """ - Quick approach: if the user has declared sources in 'dbt-osmosis' vars, - create or augment the schema files for them. For brevity, direct approach only. - """ - performed_disk_mutation = False - spec_dict = self._config.vars.get("dbt-osmosis", {}) - blacklist = spec_dict.get("_blacklist", []) - - for source, spec in spec_dict.items(): - if source == "_blacklist": - continue - if isinstance(spec, str): - schema = source - database = self._config.credentials.database - path = spec - elif isinstance(spec, dict): - schema = spec.get("schema", source) - database = spec.get("database", self._config.credentials.database) - path = spec["path"] - else: - continue + cased_cols[cased] = ColumnMetadata( + name=cased, + type=col.type, + index=col.index, + comment=col.comment, + ) + return cased_cols - # Check if source in manifest - dbt_node = next( - (s for s in self.manifest.sources.values() if s.source_name == source), None - ) - if not dbt_node: - # create file with tables from introspection - sc_file = ( - Path(self._config.project_root) - / self._config.model_paths[0] - / path.lstrip(os.sep) + with context.project.adapter.connection_named("dbt-osmosis"): + rel = context.project.adapter.get_relation(key.database, key.schema, key.name) + if not rel: + return cased_cols + try: + col_objs = context.project.adapter.get_columns_in_relation(rel) + for col_ in col_objs: + if any(re.match(b, col_.name) for b in blacklist): + continue + cased = column_casing( + col_.name, + context.project.config.credentials.type, + context.settings.output_to_lower, ) - relations = self.adapter.list_relations(database=database, schema=schema) - tables_data = [] - for rel in relations: - cols = [] - for c in self.adapter.get_columns_in_relation(rel): - if any(re.match(p, c.name) for p in blacklist): + dtype = _maybe_use_precise_dtype(col_, context.settings) + cased_cols[cased] = ColumnMetadata( + name=cased, + type=dtype, + index=None, + comment=getattr(col_, "comment", None), + ) + if hasattr(col_, "flatten"): + for exp in col_.flatten(): + if any(re.match(b, exp.name) for b in blacklist): continue - col_cased = column_casing( - c.name, self._config.credentials.type, output_to_lower + cased2 = column_casing( + exp.name, + context.project.config.credentials.type, + context.settings.output_to_lower, ) - dt = c.dtype.lower() if output_to_lower else c.dtype - cols.append({"name": col_cased, "description": "", "data_type": dt}) - tables_data.append({"name": rel.identifier, "description": "", "columns": cols}) - - sc_file.parent.mkdir(parents=True, exist_ok=True) - with open(sc_file, "w") as f: - logger().info(f"Injecting source {source} => {sc_file}") - self.yaml_handler.dump( - { - "version": 2, - "sources": [ - { - "name": source, - "database": database, - "schema": schema, - "tables": tables_data, - } - ], - }, - f, - ) - self._mutations += 1 - performed_disk_mutation = True - - if performed_disk_mutation: - logger().info("Reloading project to pick up new sources.") - self.safe_parse_project(init=True) - - def get_schema_path(self, node: ManifestNode) -> Optional[Path]: - if node.resource_type == NodeType.Model and node.patch_path: - return Path(self._config.project_root).joinpath(node.patch_path.partition("://")[-1]) - if node.resource_type == NodeType.Source and hasattr(node, "source_name"): - return Path(self._config.project_root).joinpath(node.path) - return None - - def get_target_schema_path(self, node: ManifestNode) -> Path: - path_spec = self.get_osmosis_path_spec(node) - if not path_spec: - return Path(self._config.project_root, node.original_file_path) - sc = path_spec.format(node=node, model=node.name, parent=node.fqn[-2]) - parts = [] - if node.resource_type == NodeType.Source: - parts.append(self._config.model_paths[0]) - else: - parts.append(Path(node.original_file_path).parent) - if not (sc.endswith(".yml") or sc.endswith(".yaml")): - sc += ".yml" - parts.append(sc) - return Path(self._config.project_root, *parts) - - def get_osmosis_path_spec(self, node: ManifestNode) -> Optional[str]: - if node.resource_type == NodeType.Source: - source_specs = self._config.vars.get("dbt-osmosis", {}) - source_spec = source_specs.get(node.source_name) - if isinstance(source_spec, dict): - return source_spec.get("path") - return source_spec - osm_spec = node.unrendered_config.get("dbt-osmosis") - if not osm_spec: - raise MissingOsmosisConfig(f"Config not set for model {node.name}") - return osm_spec - - def get_columns(self, key: CatalogKey, to_lower: bool) -> list[str]: - return list(self.get_columns_meta(key, to_lower).keys()) - - def get_base_model(self, node: ManifestNode, to_lower: bool) -> dict[str, t.Any]: - cols = self.get_columns(self.get_catalog_key(node), to_lower) - return { - "name": node.name, - "columns": [{"name": c, "description": ""} for c in cols], - } - - def augment_existing_model( - self, doc: dict[str, t.Any], node: ManifestNode, to_lower: bool - ) -> dict[str, t.Any]: - existing_cols = [c["name"] for c in doc.get("columns", [])] - db_cols = self.get_columns(self.get_catalog_key(node), to_lower) - new_cols = [c for c in db_cols if not any(c.lower() == e.lower() for e in existing_cols)] - for col in new_cols: - doc.setdefault("columns", []).append({"name": col, "description": ""}) - logger().info(f"Injecting column {col} into {node.unique_id}") - return doc - - def draft_project_structure_update_plan( - self, output_to_lower: bool = False - ) -> dict[Path, SchemaFileMigration]: - blueprint = {} - logger().info("Building structure update plan.") - futs = [] - with self.adapter.connection_named("dbt-osmosis"): - for uid, sf_loc in self.build_schema_folder_mapping(output_to_lower).items(): - if not sf_loc.is_valid: - futs.append( - self._pool.submit(self._draft, sf_loc, uid, blueprint, output_to_lower) - ) - wait(futs) - return blueprint - - def _draft( - self, - sf_loc: SchemaFileLocation, - uid: str, - blueprint: dict[Path, SchemaFileMigration], - to_lower: bool, - ): - try: - with self._mutex: - if sf_loc.target not in blueprint: - blueprint[sf_loc.target] = SchemaFileMigration() - if sf_loc.node_type == NodeType.Model: - node = self.manifest.nodes[uid] - else: - node = self.manifest.sources[uid] - - if sf_loc.current is None: - # model not documented yet - with self._mutex: - if sf_loc.node_type == NodeType.Model: - blueprint[sf_loc.target].output["models"].append( - self.get_base_model(node, to_lower) + dtype2 = _maybe_use_precise_dtype(exp, context.settings) + cased_cols[cased2] = ColumnMetadata( + name=cased2, + type=dtype2, + index=None, + comment=getattr(exp, "comment", None), ) - else: - # We have existing doc, but we want to unify it into the new location - with self._mutex: - doc = self.yaml_handler.load(sf_loc.current) - if sf_loc.node_type == NodeType.Model: - for m in doc.get("models", []): - if m["name"] == node.name: - newm = self.augment_existing_model(m, node, to_lower) - with self._mutex: - blueprint[sf_loc.target].output["models"].append(newm) - blueprint[sf_loc.target].supersede.setdefault( - sf_loc.current, [] - ).append(node) - break - else: - for source in doc.get("sources", []): - if source["name"] == node.source_name: - for table in source["tables"]: - if table["name"] == node.name: - newt = self.augment_existing_model(table, node, to_lower) - with self._mutex: - if not any( - s["name"] == node.source_name - for s in blueprint[sf_loc.target].output["sources"] - ): - blueprint[sf_loc.target].output["sources"].append( - source - ) - for s in blueprint[sf_loc.target].output["sources"]: - if s["name"] == node.source_name: - for t2 in s["tables"]: - if t2["name"] == node.name: - t2.update(newt) - break - blueprint[sf_loc.target].supersede.setdefault( - sf_loc.current, [] - ).append(node) - break - except Exception as e: - logger().error(f"Drafting structure plan for {uid} failed: {e}") - raise e - - def cleanup_blueprint( - self, blueprint: dict[Path, SchemaFileMigration] - ) -> dict[Path, SchemaFileMigration]: - for k in list(blueprint.keys()): - out = blueprint[k].output - # remove empty models/sources - if "models" in out and not out["models"]: - del out["models"] - if "sources" in out and not out["sources"]: - del out["sources"] - if not out.get("models") and not out.get("sources"): - del blueprint[k] - return blueprint - - def commit_project_restructure_to_disk( - self, - blueprint: Optional[dict[Path, SchemaFileMigration]] = None, - output_to_lower: bool = False, - ) -> bool: - if not blueprint: - blueprint = self.draft_project_structure_update_plan(output_to_lower) - blueprint = self.cleanup_blueprint(blueprint) - if not blueprint: - logger().info("Project structure is already conformed.") - return False - self.pretty_print_restructure_plan(blueprint) - - for target, struct in blueprint.items(): - if not target.exists(): - logger().info(f"Creating schema file {target}") - if not self.dry_run: - target.parent.mkdir(parents=True, exist_ok=True) - target.touch() - self.yaml_handler.dump(struct.output, target) - self._mutations += 1 - else: - logger().info(f"Updating schema file {target}") - existing = self.yaml_handler.load(target) - if not existing: - existing = {"version": 2} - if "version" not in existing: - existing["version"] = 2 - - if "models" in struct.output: - existing.setdefault("models", []).extend(struct.output["models"]) - if "sources" in struct.output: - existing.setdefault("sources", []).extend(struct.output["sources"]) - if not self.dry_run: - self.yaml_handler.dump(existing, target) - self._mutations += 1 - - # handle superseded - for sup_path, nodes in struct.supersede.items(): - raw_sc = self.yaml_handler.load(sup_path) - # figure out which ones to remove - to_remove_models = {n.name for n in nodes if n.resource_type == NodeType.Model} - to_remove_sources = { - (n.source_name, n.name) for n in nodes if n.resource_type == NodeType.Source - } - - keep_models = [] - for m in raw_sc.get("models", []): - if m["name"] not in to_remove_models: - keep_models.append(m) - raw_sc["models"] = keep_models - - # remove relevant source tables - keep_src = [] - for s in raw_sc.get("sources", []): - keep_tables = [] - for t_ in s.get("tables", []): - if (s["name"], t_["name"]) not in to_remove_sources: - keep_tables.append(t_) - if keep_tables: - s["tables"] = keep_tables - keep_src.append(s) - raw_sc["sources"] = keep_src - - # if file is empty => remove it - if (not raw_sc.get("models")) and (not raw_sc.get("sources")): - logger().info(f"Superseding entire file {sup_path}") - if not self.dry_run: - sup_path.unlink(missing_ok=True) - if sup_path.parent.exists() and not any(sup_path.parent.iterdir()): - sup_path.parent.rmdir() - else: - if not self.dry_run: - self.yaml_handler.dump(raw_sc, sup_path) - self._mutations += 1 - logger().info(f"Migrated doc from {sup_path} -> {target}") - return True + except Exception as exc: + logger().warning(f"Could not introspect columns for {key}: {exc}") - @staticmethod - def pretty_print_restructure_plan(blueprint: dict[Path, SchemaFileMigration]) -> None: - summary = [] - for plan in blueprint.keys(): - files_superseded = [s.name for s in blueprint[plan].supersede] or ["CREATE"] - summary.append((files_superseded, "->", plan.name)) - logger().info(summary) - - ############################################################################ - # Column Sync - ############################################################################ - @staticmethod - def get_column_sets( - database_cols: Iterable[str], - yaml_cols: Iterable[str], - documented_cols: Iterable[str], - ) -> t.tuple[list[str], list[str], list[str]]: - """ - Return: (missing_in_yaml, undocumented_in_yaml, extra_in_yaml) - """ - missing = [x for x in database_cols if x.lower() not in (y.lower() for y in yaml_cols)] - undocumented = [ - x for x in database_cols if x.lower() not in (y.lower() for y in documented_cols) - ] - extra = [x for x in yaml_cols if x.lower() not in (y.lower() for y in database_cols)] - return missing, undocumented, extra - - def _run( - self, - uid: str, - node: ManifestNode, - schema_map: dict[str, SchemaFileLocation], - force_inheritance: bool, - output_to_lower: bool, + return cased_cols + + +def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: + if (col.is_numeric() and settings.numeric_precision) or ( + col.is_string() and settings.char_length ): - try: - with self._mutex: - logger().info(f"Processing model: {uid}") - sf_loc = schema_map.get(uid) - if not sf_loc or not sf_loc.current: - with self._mutex: - logger().info(f"No schema file for {uid}, skipping.") - return - db_cols_list = self.get_columns(self.get_catalog_key(node), output_to_lower) - if not db_cols_list: - with self._mutex: - logger().info( - f"No database columns found for {uid}, falling back to yaml columns." - ) - db_cols_list = list(node.columns.keys()) - - db_cols_set = set(db_cols_list) - yaml_cols_list = list(node.columns.keys()) - documented_cols_set = { - c - for c, info in node.columns.items() - if info.description and info.description not in self.placeholders - } - - missing, undocumented, extra = self.get_column_sets( - db_cols_list, yaml_cols_list, documented_cols_set - ) - - if force_inheritance: - undocumented = list(db_cols_set) # treat all as needing doc - - with self._mutex: - sc_data = self.yaml_handler.load(sf_loc.current) - section = self.maybe_get_section_from_schema_file(sc_data, node) - if not section: - logger().info(f"No section in {sf_loc.current} for {uid}") - return - # Perform updates - n_added = n_doc_inh = n_removed = n_type_changed = n_desc_changed = 0 - if any([missing, undocumented, extra]): - ( - n_added, - n_doc_inh, - n_removed, - n_type_changed, - n_desc_changed, - ) = self.update_schema_file_and_node( - missing, - undocumented, - extra, - node, - section, - self.get_columns_meta(self.get_catalog_key(node), output_to_lower), - output_to_lower, - ) - - reorder = tuple(db_cols_list) != tuple(yaml_cols_list) - if reorder: - - def _sort(c: dict[str, t.Any]) -> int: - try: - return db_cols_list.index( - column_casing( - c["name"], self._config.credentials.type, output_to_lower - ) - ) - except ValueError: - return 999999 - - section["columns"].sort(key=_sort) - - if ( - n_added + n_doc_inh + n_removed + n_type_changed + n_desc_changed or reorder - ) and not self.dry_run: - self.yaml_handler.dump(sc_data, sf_loc.current) - self._mutations += 1 - logger().info(f"Updated {sf_loc.current}") - else: - logger().info(f"{sf_loc.current} is up to date") - - except Exception as e: - logger().error(f"Error while processing {uid}: {e}") - raise e + return col.data_type + return col.dtype - @staticmethod - def maybe_get_section_from_schema_file( - yaml_data: dict[str, t.Any], node: ManifestNode - ) -> Optional[dict[str, t.Any]]: - if node.resource_type == NodeType.Source: - for s in yaml_data.get("sources", []): - for t_ in s.get("tables", []): - if s["name"] == node.source_name and t_["name"] == node.name: - return t_ - else: - for m in yaml_data.get("models", []): - if m["name"] == node.name: - return m - return None - @staticmethod - def remove_columns_not_in_database( - extra_columns: Iterable[str], - node: ManifestNode, - yaml_section: dict[str, t.Any], - ) -> int: - c = 0 - for e in extra_columns: - node.columns.pop(e, None) - yaml_section["columns"] = [col for col in yaml_section["columns"] if col["name"] != e] - c += 1 - return c - - def update_columns_attribute( - self, - node: ManifestNode, - yaml_section: dict[str, t.Any], - db_meta: dict[str, ColumnMetadata], - attr: str, - meta_key: str, - skip_flag: bool, - output_to_lower: bool, - ) -> int: - if skip_flag: - return 0 - changed = 0 - for col_name, col_meta in db_meta.items(): - if col_name in node.columns: - new_val = getattr(col_meta, meta_key, "") or "" - old_val = getattr(node.columns[col_name], attr, "") - if new_val and old_val != new_val: - setattr(node.columns[col_name], attr, new_val) - for c in yaml_section["columns"]: - if ( - column_casing(c["name"], self._config.credentials.type, output_to_lower) - == col_name - ): - if output_to_lower and isinstance(new_val, str): - new_val = new_val.lower() - c[attr] = new_val - changed += 1 - return changed - - def add_missing_cols_to_node_and_model( - self, - missing_cols: Iterable[str], - node: ManifestNode, - yaml_section: dict[str, t.Any], - db_meta: dict[str, ColumnMetadata], - output_to_lower: bool, - ) -> int: - c = 0 - for col in missing_cols: - if col not in db_meta: - continue - dtype = db_meta[col].type or "" - desc = db_meta[col].comment or "" - meta_name = col.lower() if output_to_lower else col - meta_type = dtype.lower() if output_to_lower else dtype - node.columns[col] = ColumnInfo.from_dict( - {"name": meta_name, "description": desc, "data_type": meta_type} - ) - yaml_section.setdefault("columns", []).append( - {"name": meta_name, "description": desc, "data_type": meta_type} - ) - c += 1 - return c - - def update_schema_file_and_node( - self, - missing_cols: Iterable[str], - undocumented_cols: Iterable[str], - extra_cols: Iterable[str], - node: ManifestNode, - yaml_section: dict[str, t.Any], - db_meta: dict[str, ColumnMetadata], - output_to_lower: bool, - ) -> t.tuple[int, int, int, int, int]: - n_added = 0 - n_doc_inherited = 0 - n_removed = 0 - n_type_updated = 0 - n_desc_updated = 0 - - if not self.skip_add_columns: - n_added = self.add_missing_cols_to_node_and_model( - missing_cols, node, yaml_section, db_meta, output_to_lower - ) - - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - self.manifest, - node, - self.placeholders, - self._config.project_root, - self.use_unrendered_descriptions, - ) - n_doc_inherited = ( - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_cols, - node, - yaml_section, - knowledge, - self.skip_add_tags, - self.skip_merge_meta, - self.add_progenitor_to_meta, - self.add_inheritance_for_specified_keys, - ) - ) - n_type_updated = self.update_columns_attribute( - node, - yaml_section, - db_meta, - attr="data_type", - meta_key="type", - skip_flag=self.skip_add_data_types, - output_to_lower=output_to_lower, - ) - # We piggyback the "catalog_file" presence as "update description?" flag in original code - n_desc_updated = self.update_columns_attribute( - node, - yaml_section, - db_meta, - attr="description", - meta_key="comment", - skip_flag=(self.catalog_file is None), - output_to_lower=output_to_lower, - ) - n_removed = self.remove_columns_not_in_database(extra_cols, node, yaml_section) - return n_added, n_doc_inherited, n_removed, n_type_updated, n_desc_updated +def _catalog_key_for_node(node: ManifestNode) -> CatalogKey: + if node.resource_type == NodeType.Source: + return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) + return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) + + +def build_dbt_project_context(cfg: DbtConfiguration) -> DbtProjectContext: + if not cfg.project_dir: + cfg.project_dir = discover_project_dir() + if not cfg.profiles_dir: + cfg.profiles_dir = discover_profiles_dir() + return create_dbt_project_context(cfg) + + +def run_example_compilation_flow() -> None: + cfg = DbtConfiguration( + project_dir="", profiles_dir="", target="some_target", threads=2, _vars={"foo": "bar"} + ) + proj_ctx = build_dbt_project_context(cfg) + + cr = compile_sql_code(proj_ctx, "select '{{ 1+1 }}' as col") + print("Compiled =>", cr.compiled_code) + + ex = execute_sql_code(proj_ctx, "select '{{ 1+2 }}' as col") + print("Rows =>", ex.table) diff --git a/src/dbt_osmosis/core/osmosis_o.py b/src/dbt_osmosis/core/osmosis_o.py new file mode 100644 index 00000000..89f00df8 --- /dev/null +++ b/src/dbt_osmosis/core/osmosis_o.py @@ -0,0 +1,1305 @@ +# pyright: reportUnknownVariableType=false, reportPrivateImportUsage=false, reportAny=false, reportUnknownMemberType=false +import json +import logging +import os +import re +import sys +import threading +import time +import typing as t +import uuid +from argparse import Namespace +from collections import OrderedDict, UserDict +from collections.abc import Iterable, Iterator, MutableMapping +from concurrent.futures import ThreadPoolExecutor, wait +from contextlib import contextmanager +from copy import copy +from dataclasses import dataclass, field +from functools import lru_cache +from itertools import chain +from pathlib import Path + +import ruamel.yaml +from dbt.adapters.factory import get_adapter_class_by_name +from dbt.config.runtime import RuntimeConfig +from dbt.contracts.graph.manifest import Manifest +from dbt.contracts.graph.nodes import ColumnInfo, ManifestNode +from dbt.contracts.results import CatalogArtifact, CatalogKey, CatalogTable, ColumnMetadata +from dbt.flags import set_from_args +from dbt.node_types import NodeType +from dbt.parser.manifest import ManifestLoader, process_node +from dbt.parser.sql import SqlBlockParser, SqlMacroParser +from dbt.task.sql import SqlCompileRunner +from dbt.tracking import disable_tracking + +# Disabling dbt tracking for non-standard usage +disable_tracking() + + +def logger() -> logging.Logger: + """Get the log handle for dbt-osmosis""" + return logging.getLogger("dbt-osmosis") + + +def has_jinja(code: str) -> bool: + """Check if code contains Jinja tokens""" + return any(token in code for token in ("{{", "}}", "{%", "%}", "{#", "#}")) + + +def column_casing(column: str, credentials_type: str, to_lower: bool) -> str: + """Utility to handle column name casing based on dbt adapter & user flag.""" + # If quoted in snowflake, pass verbatim + if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): + return column + # Otherwise apply user-specified transformations + if to_lower: + return column.lower() + if credentials_type == "snowflake": + return column.upper() + return column + + +class YamlHandler(ruamel.yaml.YAML): + """A ruamel.yaml wrapper to handle dbt YAML files with sane defaults.""" + + def __init__(self, **kwargs: t.Any) -> None: + super().__init__(**kwargs) + self.indent(mapping=2, sequence=4, offset=2) + self.width: int = 800 + self.preserve_quotes: bool = True + self.default_flow_style: bool = False + self.encoding: str = os.getenv("DBT_OSMOSIS_ENCODING", "utf-8") + + +@dataclass +class SchemaFileLocation: + """Dataclass to store schema file location details.""" + + target: Path + current: Path | None = None + node_type: NodeType = NodeType.Model + + @property + def is_valid(self) -> bool: + return self.current == self.target + + +@dataclass +class SchemaFileMigration: + """Dataclass to store schema file migration details.""" + + output: dict[str, t.Any] = field( + default_factory=lambda: {"version": 2, "models": [], "sources": []} + ) + supersede: dict[Path, list[str]] = field(default_factory=dict) + + +@dataclass +class DbtConfiguration: + """Stores dbt project configuration in a namespace""" + + project_dir: str + profiles_dir: str + threads: int = 1 + single_threaded: bool = True + which: str = "" + target: str | None = None + profile: str | None = None + + DEBUG: bool = False + + _vars: str | dict[str, t.Any] = field(default_factory=dict) + + def __post_init__(self) -> None: + if self.threads != 1: + self.single_threaded = False + + @property + def vars(self) -> str: + if isinstance(self._vars, dict): + return json.dumps(self._vars) + return self._vars + + @vars.setter + def vars(self, v: t.Any) -> None: + if not isinstance(v, (str, dict)): + raise ValueError("vars must be a string or dict") + self._vars = v + + +class DbtManifestProxy(UserDict[str, t.Any]): + """Proxy for the manifest's flat_graph, read-only by design.""" + + def _readonly(self, *args: t.Any, **kwargs: t.Any) -> t.Never: + _ = args, kwargs + raise RuntimeError("Cannot modify DbtManifestProxy") + + __setitem__: t.Callable[..., None] = _readonly + __delitem__: t.Callable[..., None] = _readonly + pop: t.Callable[..., None] = _readonly + popitem: t.Callable[..., t.Any] = _readonly + clear: t.Callable[..., None] = _readonly + update: t.Callable[..., None] = _readonly + setdefault: t.Callable[..., None] = _readonly + + +@dataclass +class DbtAdapterExecutionResult: + adapter_response: t.Any + table: t.Any + raw_code: str + compiled_code: str + + +@dataclass +class DbtAdapterCompilationResult: + raw_code: str + compiled_code: str + node: ManifestNode + injected_code: str | None = None + + +def find_default_project_dir() -> str: + cwd = Path.cwd() + # Walk up if needed + for p in [cwd] + list(cwd.parents): + if (p / "dbt_project.yml").exists(): + return str(p.resolve()) + return str(cwd.resolve()) + + +def find_default_profiles_dir() -> str: + # Common fallback for DBT_PROFILES_DIR + if (Path.cwd() / "profiles.yml").exists(): + return str(Path.cwd().resolve()) + return str(Path.home() / ".dbt") + + +class DbtProject: + """Wraps dbt's in-memory project & adapter, enabling queries, compilation, etc.""" + + ADAPTER_TTL: float = 3600.0 + + def __init__( + self, + target: str | None = None, + profiles_dir: str | None = None, + project_dir: str | None = None, + threads: int = 1, + vars: str | dict[str, t.Any] | None = None, + profile: str | None = None, + ): + if not profiles_dir: + profiles_dir = find_default_profiles_dir() + if not project_dir: + project_dir = find_default_project_dir() + + self.base_config: DbtConfiguration = DbtConfiguration( + project_dir=project_dir, + profiles_dir=profiles_dir, + target=target, + threads=threads, + profile=profile, + ) + if vars: + self.base_config.vars = vars + + self.adapter_mutex: threading.Lock = threading.Lock() + self.parsing_mutex: threading.Lock = threading.Lock() + self.manifest_mutation_mutex: threading.Lock = threading.Lock() + + self._config: RuntimeConfig | None = None + self._manifest: Manifest | None = None + self.parse_project(init=True) + + self._sql_parser: SqlBlockParser | None = None + self._macro_parser: SqlMacroParser | None = None + self._adapter_created_at: float = 0.0 + + @property + def config(self) -> RuntimeConfig: + """Get the dbt project configuration.""" + if self._config is None: + raise RuntimeError("DbtProject not initialized. parse_project() must be called first.") + return self._config + + @property + def manifest(self) -> Manifest: + """Get the dbt project manifest.""" + if self._manifest is None: + raise RuntimeError("DbtProject not initialized. parse_project() must be called first.") + return self._manifest + + def parse_project(self, init: bool = False) -> None: + """Parse the dbt project configuration and manifest.""" + with self.parsing_mutex: + if init: + ns = Namespace( + **self.base_config.__dict__ + ) # TODO: replace with method call to handle _vars prop + set_from_args(ns, ns) + self._config = RuntimeConfig.from_args(ns) + self.initialize_adapter() + loader = ManifestLoader( + self.config, + self.config.load_dependencies(), + self.adapter.connections.set_query_header, + ) + self._manifest = loader.load() + self._manifest.build_flat_graph() + loader.save_macros_to_adapter(self.adapter) + self._sql_parser = None + self._macro_parser = None + + def safe_parse_project(self, init: bool = False) -> None: + """Safely re-parse the dbt project configuration and manifest preserving internal state on error.""" + old_config = copy(getattr(self, "config", None)) + try: + self.parse_project(init=init) + except Exception as exc: + if old_config: + self._config = old_config + raise exc + # Write manifest to disk here + self.write_manifest_artifact() + + def initialize_adapter(self) -> None: + """Initialize the dbt adapter.""" + if hasattr(self, "_adapter"): + try: + self.adapter.connections.cleanup_all() + except Exception: + pass + try: + adapter_cls = get_adapter_class_by_name( + self.base_config.target or self.base_config.profile or "" + ) + except Exception: + # fallback if none found (dbt should raise if invalid type) + raise RuntimeError("Could not find an adapter class by name.") + if not adapter_cls: + raise RuntimeError("No valid adapter class found.") + + # NOTE: this smooths over an API change upstream + try: + self.adapter = adapter_cls(self.config) + except TypeError: + from dbt.mp_context import get_mp_context + + self.adapter = adapter_cls(self.config, get_mp_context()) # pyright: ignore[reportCallIssue] + + self.adapter.connections.set_connection_name() + self._adapter_created_at = time.time() + setattr(self.config, "adapter", self.adapter) + + @property + def adapter(self) -> t.Any: + """Get the dbt adapter. Automatically refreshes if TTL exceeded.""" + if (time.time() - getattr(self, "_adapter_created_at", 0)) > self.ADAPTER_TTL: + self.initialize_adapter() + return self._adapter # FIXME: add to init + + @adapter.setter + def adapter(self, v: t.Any) -> None: + """Set the dbt adapter. Thread-safe.""" + if self.adapter_mutex.acquire(blocking=False): + try: + setattr(self, "_adapter", v) + v.debug_query() # Verify connection + self._adapter_created_at = time.time() + setattr(self.config, "adapter", v) + finally: + self.adapter_mutex.release() + + @property + def manifest_dict(self) -> DbtManifestProxy: + """Get a read-only proxy for the manifest's flat_graph.""" + return DbtManifestProxy(self.manifest.flat_graph) + + def write_manifest_artifact(self) -> None: + """Convenience method to write the manifest to disk.""" + artifact_path = Path(self.config.project_root) / self.config.target_path / "manifest.json" + self.manifest.write(str(artifact_path)) + + def clear_internal_caches(self) -> None: + """Clear internal lru caches for the project instance.""" + self.compile_code.cache_clear() + self.unsafe_compile_code.cache_clear() + + def get_relation(self, database: str, schema: str, name: str) -> t.Any: + """Get a relation from the adapter.""" + return self.adapter.get_relation(database, schema, name) + + def adapter_execute( + self, sql: str, auto_begin: bool = False, fetch: bool = False + ) -> tuple[t.Any, t.Any]: + """Convenience method to execute a query via the adapter.""" + return self.adapter.execute(sql, auto_begin, fetch) + + def execute_code(self, raw_code: str) -> DbtAdapterExecutionResult: + """Execute SQL, compiling jinja if necessary and wrapping result in a consistent interface.""" + compiled = raw_code + if has_jinja(raw_code): + compiled = self.compile_code(raw_code).compiled_code + resp, table = self.adapter_execute(compiled, fetch=True) + return DbtAdapterExecutionResult(resp, table, raw_code, compiled) + + @contextmanager + def generate_server_node(self, sql: str, node_name: str = "anonymous_node"): + """Generate a server node, process it, and clear it after use. Mutates manifest during context.""" + with self.manifest_mutation_mutex: + self._clear_node(node_name) + sql_node = self.sql_parser.parse_remote(sql, node_name) + process_node(self.config, self.manifest, sql_node) + yield sql_node + self._clear_node(node_name) + + def unsafe_generate_server_node( + self, sql: str, node_name: str = "anonymous_node" + ) -> ManifestNode: + """Generate a server node without context, mutating manifest.""" + self._clear_node(node_name) + sql_node = self.sql_parser.parse_remote(sql, node_name) + process_node(self.config, self.manifest, sql_node) + return sql_node + + def _clear_node(self, name: str) -> None: + """Clear a node from the manifest.""" + _ = self.manifest.nodes.pop( + f"{NodeType.SqlOperation}.{self.config.project_name}.{name}", None + ) + + @property + def sql_parser(self) -> SqlBlockParser: + """Lazy handle to the dbt SQL parser for the project.""" + if not self._sql_parser: + self._sql_parser = SqlBlockParser(self.config, self.manifest, self._config) + return self._sql_parser + + @property + def macro_parser(self) -> SqlMacroParser: + """Lazy handle to the dbt SQL macro parser for the project.""" + if not self._macro_parser: + self._macro_parser = SqlMacroParser(self.config, self.manifest) + return self._macro_parser + + def compile_from_node(self, node: ManifestNode) -> DbtAdapterCompilationResult: + """Compile a node and wrap the result in a consistent interface.""" + compiled_node = SqlCompileRunner( + self._config, self.adapter, node=node, node_index=1, num_nodes=1 + ).compile(self.manifest) + return DbtAdapterCompilationResult( + raw_code=getattr(compiled_node, "raw_code"), + compiled_code=getattr(compiled_node, "compiled_code"), + node=compiled_node, + ) + + @lru_cache(maxsize=100) + def compile_code(self, raw_code: str) -> DbtAdapterCompilationResult: + """Compile raw SQL and wrap the result in a consistent interface, leveraging lru cache.""" + tmp_id = str(uuid.uuid4()) + with self.generate_server_node(raw_code, tmp_id) as node: + return self.compile_from_node(node) + + @lru_cache(maxsize=100) + def unsafe_compile_code(self, raw_code: str, retry: int = 3) -> DbtAdapterCompilationResult: + """Compile raw SQL and wrap the result in a consistent interface, leveraging lru cache. Technically less thread-safe than compile_code but faster in a high throughput server scenario""" + tmp_id = str(uuid.uuid4()) + try: + node = self.unsafe_generate_server_node(raw_code, tmp_id) + return self.compile_from_node(node) + except Exception as e: + if retry > 0: + return self.compile_code(raw_code) + raise e + finally: + self._clear_node(tmp_id) + + +# TODO: we will collapse this from the file it is in currently +class ColumnLevelKnowledgePropagator: + """Stub for doc-propagation logic. For brevity, only the relevant part is included.""" + + @staticmethod + def get_node_columns_with_inherited_knowledge( + manifest: t.Any, + node: ManifestNode, + placeholders: list[str], + project_dir: str, + use_unrendered_descriptions: bool, + ) -> dict[str, dict[str, t.Any]]: + """ + Return known doc/metadata from related lineage. + In real usage, you would gather from multiple upstream nodes. + """ + # This is a stub. + # For now, returning an empty dict or minimal placeholders + _ = manifest, node, placeholders, project_dir, use_unrendered_descriptions + return {} + + @staticmethod + def update_undocumented_columns_with_prior_knowledge( + columns_to_update: Iterable[str], + node: ManifestNode, + yaml_section: dict[str, t.Any], + known_knowledge: dict[str, dict[str, t.Any]], + skip_add_tags: bool, + skip_merge_meta: bool, + add_progenitor_to_meta: bool, + add_inheritance_keys: list[str], + ) -> int: + """ + Propagate docs from known_knowledge onto columns in node + yaml_section. + Return count of columns that changed. + """ + _ = skip_add_tags, skip_merge_meta, add_progenitor_to_meta, add_inheritance_keys + n = 0 + for col in columns_to_update: + if col not in node.columns: + continue + cinfo = node.columns[col] + old_desc = getattr(cinfo, "description", "") + # If we have prior knowledge, do something + # (for example, update cinfo.description if old_desc is blank). + new_desc = old_desc + if col in known_knowledge and not old_desc: + new_desc = known_knowledge[col].get("description", "") + if new_desc and new_desc != old_desc: + setattr(cinfo, "description", new_desc) + # Mirror in yaml + for c in yaml_section.get("columns", []): + if c["name"].lower() == col.lower(): + c["description"] = new_desc + n += 1 + return n + + +class MissingOsmosisConfig(Exception): + pass + + +class InvalidOsmosisConfig(Exception): + pass + + +@dataclass +class DbtYamlManager(DbtProject): + """Automates tasks around schema yml files, organization, coverage, etc. + + Inherits from DbtProject to access manifest and adapter. + """ + + fqn: str | None = None + models: list[str] = field(default_factory=list) + dry_run: bool = False + catalog_file: str | None = None + skip_add_columns: bool = False + skip_add_tags: bool = False + skip_add_data_types: bool = False + numeric_precision: bool = False + char_length: bool = False + skip_merge_meta: bool = False + add_progenitor_to_meta: bool = False + use_unrendered_descriptions: bool = False + add_inheritance_for_specified_keys: list[str] = field(default_factory=list) + output_to_lower: bool = False + + _mutex: threading.Lock = threading.Lock() + _pool: ThreadPoolExecutor = ThreadPoolExecutor(max_workers=(os.cpu_count() or 1) * 2) + _catalog: CatalogArtifact | None = field(default=None, init=False, repr=False) + _mutations: int = 0 + + placeholders: tuple[str, ...] = ( + "Pending further documentation", + "Pending further documentation.", + "No description for this column", + "No description for this column.", + "Not documented", + "Not documented.", + "Undefined", + "Undefined.", + "", + ) + + def __post_init__(self) -> None: + super(DbtProject, self).__init__() # FIXME: this is not right + + # Re-parse to ensure our newly added attributes (like skip_add_columns) are recognized + if not list(self.filtered_models()): + logger().warning("No models found to process given fqn/models arguments") + logger().info("Check your filters or supply a valid model name/fqn.") + sys.exit(0) + + @property + def yaml_handler(self) -> YamlHandler: + """Get a canonical YAML handler for dbt project files""" + if not hasattr(self, "_yaml_handler"): + self._yaml_handler = YamlHandler() # FIXME: do like DbtProject + return self._yaml_handler + + @property + def catalog(self) -> CatalogArtifact | None: + """Get the catalog artifact, loading from disk if needed.""" + if self._catalog: + return self._catalog + if not self.catalog_file: + return None + fp = Path(self.catalog_file) + if not fp.exists(): + return None + self._catalog = CatalogArtifact.from_dict(json.loads(fp.read_text())) + return self._catalog + + def _filter_model_by_fqn(self, node: ManifestNode) -> bool: + """Filter a model node by its fqn.""" + if not self.fqn: + return True + fqn_parts = self.fqn.split(".") + return len(node.fqn[1:]) >= len(fqn_parts) and all( + left == right for left, right in zip(fqn_parts, node.fqn[1:]) + ) + + def _filter_model_by_models(self, node: ManifestNode) -> bool: + """Filter a model node by its name.""" + for m in self.models: + if node.name == m: + return True + node_path = self.get_node_path(node) + inp_path = Path(m).resolve() + if inp_path.is_dir(): + if node_path and inp_path in node_path.parents: + return True + elif inp_path.is_file(): + if node_path and inp_path == node_path: + return True + return False + + def _filter_model(self, node: ManifestNode) -> bool: + """Filter a model node by fqn or models depending on input.""" + if self.models: + filter_method = self._filter_model_by_models + elif self.fqn: + filter_method = self._filter_model_by_fqn + else: + # FIXME: make this more concise + def _filter_method(_): + return True + + filter_method = _filter_method + + return ( + node.resource_type in (NodeType.Model, NodeType.Source) + and node.package_name == self.project_name + and not ( + node.resource_type == NodeType.Model and node.config.materialized == "ephemeral" + ) + and filter_method(node) + ) + + def filtered_models( + self, subset: MutableMapping[str, ManifestNode] | None = None + ) -> Iterator[tuple[str, ManifestNode]]: + """Iterate over models in the manifest, applying filters.""" + items = ( + subset.items() + if subset + else chain(self.manifest.nodes.items(), self.manifest.sources.items()) + ) + for unique_id, dbt_node in items: + if self._filter_model(dbt_node): + yield unique_id, dbt_node + + @staticmethod + def get_node_path(node: ManifestNode) -> Path | None: + """Get the resolved path for a node.""" + if node.original_file_path: + return Path(node.root_path, node.original_file_path).resolve() + return None + + @staticmethod + def get_patch_path(node: ManifestNode) -> Path | None: + """Get the resolved path for a node's patch (YAML) file.""" + if node.patch_path: + return Path(node.patch_path.split("://")[-1]) + return None + + def get_columns_meta( + self, catalog_key: CatalogKey, output_to_lower: bool = False + ) -> dict[str, ColumnMetadata]: + """ + Resolve columns metadata (type, comment, etc.) either from an external CatalogArtifact + or from a live introspection query with the adapter. + """ + columns = OrderedDict() + blacklist = self._config.vars.get("dbt-osmosis", {}).get("_blacklist", []) + # if catalog is loaded: + if self.catalog: + # Attempt to match node in catalog + cat_objs = {**self.catalog.nodes, **self.catalog.sources} + matched = [ + obj for key, obj in cat_objs.items() if key.split(".")[-1] == catalog_key.name + ] + if matched: + for col in matched[0].columns.values(): + if any(re.match(pat, col.name) for pat in blacklist): + continue + columns[ + column_casing(col.name, self._config.credentials.type, output_to_lower) + ] = ColumnMetadata( + name=column_casing( + col.name, self._config.credentials.type, output_to_lower + ), + type=col.type, + index=col.index, + comment=col.comment, + ) + return columns + + # fallback to adapter-based introspection + with self.adapter.connection_named("dbt-osmosis"): + table = self.adapter.get_relation( + catalog_key.database, catalog_key.schema, catalog_key.name + ) + if not table: + return columns + try: + for c in self.adapter.get_columns_in_relation(table): + if any(re.match(p, c.name) for p in blacklist): + continue + col_cased = column_casing( + c.name, self._config.credentials.type, output_to_lower + ) + columns[col_cased] = ColumnMetadata( + name=col_cased, + type=c.dtype + if not ( + c.is_numeric() + and self.numeric_precision + or c.is_string() + and self.char_length + ) + else c.data_type, + index=None, + comment=getattr(c, "comment", None), + ) + if hasattr(c, "flatten"): + for exp in c.flatten(): + if any(re.match(p, exp.name) for p in blacklist): + continue + col_exp_cased = column_casing( + exp.name, self._config.credentials.type, output_to_lower + ) + columns[col_exp_cased] = ColumnMetadata( + name=col_exp_cased, + type=exp.dtype + if not ( + exp.is_numeric() + and self.numeric_precision + or exp.is_string() + and self.char_length + ) + else exp.data_type, + index=None, + comment=getattr(exp, "comment", None), + ) + except Exception as e: + logger().info(f"Could not resolve columns for {catalog_key}: {e}") + return columns + + def get_catalog_key(self, node: ManifestNode) -> CatalogKey: + if node.resource_type == NodeType.Source: + return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) + return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) + + def propagate_documentation_downstream( + self, force_inheritance: bool = False, output_to_lower: bool = False + ) -> None: + schema_map = self.build_schema_folder_mapping(output_to_lower) + futures = [] + with self.adapter.connection_named("dbt-osmosis"): + for unique_id, node in self.filtered_models(): + futures.append( + self._pool.submit( + self._run, unique_id, node, schema_map, force_inheritance, output_to_lower + ) + ) + wait(futures) + + def build_schema_folder_mapping(self, output_to_lower: bool) -> dict[str, SchemaFileLocation]: + """ + Build a mapping of model unique_id -> (target schema yml path, existing path) + """ + self.bootstrap_sources(output_to_lower) + out = {} + for uid, node in self.filtered_models(): + sc_path = self.get_schema_path(node) + target_sc_path = self.get_target_schema_path(node) + out[uid] = SchemaFileLocation( + target=target_sc_path.resolve(), + current=sc_path.resolve() if sc_path else None, + node_type=node.resource_type, + ) + return out + + def bootstrap_sources(self, output_to_lower: bool = False) -> None: + """ + Quick approach: if the user has declared sources in 'dbt-osmosis' vars, + create or augment the schema files for them. For brevity, direct approach only. + """ + performed_disk_mutation = False + spec_dict = self._config.vars.get("dbt-osmosis", {}) + blacklist = spec_dict.get("_blacklist", []) + + for source, spec in spec_dict.items(): + if source == "_blacklist": + continue + if isinstance(spec, str): + schema = source + database = self._config.credentials.database + path = spec + elif isinstance(spec, dict): + schema = spec.get("schema", source) + database = spec.get("database", self._config.credentials.database) + path = spec["path"] + else: + continue + + # Check if source in manifest + dbt_node = next( + (s for s in self.manifest.sources.values() if s.source_name == source), None + ) + if not dbt_node: + # create file with tables from introspection + sc_file = ( + Path(self._config.project_root) + / self._config.model_paths[0] + / path.lstrip(os.sep) + ) + relations = self.adapter.list_relations(database=database, schema=schema) + tables_data = [] + for rel in relations: + cols = [] + for c in self.adapter.get_columns_in_relation(rel): + if any(re.match(p, c.name) for p in blacklist): + continue + col_cased = column_casing( + c.name, self._config.credentials.type, output_to_lower + ) + dt = c.dtype.lower() if output_to_lower else c.dtype + cols.append({"name": col_cased, "description": "", "data_type": dt}) + tables_data.append({"name": rel.identifier, "description": "", "columns": cols}) + + sc_file.parent.mkdir(parents=True, exist_ok=True) + with open(sc_file, "w") as f: + logger().info(f"Injecting source {source} => {sc_file}") + self.yaml_handler.dump( + { + "version": 2, + "sources": [ + { + "name": source, + "database": database, + "schema": schema, + "tables": tables_data, + } + ], + }, + f, + ) + self._mutations += 1 + performed_disk_mutation = True + + if performed_disk_mutation: + logger().info("Reloading project to pick up new sources.") + self.safe_parse_project(init=True) + + def get_schema_path(self, node: ManifestNode) -> Optional[Path]: + if node.resource_type == NodeType.Model and node.patch_path: + return Path(self._config.project_root).joinpath(node.patch_path.partition("://")[-1]) + if node.resource_type == NodeType.Source and hasattr(node, "source_name"): + return Path(self._config.project_root).joinpath(node.path) + return None + + def get_target_schema_path(self, node: ManifestNode) -> Path: + path_spec = self.get_osmosis_path_spec(node) + if not path_spec: + return Path(self._config.project_root, node.original_file_path) + sc = path_spec.format(node=node, model=node.name, parent=node.fqn[-2]) + parts = [] + if node.resource_type == NodeType.Source: + parts.append(self._config.model_paths[0]) + else: + parts.append(Path(node.original_file_path).parent) + if not (sc.endswith(".yml") or sc.endswith(".yaml")): + sc += ".yml" + parts.append(sc) + return Path(self._config.project_root, *parts) + + def get_osmosis_path_spec(self, node: ManifestNode) -> Optional[str]: + if node.resource_type == NodeType.Source: + source_specs = self._config.vars.get("dbt-osmosis", {}) + source_spec = source_specs.get(node.source_name) + if isinstance(source_spec, dict): + return source_spec.get("path") + return source_spec + osm_spec = node.unrendered_config.get("dbt-osmosis") + if not osm_spec: + raise MissingOsmosisConfig(f"Config not set for model {node.name}") + return osm_spec + + def get_columns(self, key: CatalogKey, to_lower: bool) -> list[str]: + return list(self.get_columns_meta(key, to_lower).keys()) + + def get_base_model(self, node: ManifestNode, to_lower: bool) -> dict[str, t.Any]: + cols = self.get_columns(self.get_catalog_key(node), to_lower) + return { + "name": node.name, + "columns": [{"name": c, "description": ""} for c in cols], + } + + def augment_existing_model( + self, doc: dict[str, t.Any], node: ManifestNode, to_lower: bool + ) -> dict[str, t.Any]: + existing_cols = [c["name"] for c in doc.get("columns", [])] + db_cols = self.get_columns(self.get_catalog_key(node), to_lower) + new_cols = [c for c in db_cols if not any(c.lower() == e.lower() for e in existing_cols)] + for col in new_cols: + doc.setdefault("columns", []).append({"name": col, "description": ""}) + logger().info(f"Injecting column {col} into {node.unique_id}") + return doc + + def draft_project_structure_update_plan( + self, output_to_lower: bool = False + ) -> dict[Path, SchemaFileMigration]: + blueprint = {} + logger().info("Building structure update plan.") + futs = [] + with self.adapter.connection_named("dbt-osmosis"): + for uid, sf_loc in self.build_schema_folder_mapping(output_to_lower).items(): + if not sf_loc.is_valid: + futs.append( + self._pool.submit(self._draft, sf_loc, uid, blueprint, output_to_lower) + ) + wait(futs) + return blueprint + + def _draft( + self, + sf_loc: SchemaFileLocation, + uid: str, + blueprint: dict[Path, SchemaFileMigration], + to_lower: bool, + ): + try: + with self._mutex: + if sf_loc.target not in blueprint: + blueprint[sf_loc.target] = SchemaFileMigration() + if sf_loc.node_type == NodeType.Model: + node = self.manifest.nodes[uid] + else: + node = self.manifest.sources[uid] + + if sf_loc.current is None: + # model not documented yet + with self._mutex: + if sf_loc.node_type == NodeType.Model: + blueprint[sf_loc.target].output["models"].append( + self.get_base_model(node, to_lower) + ) + else: + # We have existing doc, but we want to unify it into the new location + with self._mutex: + doc = self.yaml_handler.load(sf_loc.current) + if sf_loc.node_type == NodeType.Model: + for m in doc.get("models", []): + if m["name"] == node.name: + newm = self.augment_existing_model(m, node, to_lower) + with self._mutex: + blueprint[sf_loc.target].output["models"].append(newm) + blueprint[sf_loc.target].supersede.setdefault( + sf_loc.current, [] + ).append(node) + break + else: + for source in doc.get("sources", []): + if source["name"] == node.source_name: + for table in source["tables"]: + if table["name"] == node.name: + newt = self.augment_existing_model(table, node, to_lower) + with self._mutex: + if not any( + s["name"] == node.source_name + for s in blueprint[sf_loc.target].output["sources"] + ): + blueprint[sf_loc.target].output["sources"].append( + source + ) + for s in blueprint[sf_loc.target].output["sources"]: + if s["name"] == node.source_name: + for t2 in s["tables"]: + if t2["name"] == node.name: + t2.update(newt) + break + blueprint[sf_loc.target].supersede.setdefault( + sf_loc.current, [] + ).append(node) + break + except Exception as e: + logger().error(f"Drafting structure plan for {uid} failed: {e}") + raise e + + def cleanup_blueprint( + self, blueprint: dict[Path, SchemaFileMigration] + ) -> dict[Path, SchemaFileMigration]: + for k in list(blueprint.keys()): + out = blueprint[k].output + # remove empty models/sources + if "models" in out and not out["models"]: + del out["models"] + if "sources" in out and not out["sources"]: + del out["sources"] + if not out.get("models") and not out.get("sources"): + del blueprint[k] + return blueprint + + def commit_project_restructure_to_disk( + self, + blueprint: Optional[dict[Path, SchemaFileMigration]] = None, + output_to_lower: bool = False, + ) -> bool: + if not blueprint: + blueprint = self.draft_project_structure_update_plan(output_to_lower) + blueprint = self.cleanup_blueprint(blueprint) + if not blueprint: + logger().info("Project structure is already conformed.") + return False + self.pretty_print_restructure_plan(blueprint) + + for target, struct in blueprint.items(): + if not target.exists(): + logger().info(f"Creating schema file {target}") + if not self.dry_run: + target.parent.mkdir(parents=True, exist_ok=True) + target.touch() + self.yaml_handler.dump(struct.output, target) + self._mutations += 1 + else: + logger().info(f"Updating schema file {target}") + existing = self.yaml_handler.load(target) + if not existing: + existing = {"version": 2} + if "version" not in existing: + existing["version"] = 2 + + if "models" in struct.output: + existing.setdefault("models", []).extend(struct.output["models"]) + if "sources" in struct.output: + existing.setdefault("sources", []).extend(struct.output["sources"]) + if not self.dry_run: + self.yaml_handler.dump(existing, target) + self._mutations += 1 + + # handle superseded + for sup_path, nodes in struct.supersede.items(): + raw_sc = self.yaml_handler.load(sup_path) + # figure out which ones to remove + to_remove_models = {n.name for n in nodes if n.resource_type == NodeType.Model} + to_remove_sources = { + (n.source_name, n.name) for n in nodes if n.resource_type == NodeType.Source + } + + keep_models = [] + for m in raw_sc.get("models", []): + if m["name"] not in to_remove_models: + keep_models.append(m) + raw_sc["models"] = keep_models + + # remove relevant source tables + keep_src = [] + for s in raw_sc.get("sources", []): + keep_tables = [] + for t_ in s.get("tables", []): + if (s["name"], t_["name"]) not in to_remove_sources: + keep_tables.append(t_) + if keep_tables: + s["tables"] = keep_tables + keep_src.append(s) + raw_sc["sources"] = keep_src + + # if file is empty => remove it + if (not raw_sc.get("models")) and (not raw_sc.get("sources")): + logger().info(f"Superseding entire file {sup_path}") + if not self.dry_run: + sup_path.unlink(missing_ok=True) + if sup_path.parent.exists() and not any(sup_path.parent.iterdir()): + sup_path.parent.rmdir() + else: + if not self.dry_run: + self.yaml_handler.dump(raw_sc, sup_path) + self._mutations += 1 + logger().info(f"Migrated doc from {sup_path} -> {target}") + return True + + @staticmethod + def pretty_print_restructure_plan(blueprint: dict[Path, SchemaFileMigration]) -> None: + summary = [] + for plan in blueprint.keys(): + files_superseded = [s.name for s in blueprint[plan].supersede] or ["CREATE"] + summary.append((files_superseded, "->", plan.name)) + logger().info(summary) + + ############################################################################ + # Column Sync + ############################################################################ + @staticmethod + def get_column_sets( + database_cols: Iterable[str], + yaml_cols: Iterable[str], + documented_cols: Iterable[str], + ) -> t.tuple[list[str], list[str], list[str]]: + """ + Return: (missing_in_yaml, undocumented_in_yaml, extra_in_yaml) + """ + missing = [x for x in database_cols if x.lower() not in (y.lower() for y in yaml_cols)] + undocumented = [ + x for x in database_cols if x.lower() not in (y.lower() for y in documented_cols) + ] + extra = [x for x in yaml_cols if x.lower() not in (y.lower() for y in database_cols)] + return missing, undocumented, extra + + def _run( + self, + uid: str, + node: ManifestNode, + schema_map: dict[str, SchemaFileLocation], + force_inheritance: bool, + output_to_lower: bool, + ): + try: + with self._mutex: + logger().info(f"Processing model: {uid}") + sf_loc = schema_map.get(uid) + if not sf_loc or not sf_loc.current: + with self._mutex: + logger().info(f"No schema file for {uid}, skipping.") + return + db_cols_list = self.get_columns(self.get_catalog_key(node), output_to_lower) + if not db_cols_list: + with self._mutex: + logger().info( + f"No database columns found for {uid}, falling back to yaml columns." + ) + db_cols_list = list(node.columns.keys()) + + db_cols_set = set(db_cols_list) + yaml_cols_list = list(node.columns.keys()) + documented_cols_set = { + c + for c, info in node.columns.items() + if info.description and info.description not in self.placeholders + } + + missing, undocumented, extra = self.get_column_sets( + db_cols_list, yaml_cols_list, documented_cols_set + ) + + if force_inheritance: + undocumented = list(db_cols_set) # treat all as needing doc + + with self._mutex: + sc_data = self.yaml_handler.load(sf_loc.current) + section = self.maybe_get_section_from_schema_file(sc_data, node) + if not section: + logger().info(f"No section in {sf_loc.current} for {uid}") + return + # Perform updates + n_added = n_doc_inh = n_removed = n_type_changed = n_desc_changed = 0 + if any([missing, undocumented, extra]): + ( + n_added, + n_doc_inh, + n_removed, + n_type_changed, + n_desc_changed, + ) = self.update_schema_file_and_node( + missing, + undocumented, + extra, + node, + section, + self.get_columns_meta(self.get_catalog_key(node), output_to_lower), + output_to_lower, + ) + + reorder = tuple(db_cols_list) != tuple(yaml_cols_list) + if reorder: + + def _sort(c: dict[str, t.Any]) -> int: + try: + return db_cols_list.index( + column_casing( + c["name"], self._config.credentials.type, output_to_lower + ) + ) + except ValueError: + return 999999 + + section["columns"].sort(key=_sort) + + if ( + n_added + n_doc_inh + n_removed + n_type_changed + n_desc_changed or reorder + ) and not self.dry_run: + self.yaml_handler.dump(sc_data, sf_loc.current) + self._mutations += 1 + logger().info(f"Updated {sf_loc.current}") + else: + logger().info(f"{sf_loc.current} is up to date") + + except Exception as e: + logger().error(f"Error while processing {uid}: {e}") + raise e + + @staticmethod + def maybe_get_section_from_schema_file( + yaml_data: dict[str, t.Any], node: ManifestNode + ) -> Optional[dict[str, t.Any]]: + if node.resource_type == NodeType.Source: + for s in yaml_data.get("sources", []): + for t_ in s.get("tables", []): + if s["name"] == node.source_name and t_["name"] == node.name: + return t_ + else: + for m in yaml_data.get("models", []): + if m["name"] == node.name: + return m + return None + + @staticmethod + def remove_columns_not_in_database( + extra_columns: Iterable[str], + node: ManifestNode, + yaml_section: dict[str, t.Any], + ) -> int: + c = 0 + for e in extra_columns: + node.columns.pop(e, None) + yaml_section["columns"] = [col for col in yaml_section["columns"] if col["name"] != e] + c += 1 + return c + + def update_columns_attribute( + self, + node: ManifestNode, + yaml_section: dict[str, t.Any], + db_meta: dict[str, ColumnMetadata], + attr: str, + meta_key: str, + skip_flag: bool, + output_to_lower: bool, + ) -> int: + if skip_flag: + return 0 + changed = 0 + for col_name, col_meta in db_meta.items(): + if col_name in node.columns: + new_val = getattr(col_meta, meta_key, "") or "" + old_val = getattr(node.columns[col_name], attr, "") + if new_val and old_val != new_val: + setattr(node.columns[col_name], attr, new_val) + for c in yaml_section["columns"]: + if ( + column_casing(c["name"], self._config.credentials.type, output_to_lower) + == col_name + ): + if output_to_lower and isinstance(new_val, str): + new_val = new_val.lower() + c[attr] = new_val + changed += 1 + return changed + + def add_missing_cols_to_node_and_model( + self, + missing_cols: Iterable[str], + node: ManifestNode, + yaml_section: dict[str, t.Any], + db_meta: dict[str, ColumnMetadata], + output_to_lower: bool, + ) -> int: + c = 0 + for col in missing_cols: + if col not in db_meta: + continue + dtype = db_meta[col].type or "" + desc = db_meta[col].comment or "" + meta_name = col.lower() if output_to_lower else col + meta_type = dtype.lower() if output_to_lower else dtype + node.columns[col] = ColumnInfo.from_dict( + {"name": meta_name, "description": desc, "data_type": meta_type} + ) + yaml_section.setdefault("columns", []).append( + {"name": meta_name, "description": desc, "data_type": meta_type} + ) + c += 1 + return c + + def update_schema_file_and_node( + self, + missing_cols: Iterable[str], + undocumented_cols: Iterable[str], + extra_cols: Iterable[str], + node: ManifestNode, + yaml_section: dict[str, t.Any], + db_meta: dict[str, ColumnMetadata], + output_to_lower: bool, + ) -> t.tuple[int, int, int, int, int]: + n_added = 0 + n_doc_inherited = 0 + n_removed = 0 + n_type_updated = 0 + n_desc_updated = 0 + + if not self.skip_add_columns: + n_added = self.add_missing_cols_to_node_and_model( + missing_cols, node, yaml_section, db_meta, output_to_lower + ) + + knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( + self.manifest, + node, + self.placeholders, + self._config.project_root, + self.use_unrendered_descriptions, + ) + n_doc_inherited = ( + ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( + undocumented_cols, + node, + yaml_section, + knowledge, + self.skip_add_tags, + self.skip_merge_meta, + self.add_progenitor_to_meta, + self.add_inheritance_for_specified_keys, + ) + ) + n_type_updated = self.update_columns_attribute( + node, + yaml_section, + db_meta, + attr="data_type", + meta_key="type", + skip_flag=self.skip_add_data_types, + output_to_lower=output_to_lower, + ) + # We piggyback the "catalog_file" presence as "update description?" flag in original code + n_desc_updated = self.update_columns_attribute( + node, + yaml_section, + db_meta, + attr="description", + meta_key="comment", + skip_flag=(self.catalog_file is None), + output_to_lower=output_to_lower, + ) + n_removed = self.remove_columns_not_in_database(extra_cols, node, yaml_section) + return n_added, n_doc_inherited, n_removed, n_type_updated, n_desc_updated diff --git a/src/dbt_osmosis/main.py b/src/dbt_osmosis/main.py index e62e7af4..42b478e5 100644 --- a/src/dbt_osmosis/main.py +++ b/src/dbt_osmosis/main.py @@ -260,11 +260,11 @@ def refactor( # Conform project structure & bootstrap undocumented models injecting columns if runner.commit_project_restructure_to_disk(): - runner.safe_parse_project(reinit=True) + runner.safe_parse_project(init=True) runner.propagate_documentation_downstream( force_inheritance=force_inheritance, output_to_lower=output_to_lower ) - if check and runner.mutations > 0: + if check and runner._mutations > 0: exit(1) @@ -415,7 +415,7 @@ def organize( # Conform project structure & bootstrap undocumented models injecting columns runner.commit_project_restructure_to_disk() - if check and runner.mutations > 0: + if check and runner._mutations > 0: exit(1) @@ -596,7 +596,7 @@ def document( # Propagate documentation & inject/remove schema file columns to align with model in database runner.propagate_documentation_downstream(force_inheritance, output_to_lower) - if check and runner.mutations > 0: + if check and runner._mutations > 0: exit(1) From 21375e727e1ead6e826f687879454d8e5a7c67b2 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Sun, 29 Dec 2024 01:31:48 -0700 Subject: [PATCH 05/46] wip: continue working on functional rewrite --- src/dbt_osmosis/core/osmosis.py | 146 +++++++++++++++++++------------- 1 file changed, 85 insertions(+), 61 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index d22c4cf7..566faf5a 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -10,10 +10,8 @@ import uuid from collections import OrderedDict from collections.abc import Iterable, Iterator -from concurrent.futures import ThreadPoolExecutor, wait -from contextlib import contextmanager +from concurrent.futures import ThreadPoolExecutor from dataclasses import dataclass, field -from functools import lru_cache from itertools import chain from pathlib import Path @@ -233,19 +231,44 @@ class DbtProjectContext: """A data object that includes references to: - The loaded dbt config - - The runtime adapter - The manifest - The sql/macro parsers + + With mutexes for thread safety. The adapter is lazily instantiated and has a TTL which allows + for re-use across multiple operations in long-running processes. (is the idea) """ config: RuntimeConfig - adapter: Adapter manifest: Manifest sql_parser: SqlBlockParser macro_parser: SqlMacroParser - adapter_mutex: threading.Lock = field(default_factory=threading.Lock) - manifest_mutex: threading.Lock = field(default_factory=threading.Lock) - adapter_created_at: float = time.time() + adapter_ttl: float = 3600.0 + + _adapter_mutex: threading.Lock = field(default_factory=threading.Lock) + _manifest_mutex: threading.Lock = field(default_factory=threading.Lock) + _adapter: Adapter | None = None + _adapter_created_at: float = 0.0 + + @property + def is_adapter_expired(self) -> bool: + """Check if the adapter has expired based on the adapter TTL.""" + return time.time() - self._adapter_created_at > self.adapter_ttl + + # NOTE: the way we use the adapter, the generics are irrelevant + @property + def adapter(self) -> Adapter[t.Any, t.Any, t.Any, t.Any]: + """Get the adapter instance, creating a new one if the current one has expired.""" + with self._adapter_mutex: + if not self._adapter or self.is_adapter_expired: + self._adapter = instantiate_adapter(self.config) + self._adapter.set_macro_resolver(self.manifest) + self._adapter_created_at = time.time() + return self._adapter + + @property + def manifest_mutex(self) -> threading.Lock: + """Return the manifest mutex for thread safety.""" + return self._manifest_mutex def discover_project_dir() -> str: @@ -264,7 +287,7 @@ def discover_profiles_dir() -> str: return str(Path.home() / ".dbt") -def instantiate_adapter(runtime_config: RuntimeConfig) -> t.Any: +def instantiate_adapter(runtime_config: RuntimeConfig) -> Adapter[t.Any, t.Any, t.Any, t.Any]: """Instantiate a dbt adapter based on the runtime configuration.""" adapter_cls = get_adapter_class_by_name(runtime_config.credentials.type) if not adapter_cls: @@ -280,7 +303,7 @@ def instantiate_adapter(runtime_config: RuntimeConfig) -> t.Any: adapter = adapter_cls(runtime_config, get_mp_context()) # pyright: ignore[reportCallIssue] - adapter.connections.set_connection_name() + adapter.connections.set_connection_name("dbt-osmosis") return adapter @@ -290,27 +313,21 @@ def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: dbt_flags.set_from_args(args, args) runtime_cfg = RuntimeConfig.from_args(args) - adapter = instantiate_adapter(runtime_cfg) - loader = ManifestLoader( - runtime_cfg, runtime_cfg.load_dependencies(), adapter.connections.set_query_header - ) + loader = ManifestLoader(runtime_cfg, runtime_cfg.load_dependencies()) manifest = loader.load() manifest.build_flat_graph() - loader.save_macros_to_adapter(adapter) + adapter = instantiate_adapter(runtime_cfg) + adapter.set_macro_resolver(manifest) sql_parser = SqlBlockParser(runtime_cfg, manifest, runtime_cfg) macro_parser = SqlMacroParser(runtime_cfg, manifest) return DbtProjectContext( config=runtime_cfg, - adapter=adapter, manifest=manifest, sql_parser=sql_parser, macro_parser=macro_parser, - adapter_mutex=threading.Lock(), - manifest_mutex=threading.Lock(), - adapter_created_at=time.time(), ) @@ -384,9 +401,7 @@ def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> AdapterRespons else: sql_to_exec = raw_sql - with context.adapter_mutex: - resp, _ = context.adapter.execute(sql_to_exec, auto_begin=False, fetch=True) - + resp, _ = context.adapter.execute(sql_to_exec, auto_begin=False, fetch=True) return resp @@ -460,6 +475,10 @@ def load_catalog(settings: YamlRefactorSettings) -> CatalogArtifact | None: return CatalogArtifact.from_dict(json.loads(fp.read_text())) +# TODO: more work to do below the fold here + + +# NOTE: in multithreaded operations, we need to use the thread connection for the adapter def get_columns_meta( context: YamlRefactorContext, node: ManifestNode, @@ -490,50 +509,50 @@ def get_columns_meta( ) return cased_cols - with context.project.adapter.connection_named("dbt-osmosis"): - rel = context.project.adapter.get_relation(key.database, key.schema, key.name) - if not rel: - return cased_cols - try: - col_objs = context.project.adapter.get_columns_in_relation(rel) - for col_ in col_objs: - if any(re.match(b, col_.name) for b in blacklist): - continue - cased = column_casing( - col_.name, - context.project.config.credentials.type, - context.settings.output_to_lower, - ) - dtype = _maybe_use_precise_dtype(col_, context.settings) - cased_cols[cased] = ColumnMetadata( - name=cased, - type=dtype, - index=None, - comment=getattr(col_, "comment", None), - ) - if hasattr(col_, "flatten"): - for exp in col_.flatten(): - if any(re.match(b, exp.name) for b in blacklist): - continue - cased2 = column_casing( - exp.name, - context.project.config.credentials.type, - context.settings.output_to_lower, - ) - dtype2 = _maybe_use_precise_dtype(exp, context.settings) - cased_cols[cased2] = ColumnMetadata( - name=cased2, - type=dtype2, - index=None, - comment=getattr(exp, "comment", None), - ) - except Exception as exc: - logger().warning(f"Could not introspect columns for {key}: {exc}") + rel = context.project.adapter.get_relation(key.database, key.schema, key.name) + if not rel: + return cased_cols + try: + col_objs = context.project.adapter.get_columns_in_relation(rel) + for col_ in col_objs: + if any(re.match(b, col_.name) for b in blacklist): + continue + cased = column_casing( + col_.name, + context.project.config.credentials.type, + context.settings.output_to_lower, + ) + dtype = _maybe_use_precise_dtype(col_, context.settings) + cased_cols[cased] = ColumnMetadata( + name=cased, + type=dtype, + index=None, + comment=getattr(col_, "comment", None), + ) + if hasattr(col_, "flatten"): + for exp in col_.flatten(): + if any(re.match(b, exp.name) for b in blacklist): + continue + cased2 = column_casing( + exp.name, + context.project.config.credentials.type, + context.settings.output_to_lower, + ) + dtype2 = _maybe_use_precise_dtype(exp, context.settings) + cased_cols[cased2] = ColumnMetadata( + name=cased2, + type=dtype2, + index=None, + comment=getattr(exp, "comment", None), + ) + except Exception as exc: + logger.warning(f"Could not introspect columns for {key}: {exc}") return cased_cols def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: + """Use the precise data type if enabled in the settings.""" if (col.is_numeric() and settings.numeric_precision) or ( col.is_string() and settings.char_length ): @@ -542,11 +561,16 @@ def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: def _catalog_key_for_node(node: ManifestNode) -> CatalogKey: + """Make an appropriate catalog key for a dbt node.""" + # TODO: pyright seems to think something is wrong below if node.resource_type == NodeType.Source: return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) +# NOTE: usage examples of the more FP style module below + + def build_dbt_project_context(cfg: DbtConfiguration) -> DbtProjectContext: if not cfg.project_dir: cfg.project_dir = discover_project_dir() From ba430deaeabc870d4de2303a6223f96896d94851 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Sun, 29 Dec 2024 02:13:20 -0700 Subject: [PATCH 06/46] wip: continue working on functional rewrite --- src/dbt_osmosis/core/osmosis.py | 115 +++++++++++++++----------------- 1 file changed, 54 insertions(+), 61 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 566faf5a..0e2fd346 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -1,5 +1,7 @@ # pyright: reportUnknownVariableType=false, reportPrivateImportUsage=false, reportAny=false, reportUnknownMemberType=false +from __future__ import annotations + import argparse import json import logging @@ -36,28 +38,28 @@ logger = logging.getLogger("dbt-osmosis") -def has_jinja(code: str) -> bool: - """Check if a code string contains jinja tokens.""" - return any(token in code for token in ("{{", "}}", "{%", "%}", "{#", "#}")) +def discover_project_dir() -> str: + """Return the directory containing a dbt_project.yml if found, else the current dir.""" + cwd = Path.cwd() + for p in [cwd] + list(cwd.parents): + if (p / "dbt_project.yml").exists(): + return str(p.resolve()) + return str(cwd.resolve()) -def column_casing(column: str, credentials_type: str, to_lower: bool) -> str: - """Apply case normalization to a column name based on the credentials type.""" - if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): - return column - if to_lower: - return column.lower() - if credentials_type == "snowflake": - return column.upper() - return column +def discover_profiles_dir() -> str: + """Return the directory containing a profiles.yml if found, else ~/.dbt.""" + if (Path.cwd() / "profiles.yml").exists(): + return str(Path.cwd().resolve()) + return str(Path.home() / ".dbt") @dataclass class DbtConfiguration: """Configuration for a dbt project.""" - project_dir: str - profiles_dir: str + project_dir: str = field(default_factory=discover_project_dir) + profiles_dir: str = field(default_factory=discover_profiles_dir) target: str | None = None profile: str | None = None threads: int = 1 @@ -65,7 +67,7 @@ class DbtConfiguration: which: str = "" debug: bool = False - _vars: str | dict[str, t.Any] = field(default_factory=dict) + _vars: str | dict[str, t.Any] = field(default_factory=dict, init=False) def __post_init__(self) -> None: if self.threads != 1: @@ -244,8 +246,8 @@ class DbtProjectContext: macro_parser: SqlMacroParser adapter_ttl: float = 3600.0 - _adapter_mutex: threading.Lock = field(default_factory=threading.Lock) - _manifest_mutex: threading.Lock = field(default_factory=threading.Lock) + _adapter_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) + _manifest_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) _adapter: Adapter | None = None _adapter_created_at: float = 0.0 @@ -271,22 +273,6 @@ def manifest_mutex(self) -> threading.Lock: return self._manifest_mutex -def discover_project_dir() -> str: - """Return the directory containing a dbt_project.yml if found, else the current dir.""" - cwd = Path.cwd() - for p in [cwd] + list(cwd.parents): - if (p / "dbt_project.yml").exists(): - return str(p.resolve()) - return str(cwd.resolve()) - - -def discover_profiles_dir() -> str: - """Return the directory containing a profiles.yml if found, else ~/.dbt.""" - if (Path.cwd() / "profiles.yml").exists(): - return str(Path.cwd().resolve()) - return str(Path.home() / ".dbt") - - def instantiate_adapter(runtime_config: RuntimeConfig) -> Adapter[t.Any, t.Any, t.Any, t.Any]: """Instantiate a dbt adapter based on the runtime configuration.""" adapter_cls = get_adapter_class_by_name(runtime_config.credentials.type) @@ -369,6 +355,21 @@ def __post_init__(self) -> None: self.placeholders = (EMPTY_STRING, *self.placeholders) +def load_catalog(settings: YamlRefactorSettings) -> CatalogArtifact | None: + """Load the catalog file if it exists and return a CatalogArtifact instance.""" + if not settings.catalog_file: + return None + fp = Path(settings.catalog_file) + if not fp.exists(): + return None + return CatalogArtifact.from_dict(json.loads(fp.read_text())) + + +def has_jinja(code: str) -> bool: + """Check if a code string contains jinja tokens.""" + return any(token in code for token in ("{{", "}}", "{%", "%}", "{#", "#}")) + + def compile_sql_code(context: DbtProjectContext, raw_sql: str) -> ManifestSQLNode: """Compile jinja SQL using the context's manifest and adapter.""" tmp_id = str(uuid.uuid4()) @@ -465,14 +466,15 @@ def _get_node_path(node: ManifestNode | SourceDefinition) -> Path | None: return None -def load_catalog(settings: YamlRefactorSettings) -> CatalogArtifact | None: - """Load the catalog file if it exists and return a CatalogArtifact instance.""" - if not settings.catalog_file: - return None - fp = Path(settings.catalog_file) - if not fp.exists(): - return None - return CatalogArtifact.from_dict(json.loads(fp.read_text())) +def normalize_column_name(column: str, credentials_type: str, to_lower: bool) -> str: + """Apply case normalization to a column name based on the credentials type.""" + if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): + return column + if to_lower: + return column.lower() + if credentials_type == "snowflake": + return column.upper() + return column # TODO: more work to do below the fold here @@ -496,7 +498,7 @@ def get_columns_meta( for col in matched[0].columns.values(): if any(re.match(p, col.name) for p in blacklist): continue - cased = column_casing( + cased = normalize_column_name( col.name, context.project.config.credentials.type, context.settings.output_to_lower, @@ -517,7 +519,7 @@ def get_columns_meta( for col_ in col_objs: if any(re.match(b, col_.name) for b in blacklist): continue - cased = column_casing( + cased = normalize_column_name( col_.name, context.project.config.credentials.type, context.settings.output_to_lower, @@ -533,7 +535,7 @@ def get_columns_meta( for exp in col_.flatten(): if any(re.match(b, exp.name) for b in blacklist): continue - cased2 = column_casing( + cased2 = normalize_column_name( exp.name, context.project.config.credentials.type, context.settings.output_to_lower, @@ -568,25 +570,16 @@ def _catalog_key_for_node(node: ManifestNode) -> CatalogKey: return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) -# NOTE: usage examples of the more FP style module below - - -def build_dbt_project_context(cfg: DbtConfiguration) -> DbtProjectContext: - if not cfg.project_dir: - cfg.project_dir = discover_project_dir() - if not cfg.profiles_dir: - cfg.profiles_dir = discover_profiles_dir() - return create_dbt_project_context(cfg) +# NOTE: usage example of the more FP style module below def run_example_compilation_flow() -> None: - cfg = DbtConfiguration( - project_dir="", profiles_dir="", target="some_target", threads=2, _vars={"foo": "bar"} - ) - proj_ctx = build_dbt_project_context(cfg) + config = DbtConfiguration(target="some_target", threads=2) + config.vars = {"foo": "bar"} + proj_ctx = create_dbt_project_context(config) - cr = compile_sql_code(proj_ctx, "select '{{ 1+1 }}' as col") - print("Compiled =>", cr.compiled_code) + node = compile_sql_code(proj_ctx, "select '{{ 1+1 }}' as col") + print("Compiled =>", node.compiled_code) - ex = execute_sql_code(proj_ctx, "select '{{ 1+2 }}' as col") - print("Rows =>", ex.table) + resp = execute_sql_code(proj_ctx, "select '{{ 1+2 }}' as col") + print("Resp =>", resp) From 06890b31806eed3f97c8c4ef8f505127a647ccf5 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Sun, 29 Dec 2024 03:44:52 -0700 Subject: [PATCH 07/46] wip: continue working on functional rewrite --- .../core/column_level_knowledge.py | 50 -- .../core/column_level_knowledge_propagator.py | 257 -------- src/dbt_osmosis/core/exceptions.py | 14 - src/dbt_osmosis/core/osmosis.py | 562 ++++++++++++++---- 4 files changed, 439 insertions(+), 444 deletions(-) delete mode 100644 src/dbt_osmosis/core/column_level_knowledge.py delete mode 100644 src/dbt_osmosis/core/column_level_knowledge_propagator.py delete mode 100644 src/dbt_osmosis/core/exceptions.py diff --git a/src/dbt_osmosis/core/column_level_knowledge.py b/src/dbt_osmosis/core/column_level_knowledge.py deleted file mode 100644 index 3444a031..00000000 --- a/src/dbt_osmosis/core/column_level_knowledge.py +++ /dev/null @@ -1,50 +0,0 @@ -import re -from typing import Any, Dict - -ColumnLevelKnowledge = Dict[str, Any] -Knowledge = Dict[str, ColumnLevelKnowledge] - - -def delete_if_value_is_empty(prior_knowledge: ColumnLevelKnowledge, key: str) -> None: - if not prior_knowledge[key]: - del prior_knowledge[key] - - -def get_prior_knowledge( - knowledge: Knowledge, - column: str, -) -> ColumnLevelKnowledge: - camel_column = re.sub("_(.)", lambda m: m.group(1).upper(), column) - pascal_column = camel_column[0].upper() + camel_column[1:] - prior_knowledge_candidates = list( - filter( - lambda k: k, - [ - knowledge.get(column), - knowledge.get(column.lower()), - knowledge.get(camel_column), - knowledge.get(pascal_column), - ], - ) - ) - sorted_prior_knowledge_candidates_sources = sorted( - [ - k - for k in prior_knowledge_candidates - if (k["progenitor"].startswith("source") or k["progenitor"].startswith("seed")) - ], - key=lambda k: k["generation"], - reverse=True, - ) - sorted_prior_knowledge_candidates_models = sorted( - [k for k in prior_knowledge_candidates if k["progenitor"].startswith("model")], - key=lambda k: k["generation"], - reverse=True, - ) - sorted_prior_knowledge_candidates = ( - sorted_prior_knowledge_candidates_sources + sorted_prior_knowledge_candidates_models - ) - prior_knowledge = ( - sorted_prior_knowledge_candidates[0] if sorted_prior_knowledge_candidates else {} - ) - return prior_knowledge diff --git a/src/dbt_osmosis/core/column_level_knowledge_propagator.py b/src/dbt_osmosis/core/column_level_knowledge_propagator.py deleted file mode 100644 index 998986d1..00000000 --- a/src/dbt_osmosis/core/column_level_knowledge_propagator.py +++ /dev/null @@ -1,257 +0,0 @@ -from pathlib import Path -from typing import Any, Dict, Iterable, List, Optional - -import yaml -from dbt.contracts.graph.nodes import ModelNode, SeedNode, SourceDefinition - -from dbt_osmosis.core.column_level_knowledge import ( - ColumnLevelKnowledge, - Knowledge, - delete_if_value_is_empty, - get_prior_knowledge, -) -from dbt_osmosis.core.log_controller import logger -from dbt_osmosis.vendored.dbt_core_interface.project import ColumnInfo, ManifestNode - - -def _build_node_ancestor_tree( - manifest: ManifestNode, - node: ManifestNode, - family_tree: Optional[Dict[str, List[str]]] = None, - members_found: Optional[List[str]] = None, - depth: int = 0, -) -> Dict[str, List[str]]: - """Recursively build dictionary of parents in generational order""" - if family_tree is None: - family_tree = {} - if members_found is None: - members_found = [] - if not hasattr(node, "depends_on"): - return family_tree - for parent in getattr(node.depends_on, "nodes", []): - member = manifest.nodes.get(parent, manifest.sources.get(parent)) - if member and parent not in members_found: - family_tree.setdefault(f"generation_{depth}", []).append(parent) - members_found.append(parent) - # Recursion - family_tree = _build_node_ancestor_tree( - manifest, member, family_tree, members_found, depth + 1 - ) - return family_tree - - -def _get_member_yaml(member: ManifestNode, project_dir: Path) -> Optional[dict]: - """Get the yaml for a member from the file in the manifest, only returns relevant section""" - if isinstance(member, SourceDefinition): - key = "tables" - elif isinstance(member, ModelNode): - key = "models" - elif isinstance(member, SeedNode): - key = "seeds" - else: - return None - - data = None - if key == "tables" and hasattr(member, "original_file_path") and member.original_file_path: - with (project_dir / Path(member.original_file_path)).open("r") as f: - data = yaml.safe_load(f) - data = next((item for item in data["sources"] if item["name"] == member.source_name), None) - elif key in ["seeds", "models"] and hasattr(member, "patch_path") and member.patch_path: - pfp: str = member.patch_path.split("://")[-1] - with (project_dir / Path(pfp)).open() as f: - data = yaml.safe_load(f) - if data: - model_yaml = next((item for item in data[key] if item["name"] == member.name), None) - return model_yaml - - -def _inherit_column_level_knowledge( - manifest: ManifestNode, - family_tree: Dict[str, Any], - placeholders: List[str], - project_dir: Path = Path.cwd(), - use_unrendered_descriptions: bool = False, -) -> Knowledge: - """Inherit knowledge from ancestors in reverse insertion order to ensure that the most - recent ancestor is always the one to inherit from - """ - knowledge: Knowledge = {} - for generation in reversed(family_tree): - for ancestor in family_tree[generation]: - member: ManifestNode = manifest.nodes.get(ancestor, manifest.sources.get(ancestor)) - if not member: - continue - if use_unrendered_descriptions: - # overwrite member as the yaml - model_yaml = _get_member_yaml(member, project_dir) - for name, info in member.columns.items(): - knowledge_default = {"progenitor": ancestor, "generation": generation} - knowledge.setdefault(name, knowledge_default) - deserialized_info = info.to_dict() - if ( - use_unrendered_descriptions and model_yaml - ): # overwrite the deserialized info with unrendered column info - col_yaml = next( - ( - col - for col in model_yaml["columns"] - if col["name"] == deserialized_info["name"] - ), - None, - ) - if col_yaml is not None and "description" in col_yaml: - deserialized_info["description"] = col_yaml["description"] - - # Handle Info: - # 1. tags are additive - # 2. descriptions are overriden - # 3. meta is merged - # 4. tests are ignored until I am convinced those shouldn't be - # hand curated with love - if deserialized_info["description"] in placeholders: - deserialized_info.pop("description", None) - deserialized_info["tags"] = list( - set(deserialized_info.pop("tags", []) + knowledge[name].get("tags", [])) - ) - if not deserialized_info["tags"]: - deserialized_info.pop("tags") # poppin' tags like Macklemore - deserialized_info["meta"] = { - **knowledge[name].get("meta", {}), - **deserialized_info["meta"], - } - if not deserialized_info["meta"]: - deserialized_info.pop("meta") - knowledge[name].update(deserialized_info) - return knowledge - - -class ColumnLevelKnowledgePropagator: - @staticmethod - def get_node_columns_with_inherited_knowledge( - manifest: ManifestNode, - node: ManifestNode, - placeholders: List[str], - project_dir: Path = Path.cwd(), - use_unrendered_descriptions: bool = False, - ) -> Knowledge: - """Build a knowledgebase for the model based on iterating through ancestors""" - family_tree = _build_node_ancestor_tree(manifest, node) - knowledge = _inherit_column_level_knowledge( - manifest, family_tree, placeholders, project_dir, use_unrendered_descriptions - ) - return knowledge - - @staticmethod - def _get_original_knowledge(node: ManifestNode, column: str) -> ColumnLevelKnowledge: - original_knowledge: ColumnLevelKnowledge = { - "description": None, - "tags": set(), - "meta": {}, - } - if column in node.columns: - original_knowledge["description"] = node.columns[column].description - original_knowledge["meta"] = node.columns[column].meta - original_knowledge["tags"] = node.columns[column].tags - return original_knowledge - - @staticmethod - def _merge_prior_knowledge_with_original_knowledge( - prior_knowledge: ColumnLevelKnowledge, - original_knowledge: ColumnLevelKnowledge, - add_progenitor_to_meta: bool, - progenitor: str, - ) -> None: - if "tags" in prior_knowledge: - prior_knowledge["tags"] = list( - set(prior_knowledge["tags"] + list(original_knowledge["tags"])) - ) - else: - prior_knowledge["tags"] = original_knowledge["tags"] - - if "meta" in prior_knowledge: - prior_knowledge["meta"] = { - **original_knowledge["meta"], - **prior_knowledge["meta"], - } - else: - prior_knowledge["meta"] = original_knowledge["meta"] - - if add_progenitor_to_meta and progenitor: - prior_knowledge["meta"]["osmosis_progenitor"] = progenitor - - if original_knowledge["meta"].get("osmosis_keep_description", None): - prior_knowledge["description"] = original_knowledge["description"] - - for k in ["tags", "meta"]: - delete_if_value_is_empty(prior_knowledge, k) - - @staticmethod - def update_undocumented_columns_with_prior_knowledge( - undocumented_columns: Iterable[str], - node: ManifestNode, - yaml_file_model_section: Dict[str, Any], - knowledge: Knowledge, - skip_add_tags: bool, - skip_merge_meta: bool, - add_progenitor_to_meta: bool, - add_inheritance_for_specified_keys: Iterable[str] = [], - ) -> int: - """Update undocumented columns with prior knowledge in node and model simultaneously - THIS MUTATES THE NODE AND MODEL OBJECTS so that state is always accurate""" - inheritables = ["description"] - if not skip_add_tags: - inheritables.append("tags") - if not skip_merge_meta: - inheritables.append("meta") - for key in add_inheritance_for_specified_keys: - if key not in inheritables: - inheritables.append(key) - - changes_committed = 0 - for column in undocumented_columns: - original_knowledge = ColumnLevelKnowledgePropagator._get_original_knowledge( - node, column - ) - if original_knowledge["meta"].get("osmosis_prefix", None): - column_without_prefix = column.removeprefix( - original_knowledge["meta"]["osmosis_prefix"] - ) - else: - column_without_prefix = column - - prior_knowledge: ColumnLevelKnowledge = get_prior_knowledge( - knowledge, column_without_prefix - ) - progenitor = prior_knowledge.pop("progenitor", None) - prior_knowledge: ColumnLevelKnowledge = { - k: v for k, v in prior_knowledge.items() if k in inheritables - } - - ColumnLevelKnowledgePropagator._merge_prior_knowledge_with_original_knowledge( - prior_knowledge, - original_knowledge, - add_progenitor_to_meta, - progenitor, - ) - if not prior_knowledge: - continue - - if column not in node.columns: - node.columns[column] = ColumnInfo.from_dict({"name": column, **prior_knowledge}) - else: - node.columns[column] = ColumnInfo.from_dict( - dict(node.columns[column].to_dict(), **prior_knowledge) - ) - for model_column in yaml_file_model_section["columns"]: - if model_column["name"] == column: - model_column.update(prior_knowledge) - changes_committed += 1 - logger().info( - ":light_bulb: Column %s is inheriting knowledge from the lineage of progenitor" - " (%s) for model %s", - column, - progenitor, - node.unique_id, - ) - logger().info(prior_knowledge) - return changes_committed diff --git a/src/dbt_osmosis/core/exceptions.py b/src/dbt_osmosis/core/exceptions.py deleted file mode 100644 index dadfbe03..00000000 --- a/src/dbt_osmosis/core/exceptions.py +++ /dev/null @@ -1,14 +0,0 @@ -class InvalidOsmosisConfig(Exception): - pass - - -class MissingOsmosisConfig(Exception): - pass - - -class MissingArgument(Exception): - pass - - -class SanitizationRequired(Exception): - pass diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 0e2fd346..863a7090 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -19,11 +19,20 @@ import dbt.flags as dbt_flags import ruamel.yaml +from dbt.adapters.base.impl import BaseAdapter from dbt.adapters.contracts.connection import AdapterResponse -from dbt.adapters.factory import Adapter, get_adapter_class_by_name +from dbt.adapters.factory import get_adapter_class_by_name from dbt.config.runtime import RuntimeConfig from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import ColumnInfo, ManifestNode, ManifestSQLNode, SourceDefinition +from dbt.contracts.graph.nodes import ( + ColumnInfo, + ManifestNode, + ManifestSQLNode, + ModelNode, + ResultNode, + SeedNode, + SourceDefinition, +) from dbt.contracts.results import CatalogArtifact, CatalogKey, CatalogTable, ColumnMetadata from dbt.node_types import NodeType from dbt.parser.manifest import ManifestLoader, process_node @@ -144,58 +153,6 @@ class SchemaFileMigration: supersede: dict[Path, list[str]] = field(default_factory=dict) -# FIXME: fold this in from the other file -@dataclass -class ColumnLevelKnowledgePropagator: - """Example usage for doc-propagation logic. placeholders is a tuple to avoid accidental mutation.""" - - placeholders: tuple[str, ...] = ( - EMPTY_STRING, - "Pending further documentation", - "Pending further documentation.", - "No description for this column", - "No description for this column.", - "Not documented", - "Not documented.", - "Undefined", - "Undefined.", - ) - - @staticmethod - def get_node_columns_with_inherited_knowledge( - manifest: Manifest, - node: ManifestNode, - placeholders: list[str], - use_unrendered_descriptions: bool, - ) -> dict[str, dict[str, t.Any]]: - _ = manifest, node, placeholders, use_unrendered_descriptions - return {} - - @staticmethod - def update_undocumented_columns_with_prior_knowledge( - columns_to_update: Iterable[str], - node: ManifestNode, - yaml_section: dict[str, t.Any], - known_knowledge: dict[str, dict[str, t.Any]], - ) -> int: - changed_count = 0 - for col in columns_to_update: - if col not in node.columns: - continue - cinfo = node.columns[col] - old_desc = getattr(cinfo, "description", "") - new_desc = old_desc - if col in known_knowledge and not old_desc: - new_desc = known_knowledge[col].get("description", "") - if new_desc and new_desc != old_desc: - setattr(cinfo, "description", new_desc) - for c in yaml_section.get("columns", []): - if c["name"].lower() == col.lower(): - c["description"] = new_desc - changed_count += 1 - return changed_count - - class MissingOsmosisConfig(Exception): """Raised when an osmosis configuration is missing.""" @@ -248,7 +205,7 @@ class DbtProjectContext: _adapter_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) _manifest_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) - _adapter: Adapter | None = None + _adapter: BaseAdapter | None = None _adapter_created_at: float = 0.0 @property @@ -256,9 +213,8 @@ def is_adapter_expired(self) -> bool: """Check if the adapter has expired based on the adapter TTL.""" return time.time() - self._adapter_created_at > self.adapter_ttl - # NOTE: the way we use the adapter, the generics are irrelevant @property - def adapter(self) -> Adapter[t.Any, t.Any, t.Any, t.Any]: + def adapter(self) -> BaseAdapter: """Get the adapter instance, creating a new one if the current one has expired.""" with self._adapter_mutex: if not self._adapter or self.is_adapter_expired: @@ -273,7 +229,7 @@ def manifest_mutex(self) -> threading.Lock: return self._manifest_mutex -def instantiate_adapter(runtime_config: RuntimeConfig) -> Adapter[t.Any, t.Any, t.Any, t.Any]: +def instantiate_adapter(runtime_config: RuntimeConfig) -> BaseAdapter: """Instantiate a dbt adapter based on the runtime configuration.""" adapter_cls = get_adapter_class_by_name(runtime_config.credentials.type) if not adapter_cls: @@ -344,11 +300,11 @@ class YamlRefactorContext: "Undefined", ) - mutation_count: int = 0 + _mutation_count: int = field(default=0, init=False) def register_mutations(self, count: int) -> None: """Increment the mutation count by a specified amount.""" - self.mutation_count += count + self._mutation_count += count def __post_init__(self) -> None: if EMPTY_STRING not in self.placeholders: @@ -365,7 +321,7 @@ def load_catalog(settings: YamlRefactorSettings) -> CatalogArtifact | None: return CatalogArtifact.from_dict(json.loads(fp.read_text())) -def has_jinja(code: str) -> bool: +def _has_jinja(code: str) -> bool: """Check if a code string contains jinja tokens.""" return any(token in code for token in ("{{", "}}", "{%", "%}", "{#", "#}")) @@ -378,7 +334,7 @@ def compile_sql_code(context: DbtProjectContext, raw_sql: str) -> ManifestSQLNod _ = context.manifest.nodes.pop(key, None) node = context.sql_parser.parse_remote(raw_sql, tmp_id) - if not has_jinja(raw_sql): + if not _has_jinja(raw_sql): return node process_node(context.config, context.manifest, node) compiled_node = SqlCompileRunner( @@ -396,7 +352,7 @@ def compile_sql_code(context: DbtProjectContext, raw_sql: str) -> ManifestSQLNod def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> AdapterResponse: """Execute jinja SQL using the context's manifest and adapter.""" - if has_jinja(raw_sql): + if _has_jinja(raw_sql): comp = compile_sql_code(context, raw_sql) sql_to_exec = comp.compiled_code or comp.raw_code else: @@ -406,34 +362,7 @@ def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> AdapterRespons return resp -def filter_models( - context: YamlRefactorContext, -) -> Iterator[tuple[str, ManifestNode | SourceDefinition]]: - """Iterate over the models in the dbt project manifest applying the filter settings.""" - - def f(node: ManifestNode | SourceDefinition) -> bool: - """Closure to filter models based on the context settings.""" - if node.resource_type not in (NodeType.Model, NodeType.Source): - return False - if node.package_name != context.project.config.project_name: - return False - if node.resource_type == NodeType.Model and node.config.materialized == "ephemeral": - return False - if context.settings.models: - if not _is_file_match(node, context.settings.models): - return False - elif context.settings.fqn: - if not _is_fqn_match(node, context.settings.fqn): - return False - return True - - items = chain(context.project.manifest.nodes.items(), context.project.manifest.sources.items()) - for uid, dbt_node in items: - if f(dbt_node): - yield uid, dbt_node - - -def _is_fqn_match(node: ManifestNode | SourceDefinition, fqn_str: str) -> bool: +def _is_fqn_match(node: ResultNode, fqn_str: str) -> bool: """Filter models based on the provided fully qualified name matching on partial segments.""" if not fqn_str: return True @@ -443,7 +372,7 @@ def _is_fqn_match(node: ManifestNode | SourceDefinition, fqn_str: str) -> bool: ) -def _is_file_match(node: ManifestNode | SourceDefinition, paths: list[str]) -> bool: +def _is_file_match(node: ResultNode, paths: list[str]) -> bool: """Check if a node's file path matches any of the provided file paths or names.""" node_path = _get_node_path(node) for model in paths: @@ -459,13 +388,40 @@ def _is_file_match(node: ManifestNode | SourceDefinition, paths: list[str]) -> b return False -def _get_node_path(node: ManifestNode | SourceDefinition) -> Path | None: +def _get_node_path(node: ResultNode) -> Path | None: """Return the path to the node's original file if available.""" if node.original_file_path and hasattr(node, "root_path"): return Path(getattr(node, "root_path"), node.original_file_path).resolve() return None +def filter_models( + context: YamlRefactorContext, +) -> Iterator[tuple[str, ResultNode]]: + """Iterate over the models in the dbt project manifest applying the filter settings.""" + + def f(node: ResultNode) -> bool: + """Closure to filter models based on the context settings.""" + if node.resource_type not in (NodeType.Model, NodeType.Source): + return False + if node.package_name != context.project.config.project_name: + return False + if node.resource_type == NodeType.Model and node.config.materialized == "ephemeral": + return False + if context.settings.models: + if not _is_file_match(node, context.settings.models): + return False + elif context.settings.fqn: + if not _is_fqn_match(node, context.settings.fqn): + return False + return True + + items = chain(context.project.manifest.nodes.items(), context.project.manifest.sources.items()) + for uid, dbt_node in items: + if f(dbt_node): + yield uid, dbt_node + + def normalize_column_name(column: str, credentials_type: str, to_lower: bool) -> str: """Apply case normalization to a column name based on the credentials type.""" if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): @@ -477,26 +433,404 @@ def normalize_column_name(column: str, credentials_type: str, to_lower: bool) -> return column +@dataclass +class ColumnData: + """Simple data object for column information""" + + name: str + description: str + data_type: str + + +def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: + """Use the precise data type if enabled in the settings.""" + if (col.is_numeric() and settings.numeric_precision) or ( + col.is_string() and settings.char_length + ): + return col.data_type + return col.dtype + + +def _catalog_key_for_node(node: ResultNode) -> CatalogKey: + """Make an appropriate catalog key for a dbt node.""" + if node.resource_type == NodeType.Source: + return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) + return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) + + +def get_columns_meta_for_key( + context: YamlRefactorContext, key: CatalogKey, output_to_lower: bool +) -> dict[str, ColumnMetadata]: + """Equivalent to get_columns_meta in old code but directly referencing a key, not a node.""" + cased_cols = OrderedDict() + blacklist = context.project.config.vars.to_dict().get("dbt-osmosis", {}).get("_blacklist", []) + catalog = None + if context.settings.catalog_file: + path = Path(context.settings.catalog_file) + if path.is_file(): + catalog = CatalogArtifact.from_dict(json.loads(path.read_text())) + + # Catalog first + if catalog: + cat_objs = {**catalog.nodes, **catalog.sources} + matched = [table for cat_k, table in cat_objs.items() if cat_k.split(".")[-1] == key.name] + if matched: + for col in matched[0].columns.values(): + if any(re.match(p, col.name) for p in blacklist): + continue + cased = normalize_column_name( + col.name, + context.project.config.credentials.type, + output_to_lower, + ) + cased_cols[cased] = ColumnMetadata( + name=cased, + type=col.type, + index=col.index, + comment=col.comment, + ) + return cased_cols + + # Fallback to adapter-based + adapter = context.project.adapter + rel = adapter.get_relation(key.database, key.schema, key.name) + if not rel: + return cased_cols + try: + for col_ in adapter.get_columns_in_relation(rel): + if any(re.match(b, col_.name) for b in blacklist): + continue + cased = normalize_column_name( + col_.name, + context.project.config.credentials.type, + output_to_lower, + ) + dtype = _maybe_use_precise_dtype(col_, context.settings) + cased_cols[cased] = ColumnMetadata( + name=cased, + type=dtype, + index=None, + comment=getattr(col_, "comment", None), + ) + if hasattr(col_, "flatten"): + for exp in col_.flatten(): + if any(re.match(b, exp.name) for b in blacklist): + continue + cased2 = normalize_column_name( + exp.name, + context.project.config.credentials.type, + output_to_lower, + ) + dtype2 = _maybe_use_precise_dtype(exp, context.settings) + cased_cols[cased2] = ColumnMetadata( + name=cased2, + type=dtype2, + index=None, + comment=getattr(exp, "comment", None), + ) + except Exception as ex: + logger.warning(f"Could not introspect columns for {key}: {ex}") + return cased_cols + + +def get_columns_for_key( + context: YamlRefactorContext, key: CatalogKey, output_to_lower: bool +) -> list[str]: + """Equivalent to get_columns in old code; returns just the list of column names.""" + meta = get_columns_meta_for_key(context, key, output_to_lower) + return list(meta.keys()) + + # TODO: more work to do below the fold here -# NOTE: in multithreaded operations, we need to use the thread connection for the adapter -def get_columns_meta( - context: YamlRefactorContext, +_ColumnLevelKnowledge = dict[str, t.Any] +_KnowledgeBase = dict[str, _ColumnLevelKnowledge] + + +def _build_node_ancestor_tree( + manifest: Manifest, + node: ResultNode, + family_tree: dict[str, list[str]] | None = None, + members_found: list[str] | None = None, + depth: int = 0, +) -> dict[str, list[str]]: + """Recursively build dictionary of parents in generational order using a simple DFS algorithm""" + # Set initial values + if family_tree is None: + family_tree = {} + if members_found is None: + members_found = [] + + # If the node has no dependencies, return the family tree as it is + if not hasattr(node, "depends_on"): + return family_tree + + # Iterate over the parents of the node mutating family_tree + for parent in getattr(node.depends_on, "nodes", []): + member = manifest.nodes.get(parent, manifest.sources.get(parent)) + if member and parent not in members_found: + family_tree.setdefault(f"generation_{depth}", []).append(parent) + _ = _build_node_ancestor_tree(manifest, member, family_tree, members_found, depth + 1) + members_found.append(parent) + + return family_tree + + +def _find_first(coll: Iterable[dict[str, t.Any]], predicate: t.Callable[[t.Any], bool]) -> t.Any: + """Find the first item in a container that satisfies a predicate.""" + for item in coll: + if predicate(item): + return item + + +def get_member_yaml(context: YamlRefactorContext, member: ResultNode) -> dict[str, t.Any] | None: + """Get the parsed YAML for a dbt model or source node.""" + project_dir = Path(context.project.config.project_root) + yaml_handler = context.yaml_handler + + if isinstance(member, SourceDefinition): + if not member.original_file_path: + return None + path = project_dir.joinpath(member.original_file_path) + if not path.exists(): + return None + with path.open("r") as f: + parsed_yaml = yaml_handler.load(f) + data: t.Any = parsed_yaml.get("sources", []) + src = _find_first(data, lambda s: s["name"] == member.source_name) + if not src: + return None + tables = src.get("tables", []) + return _find_first(tables, lambda tbl: tbl["name"] == member.name) + + elif isinstance(member, (ModelNode, SeedNode)): + if not member.patch_path: + return None + patch_file = project_dir.joinpath(member.patch_path.split("://")[-1]) + if not patch_file.is_file(): + return None + with patch_file.open("r") as f: + parsed_yaml = yaml_handler.load(f) + section_key = f"{member.resource_type}s" + data = parsed_yaml.get(section_key, []) + return _find_first(data, lambda model: model["name"] == member.name) + + return None + + +def inherit_column_level_knowledge( + context: YamlRefactorContext, family_tree: dict[str, list[str]] +) -> _KnowledgeBase: + """Generate a knowledge base by applying inheritance logic based on the family tree graph.""" + knowledge: _KnowledgeBase = {} + placeholders = context.placeholders + manifest = context.project.manifest + + # If the user wants to use unrendered descriptions + use_unrendered = context.settings.use_unrendered_descriptions + + # We traverse from the last generation to the earliest + # so that the "nearest" ancestor overwrites the older ones. + for gen_name in reversed(family_tree.keys()): + members_in_generation = family_tree[gen_name] + for ancestor_id in members_in_generation: + member = manifest.nodes.get(ancestor_id, manifest.sources.get(ancestor_id)) + if not member: + continue + + member_yaml: dict[str, t.Any] | None = None + if use_unrendered: + member_yaml = get_member_yaml(context, member) + + # For each column in the ancestor + for col_name, col_info in member.columns.items(): + # If we haven't seen this column name yet, seed it with minimal data + _ = knowledge.setdefault( + col_name, + {"progenitor": ancestor_id, "generation": gen_name}, + ) + merged_info = col_info.to_dict() + + # If the description is in placeholders, discard it + if merged_info.get("description", "") in placeholders: + merged_info["description"] = "" + + # If user wants unrendered, read from YAML file if present + if member_yaml and "columns" in member_yaml: + col_in_yaml = _find_first( + member_yaml["columns"], lambda c: c["name"] == merged_info["name"] + ) + if col_in_yaml and "description" in col_in_yaml: + merged_info["description"] = col_in_yaml["description"] + + # Merge tags + existing_tags = knowledge[col_name].get("tags", []) + new_tags = set(merged_info.pop("tags", [])) | set(existing_tags) + if new_tags: + merged_info["tags"] = list(new_tags) + + # Merge meta + existing_meta = knowledge[col_name].get("meta", {}) + combined_meta = {**existing_meta, **merged_info.pop("meta", {})} + if combined_meta: + merged_info["meta"] = combined_meta + + # Now unify + knowledge[col_name].update(merged_info) + + return knowledge + + +def get_node_columns_with_inherited_knowledge( + context: YamlRefactorContext, node: ResultNode +) -> _KnowledgeBase: + """Build a knowledgebase for the node by climbing the ancestor tree and merging column doc info from nearest to farthest ancestors.""" + family_tree = _build_node_ancestor_tree(context.project.manifest, node) + return inherit_column_level_knowledge(context, family_tree) + + +def get_prior_knowledge(knowledge: _KnowledgeBase, column: str) -> _ColumnLevelKnowledge: + """If the user has changed column name's case or prefix, attempt to find the best match among possible variants (lowercase, pascalCase, etc.) + + We sort so that any source/seed is considered first, then models, + and within each group we sort descending by generation. + """ + camelcase: str = re.sub(r"_(.)", lambda m: m.group(1).upper(), column) + pascalcase: str = camelcase[0].upper() + camelcase[1:] if camelcase else camelcase + variants = (column, column.lower(), camelcase, pascalcase) + + def is_source_or_seed(k: _ColumnLevelKnowledge) -> bool: + p = k.get("progenitor", "") + return p.startswith("source") or p.startswith("seed") + + matches: list[_ColumnLevelKnowledge] = [] + for var in variants: + found = knowledge.get(var) + if found is not None: + matches.append(found) + + def _sort_k(k: _ColumnLevelKnowledge) -> tuple[bool, str]: + return (not is_source_or_seed(k), k.get("generation", "")) + + sorted_matches = sorted(matches, key=_sort_k, reverse=True) + return sorted_matches[0] if sorted_matches else {} + + +def merge_knowledge_with_original_knowledge( + prior_knowledge: _ColumnLevelKnowledge, + original_knowledge: _ColumnLevelKnowledge, + add_progenitor_to_meta: bool, + progenitor: str, +) -> _ColumnLevelKnowledge: + """Merge two column level knowledge dictionaries.""" + merged = dict(original_knowledge) + + # Unify tags + if "tags" in prior_knowledge: + prior_tags = set(prior_knowledge["tags"]) + merged_tags = set(merged.get("tags", [])) + merged["tags"] = list(prior_tags | merged_tags) + + # Unify meta + if "meta" in prior_knowledge: + new_meta = {**merged.get("meta", {}), **prior_knowledge["meta"]} + merged["meta"] = new_meta + + # If the user wants the source or seed name in meta, apply it + if add_progenitor_to_meta and progenitor: + merged.setdefault("meta", {}) + merged["meta"]["osmosis_progenitor"] = progenitor + + # If meta says "osmosis_keep_description" => keep the original description + if merged.get("meta", {}).get("osmosis_keep_description"): + # Do nothing + pass + else: + # Otherwise if prior knowledge has a non-empty description, override + if prior_knowledge.get("description"): + merged["description"] = prior_knowledge["description"] + + # Remove empty tags or meta + if merged.get("tags") == []: + merged.pop("tags", None) + if merged.get("meta") == {}: + merged.pop("meta", None) + + return merged + + +def update_undocumented_columns_with_prior_knowledge( + undocumented_columns: Iterable[str], node: ManifestNode, - catalog: CatalogArtifact | None, + yaml_file_model_section: dict[str, t.Any], + knowledge: _KnowledgeBase, + skip_add_tags: bool, + skip_merge_meta: bool, + add_progenitor_to_meta: bool, + add_inheritance_for_specified_keys: Iterable[str] = (), +) -> int: + """For columns that are undocumented, we find prior knowledge in the knowledge dict, merge it with the existing column's knowledge, then assign it to both node and YAML.""" + # Which keys are we allowed to adopt from prior knowledge + inheritables = ["description"] + if not skip_add_tags: + inheritables.append("tags") + if not skip_merge_meta: + inheritables.append("meta") + for k in add_inheritance_for_specified_keys: + if k not in inheritables: + inheritables.append(k) + + changes = 0 + for column in undocumented_columns: + if column not in node.columns: + node.columns[column] = ColumnInfo.from_dict({"name": column}) + original_dict = node.columns[column].to_dict() + + prior = get_prior_knowledge(knowledge, column) + progenitor = t.cast(str, prior.pop("progenitor", "")) + + # Only keep keys we want to inherit + filtered_prior = {kk: vv for kk, vv in prior.items() if kk in inheritables} + + new_knowledge = merge_knowledge_with_original_knowledge( + filtered_prior, + original_dict, + add_progenitor_to_meta, + progenitor, + ) + if new_knowledge == original_dict: + continue + + node.columns[column] = ColumnInfo.from_dict(new_knowledge) + for col_def in yaml_file_model_section.get("columns", []): + if col_def.get("name") == column: + # Only update the keys we are inheriting + for k2 in filtered_prior: + col_def[k2] = new_knowledge.get(k2, col_def.get(k2)) + logger.info( + "[osmosis] Inherited knowledge for column: '%s' from progenitor '%s' in node '%s'", + column, + progenitor, + node.unique_id, + ) + changes += 1 + return changes + + +def get_columns_meta( + context: YamlRefactorContext, node: ManifestNode, catalog: CatalogArtifact | None ) -> dict[str, ColumnMetadata]: - """Get the column metadata for a node from the catalog or the adapter.""" cased_cols = OrderedDict() blacklist = context.project.config.vars.get("dbt-osmosis", {}).get("_blacklist", []) key = _catalog_key_for_node(node) if catalog: cat_objs = {**catalog.nodes, **catalog.sources} - matched = [v for k, v in cat_objs.items() if k.split(".")[-1] == key.name] + matched = [table for k, table in cat_objs.items() if k.split(".")[-1] == key.name] if matched: for col in matched[0].columns.values(): - if any(re.match(p, col.name) for p in blacklist): + if any(re.match(b, col.name) for b in blacklist): continue cased = normalize_column_name( col.name, @@ -511,12 +845,12 @@ def get_columns_meta( ) return cased_cols - rel = context.project.adapter.get_relation(key.database, key.schema, key.name) + adapter = context.project.adapter + rel = adapter.get_relation(key.database, key.schema, key.name) if not rel: return cased_cols try: - col_objs = context.project.adapter.get_columns_in_relation(rel) - for col_ in col_objs: + for col_ in adapter.get_columns_in_relation(rel): if any(re.match(b, col_.name) for b in blacklist): continue cased = normalize_column_name( @@ -547,29 +881,11 @@ def get_columns_meta( index=None, comment=getattr(exp, "comment", None), ) - except Exception as exc: - logger.warning(f"Could not introspect columns for {key}: {exc}") - + except Exception as ex: + logger.warning(f"Could not introspect columns for {key}: {ex}") return cased_cols -def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: - """Use the precise data type if enabled in the settings.""" - if (col.is_numeric() and settings.numeric_precision) or ( - col.is_string() and settings.char_length - ): - return col.data_type - return col.dtype - - -def _catalog_key_for_node(node: ManifestNode) -> CatalogKey: - """Make an appropriate catalog key for a dbt node.""" - # TODO: pyright seems to think something is wrong below - if node.resource_type == NodeType.Source: - return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) - return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) - - # NOTE: usage example of the more FP style module below From 87157a35a3ea8538439a2cd45cb7483caa7e8d29 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Sun, 29 Dec 2024 22:00:07 -0700 Subject: [PATCH 08/46] wip: continue working on functional rewrite --- pyproject.toml | 4 +- src/dbt_osmosis/core/osmosis.py | 677 ++++++++++++++++++++++++-------- 2 files changed, 515 insertions(+), 166 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 6fe78e09..7d01d01d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,7 @@ documentation = "https://github.com/z3z1ma/dbt-osmosis" repository = "https://github.com/z3z1ma/dbt-osmosis" [tool.poetry.dependencies] -python = ">=3.9,<3.9.7 || >3.9.7,<3.12" +python = ">=3.9,<3.9.7 || >3.9.7,<3.13" click = ">7" dbt-core = ">=1.8,<1.10" "ruamel.yaml" = ">=0.17" @@ -64,7 +64,7 @@ build-backend = "poetry.core.masonry.api" [tool.black] # https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-via-a-file line-length = 100 -target-version = ["py38", "py39", "py310", "py311"] +target-version = ["py39", "py310", "py311"] preview = true [tool.isort] # https://pycqa.github.io/isort/docs/configuration/options.html diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 863a7090..07211547 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -5,6 +5,7 @@ import argparse import json import logging +import os import re import threading import time @@ -12,17 +13,21 @@ import uuid from collections import OrderedDict from collections.abc import Iterable, Iterator -from concurrent.futures import ThreadPoolExecutor +from concurrent.futures import Future, ThreadPoolExecutor, wait from dataclasses import dataclass, field from itertools import chain from pathlib import Path import dbt.flags as dbt_flags +import rich.logging import ruamel.yaml +from dbt.adapters.base.column import Column as BaseColumn from dbt.adapters.base.impl import BaseAdapter +from dbt.adapters.base.relation import BaseRelation from dbt.adapters.contracts.connection import AdapterResponse -from dbt.adapters.factory import get_adapter_class_by_name +from dbt.adapters.factory import get_adapter, register_adapter from dbt.config.runtime import RuntimeConfig +from dbt.context.providers import generate_runtime_macro_context from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( ColumnInfo, @@ -34,17 +39,25 @@ SourceDefinition, ) from dbt.contracts.results import CatalogArtifact, CatalogKey, CatalogTable, ColumnMetadata +from dbt.mp_context import get_mp_context from dbt.node_types import NodeType from dbt.parser.manifest import ManifestLoader, process_node from dbt.parser.sql import SqlBlockParser, SqlMacroParser from dbt.task.sql import SqlCompileRunner from dbt.tracking import disable_tracking +from dbt_common.clients.system import get_env +from dbt_common.context import set_invocation_context disable_tracking() +logging.basicConfig(level=logging.DEBUG, handlers=[rich.logging.RichHandler()]) +logger = logging.getLogger("dbt-osmosis") + +T = t.TypeVar("T") EMPTY_STRING = "" -logger = logging.getLogger("dbt-osmosis") +SKIP_PATTERNS = "_column_ignore_patterns" +"""This key is used to skip certain column name patterns in dbt-osmosis""" def discover_project_dir() -> str: @@ -73,25 +86,24 @@ class DbtConfiguration: profile: str | None = None threads: int = 1 single_threaded: bool = True - which: str = "" - debug: bool = False _vars: str | dict[str, t.Any] = field(default_factory=dict, init=False) def __post_init__(self) -> None: + set_invocation_context(get_env()) if self.threads != 1: self.single_threaded = False @property - def vars(self) -> str: - if isinstance(self._vars, dict): - return json.dumps(self._vars) + def vars(self) -> dict[str, t.Any]: + if isinstance(self._vars, str): + return json.loads(self._vars) return self._vars @vars.setter def vars(self, value: t.Any) -> None: if not isinstance(value, (str, dict)): - raise ValueError("vars must be a string or dict") + raise ValueError("DbtConfiguration.vars must be a string or dict") self._vars = value @@ -104,9 +116,10 @@ def config_to_namespace(cfg: DbtConfiguration) -> argparse.Namespace: profile=cfg.profile, threads=cfg.threads, single_threaded=cfg.single_threaded, - which=cfg.which, vars=cfg.vars, - DEBUG=cfg.debug, + which="parse", + DEBUG=False, + REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES=False, ) @@ -150,7 +163,7 @@ class SchemaFileMigration: output: dict[str, t.Any] = field( default_factory=lambda: {"version": 2, "models": [], "sources": []} ) - supersede: dict[Path, list[str]] = field(default_factory=dict) + supersede: dict[Path, list[ResultNode]] = field(default_factory=dict) class MissingOsmosisConfig(Exception): @@ -197,6 +210,7 @@ class DbtProjectContext: for re-use across multiple operations in long-running processes. (is the idea) """ + args: argparse.Namespace config: RuntimeConfig manifest: Manifest sql_parser: SqlBlockParser @@ -231,22 +245,11 @@ def manifest_mutex(self) -> threading.Lock: def instantiate_adapter(runtime_config: RuntimeConfig) -> BaseAdapter: """Instantiate a dbt adapter based on the runtime configuration.""" - adapter_cls = get_adapter_class_by_name(runtime_config.credentials.type) - if not adapter_cls: - raise RuntimeError( - f"No valid adapter class found for credentials type: {runtime_config.credentials.type}" - ) - - # NOTE: this exists to patch over an API change in dbt core at some point I don't remember - try: - adapter = adapter_cls(runtime_config) - except TypeError: - from dbt.mp_context import get_mp_context - - adapter = adapter_cls(runtime_config, get_mp_context()) # pyright: ignore[reportCallIssue] - + register_adapter(runtime_config, get_mp_context()) + adapter = get_adapter(runtime_config) + adapter.set_macro_context_generator(t.cast(t.Any, generate_runtime_macro_context)) adapter.connections.set_connection_name("dbt-osmosis") - return adapter + return t.cast(BaseAdapter, t.cast(t.Any, adapter)) def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: @@ -255,17 +258,22 @@ def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: dbt_flags.set_from_args(args, args) runtime_cfg = RuntimeConfig.from_args(args) - loader = ManifestLoader(runtime_cfg, runtime_cfg.load_dependencies()) + adapter = instantiate_adapter(runtime_cfg) + setattr(runtime_cfg, "adapter", adapter) + loader = ManifestLoader( + runtime_cfg, + runtime_cfg.load_dependencies(), + ) manifest = loader.load() manifest.build_flat_graph() - adapter = instantiate_adapter(runtime_cfg) adapter.set_macro_resolver(manifest) sql_parser = SqlBlockParser(runtime_cfg, manifest, runtime_cfg) macro_parser = SqlMacroParser(runtime_cfg, manifest) return DbtProjectContext( + args=args, config=runtime_cfg, manifest=manifest, sql_parser=sql_parser, @@ -273,6 +281,14 @@ def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: ) +def reload_manifest(context: DbtProjectContext) -> None: + """Reload the dbt project manifest. Useful for picking up mutations.""" + loader = ManifestLoader(context.config, context.config.load_dependencies()) + manifest = loader.load() + manifest.build_flat_graph() + context.manifest = manifest + + @dataclass class YamlRefactorContext: """A data object that includes references to: @@ -286,11 +302,10 @@ class YamlRefactorContext: """ project: DbtProjectContext - settings: YamlRefactorSettings - + settings: YamlRefactorSettings = field(default_factory=YamlRefactorSettings) pool: ThreadPoolExecutor = field(default_factory=ThreadPoolExecutor) - yaml_handler: ruamel.yaml.YAML = field(default_factory=create_yaml_instance) + yaml_handler_lock: threading.Lock = field(default_factory=threading.Lock) placeholders: tuple[str, ...] = ( EMPTY_STRING, @@ -306,6 +321,29 @@ def register_mutations(self, count: int) -> None: """Increment the mutation count by a specified amount.""" self._mutation_count += count + @property + def mutation_count(self) -> int: + """Read only property to access the mutation count.""" + return self._mutation_count + + @property + def mutated(self) -> bool: + """Check if the context has performed any mutations.""" + return self._mutation_count > 0 + + @property + def source_definitions(self) -> dict[str, t.Any]: + """The source definitions from the dbt project config.""" + defs = self.project.config.vars.to_dict().get("dbt-osmosis", {}).copy() + defs.pop(SKIP_PATTERNS, None) + return defs + + @property + def skip_patterns(self) -> list[str]: + """The column name skip patterns from the dbt project config.""" + defs = self.project.config.vars.to_dict().get("dbt-osmosis", {}).copy() + return defs.pop(SKIP_PATTERNS, []) + def __post_init__(self) -> None: if EMPTY_STRING not in self.placeholders: self.placeholders = (EMPTY_STRING, *self.placeholders) @@ -422,7 +460,7 @@ def f(node: ResultNode) -> bool: yield uid, dbt_node -def normalize_column_name(column: str, credentials_type: str, to_lower: bool) -> str: +def normalize_column_name(column: str, credentials_type: str, to_lower: bool = False) -> str: """Apply case normalization to a column name based on the credentials type.""" if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): return column @@ -451,94 +489,464 @@ def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: return col.dtype -def _catalog_key_for_node(node: ResultNode) -> CatalogKey: +def _get_catalog_key_for_node(node: ResultNode) -> CatalogKey: """Make an appropriate catalog key for a dbt node.""" if node.resource_type == NodeType.Source: - return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) - return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) + return CatalogKey(node.database, node.schema, node.identifier or node.name) + return CatalogKey(node.database, node.schema, node.alias or node.name) -def get_columns_meta_for_key( - context: YamlRefactorContext, key: CatalogKey, output_to_lower: bool -) -> dict[str, ColumnMetadata]: +def get_columns(context: YamlRefactorContext, key: CatalogKey) -> dict[str, ColumnMetadata]: """Equivalent to get_columns_meta in old code but directly referencing a key, not a node.""" - cased_cols = OrderedDict() - blacklist = context.project.config.vars.to_dict().get("dbt-osmosis", {}).get("_blacklist", []) + normalized_cols = OrderedDict() + skip_patterns = context.skip_patterns catalog = None if context.settings.catalog_file: + # TODO: no reason to re-read this file on every call path = Path(context.settings.catalog_file) if path.is_file(): catalog = CatalogArtifact.from_dict(json.loads(path.read_text())) - # Catalog first if catalog: - cat_objs = {**catalog.nodes, **catalog.sources} - matched = [table for cat_k, table in cat_objs.items() if cat_k.split(".")[-1] == key.name] - if matched: - for col in matched[0].columns.values(): - if any(re.match(p, col.name) for p in blacklist): + # TODO: no reason to dict unpack every call here... + catalog_candidates = {**catalog.nodes, **catalog.sources} + catalog_entry = _find_first(catalog_candidates.values(), lambda c: c.key() == key) + if catalog_entry: + for column in catalog_entry.columns.values(): + if any(re.match(p, column.name) for p in skip_patterns): continue - cased = normalize_column_name( - col.name, - context.project.config.credentials.type, - output_to_lower, + normalized = normalize_column_name( + column.name, context.project.config.credentials.type ) - cased_cols[cased] = ColumnMetadata( - name=cased, - type=col.type, - index=col.index, - comment=col.comment, + normalized_cols[normalized] = ColumnMetadata( + name=normalized, type=column.type, index=column.index, comment=column.comment ) - return cased_cols + return normalized_cols + + relation: BaseRelation | None = context.project.adapter.get_relation( + key.database, + key.schema, + key.name, + ) + if not relation: + return normalized_cols - # Fallback to adapter-based - adapter = context.project.adapter - rel = adapter.get_relation(key.database, key.schema, key.name) - if not rel: - return cased_cols try: - for col_ in adapter.get_columns_in_relation(rel): - if any(re.match(b, col_.name) for b in blacklist): + # TODO: the following should be a recursive function to handle nested columns, probably + for index, column in enumerate( + t.cast(Iterable[BaseColumn], context.project.adapter.get_columns_in_relation(relation)) + ): + if any(re.match(b, column.name) for b in skip_patterns): continue - cased = normalize_column_name( - col_.name, - context.project.config.credentials.type, - output_to_lower, + normalized = normalize_column_name(column.name, context.project.config.credentials.type) + dtype = _maybe_use_precise_dtype(column, context.settings) + normalized_cols[normalized] = ColumnMetadata( + name=normalized, type=dtype, index=index, comment=getattr(column, "comment", None) ) - dtype = _maybe_use_precise_dtype(col_, context.settings) - cased_cols[cased] = ColumnMetadata( - name=cased, - type=dtype, - index=None, - comment=getattr(col_, "comment", None), - ) - if hasattr(col_, "flatten"): - for exp in col_.flatten(): - if any(re.match(b, exp.name) for b in blacklist): + if hasattr(column, "flatten"): + for _, subcolumn in enumerate( + t.cast(Iterable[BaseColumn], getattr(column, "flatten")()) + ): + if any(re.match(b, subcolumn.name) for b in skip_patterns): continue - cased2 = normalize_column_name( - exp.name, - context.project.config.credentials.type, - output_to_lower, + normalized = normalize_column_name( + subcolumn.name, context.project.config.credentials.type ) - dtype2 = _maybe_use_precise_dtype(exp, context.settings) - cased_cols[cased2] = ColumnMetadata( - name=cased2, - type=dtype2, - index=None, - comment=getattr(exp, "comment", None), + dtype = _maybe_use_precise_dtype(subcolumn, context.settings) + normalized_cols[normalized] = ColumnMetadata( + name=normalized, + type=dtype, + index=index, + comment=getattr(subcolumn, "comment", None), ) except Exception as ex: logger.warning(f"Could not introspect columns for {key}: {ex}") - return cased_cols + return normalized_cols + + +def create_missing_source_yamls(context: YamlRefactorContext) -> None: + """Create source files for sources defined in the dbt_project.yml dbt-osmosis var which don't exist as nodes. + + This is a useful preprocessing step to ensure that all sources are represented in the dbt project manifest. We + do not have rich node information for non-existent sources, hence the alternative codepath here to bootstrap them. + """ + database: str = context.project.config.credentials.database + + did_side_effect: bool = False + for source, spec in context.source_definitions.items(): + if isinstance(spec, str): + schema = source + src_yaml_path = spec + elif isinstance(spec, dict): + database = t.cast(str, spec.get("database", database)) + schema = t.cast(str, spec.get("schema", source)) + src_yaml_path = t.cast(str, spec["path"]) + else: + continue + + if _find_first( + context.project.manifest.sources.values(), lambda s: s.source_name == source + ): + continue + + src_yaml_path = Path( + context.project.config.project_root, + context.project.config.model_paths[0], + src_yaml_path.lstrip(os.sep), + ) + + def _describe(rel: BaseRelation) -> dict[str, t.Any]: + columns = [] + for c in t.cast( + Iterable[BaseColumn], context.project.adapter.get_columns_in_relation(rel) + ): + if any(re.match(b, c.name) for b in context.skip_patterns): + continue + # NOTE: we should be consistent about recursively flattening structs + normalized_column = normalize_column_name( + c.name, context.project.config.credentials.type + ) + dt = c.dtype.lower() if context.settings.output_to_lower else c.dtype + columns.append({"name": normalized_column, "description": "", "data_type": dt}) + return {"name": rel.identifier, "description": "", "columns": columns} + + tables = [ + schema + for schema in context.pool.map( + _describe, + context.project.adapter.list_relations(database=database, schema=schema), + ) + ] + source = {"name": source, "database": database, "schema": schema, "tables": tables} + + src_yaml_path.parent.mkdir(parents=True, exist_ok=True) + with src_yaml_path.open("w") as f: + logger.info(f"Injecting source {source} => {src_yaml_path}") + context.yaml_handler.dump({"version": 2, "sources": [source]}, f) + + did_side_effect = True + context.register_mutations(1) + + if did_side_effect: + logger.info("Reloading project to pick up new sources.") + reload_manifest(context.project) + + +def _get_yaml_path_template(context: YamlRefactorContext, node: ResultNode) -> str | None: + """Get the yaml path template for a dbt model or source node.""" + if node.resource_type == NodeType.Source: + def_or_path = context.source_definitions.get(node.source_name) + if isinstance(def_or_path, dict): + return def_or_path.get("path") + return def_or_path + path_template = node.config.extra.get("dbt-osmosis", node.unrendered_config.get("dbt-osmosis")) + if not path_template: + raise MissingOsmosisConfig( + f"Config key `dbt-osmosis: ` not set for model {node.name}" + ) + return path_template + + +def get_current_yaml_path(context: YamlRefactorContext, node: ResultNode) -> Path | None: + """Get the current yaml path for a dbt model or source node.""" + if node.resource_type == NodeType.Model and getattr(node, "patch_path", None): + return Path(context.project.config.project_root).joinpath( + t.cast(str, node.patch_path).partition("://")[-1] + ) + if node.resource_type == NodeType.Source and hasattr(node, "source_name"): + return Path(context.project.config.project_root, node.path) + return None -def get_columns_for_key( - context: YamlRefactorContext, key: CatalogKey, output_to_lower: bool -) -> list[str]: - """Equivalent to get_columns in old code; returns just the list of column names.""" - meta = get_columns_meta_for_key(context, key, output_to_lower) - return list(meta.keys()) + +def get_target_yaml_path(context: YamlRefactorContext, node: ResultNode) -> Path: + """Get the target yaml path for a dbt model or source node.""" + tpl = _get_yaml_path_template(context, node) + if not tpl: + return Path(context.project.config.project_root, node.original_file_path) + + rendered = tpl.format(node=node, model=node.name, parent=node.fqn[-2]) + segments: list[Path | str] = [] + + if node.resource_type == NodeType.Source: + segments.append(context.project.config.model_paths[0]) + else: + segments.append(Path(node.original_file_path).parent) + + if not (rendered.endswith(".yml") or rendered.endswith(".yaml")): + rendered += ".yml" + segments.append(rendered) + + return Path(context.project.config.project_root, *segments) + + +def build_schema_folder_mapping(context: YamlRefactorContext) -> dict[str, SchemaFileLocation]: + """Build a mapping of dbt model and source nodes to their current and target yaml paths.""" + create_missing_source_yamls(context) + folder_map: dict[str, SchemaFileLocation] = {} + for uid, node in filter_models(context): + current_path = get_current_yaml_path(context, node) + folder_map[uid] = SchemaFileLocation( + target=get_target_yaml_path(context, node).resolve(), + current=current_path.resolve() if current_path else None, + node_type=node.resource_type, + ) + return folder_map + + +def generate_minimal_yaml_data(context: YamlRefactorContext, node: ResultNode) -> dict[str, t.Any]: + """Get the minimal model yaml data for a dbt model node. (operating under the assumption this yaml probably does not exist yet)""" + return { + "name": node.name, + "description": node.description or "", + "columns": [ + { + "name": name.lower() if context.settings.output_to_lower else name, + "description": meta.comment or "", + } + for name, meta in get_columns(context, _get_catalog_key_for_node(node)).items() + ], + } + + +def augment_existing_yaml_data( + context: YamlRefactorContext, yaml_section: dict[str, t.Any], node: ResultNode +) -> dict[str, t.Any]: + """Mutate an existing yaml section with additional column information.""" + existing_cols = [c["name"] for c in yaml_section.get("columns", [])] + db_cols = get_columns(context, _get_catalog_key_for_node(node)) + new_cols = [ + c for n, c in db_cols.items() if n.lower() not in (e.lower() for e in existing_cols) + ] + for column in new_cols: + yaml_section.setdefault("columns", []).append( + {"name": column.name, "description": column.comment or ""} + ) + logger.info(f"Injecting column {column.name} into {node.unique_id}") + return yaml_section + + +def _draft_structure_for_node( + context: YamlRefactorContext, + yaml_loc: SchemaFileLocation, + uid: str, + blueprint: dict[Path, SchemaFileMigration], + bp_mutex: threading.Lock, +) -> None: + """Draft a structure update plan for a dbt model or source node.""" + with bp_mutex: + if yaml_loc.target not in blueprint: + blueprint[yaml_loc.target] = SchemaFileMigration() + + node = ( + context.project.manifest.nodes[uid] + if yaml_loc.node_type == NodeType.Model + else context.project.manifest.sources[uid] + ) + + if yaml_loc.current is None: + if yaml_loc.node_type == NodeType.Model: + with bp_mutex: + blueprint[yaml_loc.target].output["models"].append( + generate_minimal_yaml_data(context, node) + ) + return + + with context.yaml_handler_lock: + existing_doc = context.yaml_handler.load(yaml_loc.current) + + if yaml_loc.node_type == NodeType.Model: + assert isinstance(node, ModelNode) + for yaml_data in existing_doc.get("models", []): + if yaml_data["name"] == node.name: + _ = augment_existing_yaml_data(context, t.cast(dict[str, t.Any], yaml_data), node) + with bp_mutex: + blueprint[yaml_loc.target].output["models"].append(yaml_data) + blueprint[yaml_loc.target].supersede.setdefault( + yaml_loc.current, + [], + ).append(node) + break + else: + assert isinstance(node, SourceDefinition) + for source in existing_doc.get("sources", []): + if source["name"] == node.source_name: + for yaml_data in source["tables"]: + if yaml_data["name"] == node.name: + _ = augment_existing_yaml_data( + context, t.cast(dict[str, t.Any], yaml_data), node + ) + with bp_mutex: + if not any( + s["name"] == node.source_name + for s in blueprint[yaml_loc.target].output["sources"] + ): + blueprint[yaml_loc.target].output["sources"].append(source) + for existing_sources in blueprint[yaml_loc.target].output["sources"]: + if existing_sources["name"] == node.source_name: + for existing_tables in existing_sources["tables"]: + if existing_tables["name"] == node.name: + existing_tables.update(yaml_data) + break + blueprint[yaml_loc.target].supersede.setdefault( + yaml_loc.current, [] + ).append(node) + break + + +def draft_project_structure_update_plan( + context: YamlRefactorContext, +) -> dict[Path, SchemaFileMigration]: + """Draft a structure update plan for the dbt project.""" + blueprint: dict[Path, SchemaFileMigration] = {} + bp_mutex = threading.Lock() + logger.info("Building structure update plan.") + folder_map = build_schema_folder_mapping(context) + futs: list[Future[None]] = [] + for uid, schema_loc in folder_map.items(): + if not schema_loc.is_valid: + futs.append( + context.pool.submit( + _draft_structure_for_node, context, schema_loc, uid, blueprint, bp_mutex + ) + ) + _ = wait(futs) + return blueprint + + +def pretty_print_restructure_plan(blueprint: dict[Path, SchemaFileMigration]) -> None: + """Pretty print the restructure plan for the dbt project. (intended for rich.console)""" + import pprint + + summary = [] + for plan_path, migration_obj in blueprint.items(): + if not migration_obj.supersede: + summary.append((["CREATE"], "->", plan_path.name)) + else: + files_superseded = [p.name for p in migration_obj.supersede.keys()] or ["CREATE"] + summary.append((files_superseded, "->", plan_path.name)) + + # logger.info(summary) + pprint.pprint(t.cast(list[t.Any], summary)) + + +def cleanup_blueprint( + blueprint: dict[Path, SchemaFileMigration], +) -> dict[Path, SchemaFileMigration]: + """Cleanup the blueprint by removing empty models and sources, mutating it in place.""" + for path_key in list(blueprint.keys()): + out_dict = blueprint[path_key].output + if "models" in out_dict and not out_dict["models"]: + del out_dict["models"] + if "sources" in out_dict and not out_dict["sources"]: + del out_dict["sources"] + if not out_dict.get("models") and not out_dict.get("sources"): + del blueprint[path_key] + return blueprint + + +def commit_project_restructure_to_disk( + context: YamlRefactorContext, + blueprint: dict[Path, SchemaFileMigration] | None = None, +) -> int: + if not blueprint: + blueprint = draft_project_structure_update_plan(context) + + blueprint = cleanup_blueprint(blueprint) + if not blueprint: + logger.info("Project structure is already conformed.") + return 0 + + pretty_print_restructure_plan(blueprint) + change_offset = context.mutation_count + + for target, struct in blueprint.items(): + if not target.exists(): + logger.info(f"Creating schema file {target}") + + if not context.settings.dry_run: + target.parent.mkdir(parents=True, exist_ok=True) + target.touch() + + context.yaml_handler.dump(struct.output, target) + context.register_mutations(1) + else: + logger.info(f"Updating schema file {target}") + + existing: dict[str, t.Any] = context.yaml_handler.load(target) + if not existing: + existing = {"version": 2} + + if "version" not in existing: + existing["version"] = 2 + if "models" in struct.output: + existing.setdefault("models", []).extend(struct.output["models"]) + if "sources" in struct.output: + existing.setdefault("sources", []).extend(struct.output["sources"]) + + if not context.settings.dry_run: + context.yaml_handler.dump(existing, target) + context.register_mutations(1) + + for mut_path, nodes in struct.supersede.items(): + mut_schema = context.yaml_handler.load(mut_path) + + to_remove_models = {n.name for n in nodes if n.resource_type == NodeType.Model} + to_remove_sources = { + (n.source_name, n.name) for n in nodes if n.resource_type == NodeType.Source + } + + keep: list[t.Any] = [] + for model in mut_schema.get("models", []): + if model["name"] not in to_remove_models: + keep.append(model) + mut_schema["models"] = keep + + keep_sources: list[t.Any] = [] + for source in mut_schema.get("sources", []): + keep = [] + for table in source.get("tables", []): + if (source["name"], table["name"]) not in to_remove_sources: + keep.append(table) + if keep: # At least one table remains + source["tables"] = keep + keep_sources.append(source) + mut_schema["sources"] = keep_sources + + if not mut_schema.get("models") and not mut_schema.get("sources"): + logger.info(f"Superseding entire file {mut_path}") + if not context.settings.dry_run: + mut_path.unlink(missing_ok=True) + if mut_path.parent.exists() and not any(mut_path.parent.iterdir()): + mut_path.parent.rmdir() + else: + if not context.settings.dry_run: + context.yaml_handler.dump(t.cast(dict[str, t.Any], mut_schema), mut_path) + context.register_mutations(1) + logger.info(f"Migrated doc from {mut_path} -> {target}") + + return context.mutation_count - change_offset + + +def propagate_documentation_downstream( + context: YamlRefactorContext, force_inheritance: bool = False +) -> None: + folder_map = build_schema_folder_mapping(context) + futures = [] + with context.project.adapter.connection_named("dbt-osmosis"): + for unique_id, node in filter_models(context): + futures.append( + context.pool.submit( + _run_model_doc_sync, + context, + unique_id, + node, + folder_map, + force_inheritance, + output_to_lower, + ) + ) + wait(futures) # TODO: more work to do below the fold here @@ -577,7 +985,7 @@ def _build_node_ancestor_tree( return family_tree -def _find_first(coll: Iterable[dict[str, t.Any]], predicate: t.Callable[[t.Any], bool]) -> t.Any: +def _find_first(coll: Iterable[T], predicate: t.Callable[[T], bool]) -> T | None: """Find the first item in a container that satisfies a predicate.""" for item in coll: if predicate(item): @@ -818,74 +1226,6 @@ def update_undocumented_columns_with_prior_knowledge( return changes -def get_columns_meta( - context: YamlRefactorContext, node: ManifestNode, catalog: CatalogArtifact | None -) -> dict[str, ColumnMetadata]: - cased_cols = OrderedDict() - blacklist = context.project.config.vars.get("dbt-osmosis", {}).get("_blacklist", []) - - key = _catalog_key_for_node(node) - if catalog: - cat_objs = {**catalog.nodes, **catalog.sources} - matched = [table for k, table in cat_objs.items() if k.split(".")[-1] == key.name] - if matched: - for col in matched[0].columns.values(): - if any(re.match(b, col.name) for b in blacklist): - continue - cased = normalize_column_name( - col.name, - context.project.config.credentials.type, - context.settings.output_to_lower, - ) - cased_cols[cased] = ColumnMetadata( - name=cased, - type=col.type, - index=col.index, - comment=col.comment, - ) - return cased_cols - - adapter = context.project.adapter - rel = adapter.get_relation(key.database, key.schema, key.name) - if not rel: - return cased_cols - try: - for col_ in adapter.get_columns_in_relation(rel): - if any(re.match(b, col_.name) for b in blacklist): - continue - cased = normalize_column_name( - col_.name, - context.project.config.credentials.type, - context.settings.output_to_lower, - ) - dtype = _maybe_use_precise_dtype(col_, context.settings) - cased_cols[cased] = ColumnMetadata( - name=cased, - type=dtype, - index=None, - comment=getattr(col_, "comment", None), - ) - if hasattr(col_, "flatten"): - for exp in col_.flatten(): - if any(re.match(b, exp.name) for b in blacklist): - continue - cased2 = normalize_column_name( - exp.name, - context.project.config.credentials.type, - context.settings.output_to_lower, - ) - dtype2 = _maybe_use_precise_dtype(exp, context.settings) - cased_cols[cased2] = ColumnMetadata( - name=cased2, - type=dtype2, - index=None, - comment=getattr(exp, "comment", None), - ) - except Exception as ex: - logger.warning(f"Could not introspect columns for {key}: {ex}") - return cased_cols - - # NOTE: usage example of the more FP style module below @@ -899,3 +1239,12 @@ def run_example_compilation_flow() -> None: resp = execute_sql_code(proj_ctx, "select '{{ 1+2 }}' as col") print("Resp =>", resp) + + +if __name__ == "__main__": + c = DbtConfiguration(project_dir="demo_duckdb", profiles_dir="demo_duckdb") + c.vars = {"dbt-osmosis": {}} + project = create_dbt_project_context(c) + yaml_context = YamlRefactorContext(project) + plan = draft_project_structure_update_plan(yaml_context) + _ = commit_project_restructure_to_disk(yaml_context, plan) From 42d2ffb940412c7be3a23992b0a10bc4adb79841 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 00:21:28 -0700 Subject: [PATCH 09/46] wip: continue working on functional rewrite --- src/dbt_osmosis/core/osmosis.py | 442 +++++++++++++++----------------- 1 file changed, 210 insertions(+), 232 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 07211547..9e4cc4e2 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -13,7 +13,7 @@ import uuid from collections import OrderedDict from collections.abc import Iterable, Iterator -from concurrent.futures import Future, ThreadPoolExecutor, wait +from concurrent.futures import FIRST_EXCEPTION, Future, ThreadPoolExecutor, wait from dataclasses import dataclass, field from itertools import chain from pathlib import Path @@ -49,8 +49,10 @@ from dbt_common.context import set_invocation_context disable_tracking() -logging.basicConfig(level=logging.DEBUG, handlers=[rich.logging.RichHandler()]) -logger = logging.getLogger("dbt-osmosis") + +logger = logging.getLogger(__file__) +logger.setLevel(logging.DEBUG) +logger.addHandler(rich.logging.RichHandler(level=logging.DEBUG)) T = t.TypeVar("T") @@ -166,6 +168,26 @@ class SchemaFileMigration: supersede: dict[Path, list[ResultNode]] = field(default_factory=dict) +@dataclass +class RestructureOperation: + """Represents a single operation to perform on a YAML file. + + This might be CREATE, UPDATE, SUPERSEDE, etc. In a more advanced approach, + we might unify multiple steps under a single operation with sub-operations. + """ + + file_path: Path + content: dict[str, t.Any] + superseded_paths: dict[Path, list[ResultNode]] = field(default_factory=dict) + + +@dataclass +class RestructureDeltaPlan: + """Stores all the operations needed to restructure the project.""" + + operations: list[RestructureOperation] = field(default_factory=list) + + class MissingOsmosisConfig(Exception): """Raised when an osmosis configuration is missing.""" @@ -286,6 +308,7 @@ def reload_manifest(context: DbtProjectContext) -> None: loader = ManifestLoader(context.config, context.config.load_dependencies()) manifest = loader.load() manifest.build_flat_graph() + context.adapter.set_macro_resolver(manifest) context.manifest = manifest @@ -471,15 +494,6 @@ def normalize_column_name(column: str, credentials_type: str, to_lower: bool = F return column -@dataclass -class ColumnData: - """Simple data object for column information""" - - name: str - description: str - data_type: str - - def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: """Use the precise data type if enabled in the settings.""" if (col.is_numeric() and settings.numeric_precision) or ( @@ -680,258 +694,217 @@ def get_target_yaml_path(context: YamlRefactorContext, node: ResultNode) -> Path return Path(context.project.config.project_root, *segments) -def build_schema_folder_mapping(context: YamlRefactorContext) -> dict[str, SchemaFileLocation]: +def build_yaml_file_mapping( + context: YamlRefactorContext, create_missing_sources: bool = True +) -> dict[str, SchemaFileLocation]: """Build a mapping of dbt model and source nodes to their current and target yaml paths.""" - create_missing_source_yamls(context) - folder_map: dict[str, SchemaFileLocation] = {} + if create_missing_sources: + create_missing_source_yamls(context) + out_map: dict[str, SchemaFileLocation] = {} for uid, node in filter_models(context): current_path = get_current_yaml_path(context, node) - folder_map[uid] = SchemaFileLocation( + out_map[uid] = SchemaFileLocation( target=get_target_yaml_path(context, node).resolve(), current=current_path.resolve() if current_path else None, node_type=node.resource_type, ) - return folder_map - - -def generate_minimal_yaml_data(context: YamlRefactorContext, node: ResultNode) -> dict[str, t.Any]: - """Get the minimal model yaml data for a dbt model node. (operating under the assumption this yaml probably does not exist yet)""" - return { - "name": node.name, - "description": node.description or "", - "columns": [ - { - "name": name.lower() if context.settings.output_to_lower else name, - "description": meta.comment or "", - } - for name, meta in get_columns(context, _get_catalog_key_for_node(node)).items() - ], - } - + return out_map -def augment_existing_yaml_data( - context: YamlRefactorContext, yaml_section: dict[str, t.Any], node: ResultNode -) -> dict[str, t.Any]: - """Mutate an existing yaml section with additional column information.""" - existing_cols = [c["name"] for c in yaml_section.get("columns", [])] - db_cols = get_columns(context, _get_catalog_key_for_node(node)) - new_cols = [ - c for n, c in db_cols.items() if n.lower() not in (e.lower() for e in existing_cols) - ] - for column in new_cols: - yaml_section.setdefault("columns", []).append( - {"name": column.name, "description": column.comment or ""} - ) - logger.info(f"Injecting column {column.name} into {node.unique_id}") - return yaml_section +def _read_yaml(context: YamlRefactorContext, path: Path) -> dict[str, t.Any]: + """Read a yaml file from disk.""" + if not path.is_file(): + return {} + with context.yaml_handler_lock: + return t.cast(dict[str, t.Any], context.yaml_handler.load(path)) -def _draft_structure_for_node( - context: YamlRefactorContext, - yaml_loc: SchemaFileLocation, - uid: str, - blueprint: dict[Path, SchemaFileMigration], - bp_mutex: threading.Lock, -) -> None: - """Draft a structure update plan for a dbt model or source node.""" - with bp_mutex: - if yaml_loc.target not in blueprint: - blueprint[yaml_loc.target] = SchemaFileMigration() - - node = ( - context.project.manifest.nodes[uid] - if yaml_loc.node_type == NodeType.Model - else context.project.manifest.sources[uid] - ) - - if yaml_loc.current is None: - if yaml_loc.node_type == NodeType.Model: - with bp_mutex: - blueprint[yaml_loc.target].output["models"].append( - generate_minimal_yaml_data(context, node) - ) - return +def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any]) -> None: + """Write a yaml file to disk and register a mutation with the context.""" with context.yaml_handler_lock: - existing_doc = context.yaml_handler.load(yaml_loc.current) - - if yaml_loc.node_type == NodeType.Model: - assert isinstance(node, ModelNode) - for yaml_data in existing_doc.get("models", []): - if yaml_data["name"] == node.name: - _ = augment_existing_yaml_data(context, t.cast(dict[str, t.Any], yaml_data), node) - with bp_mutex: - blueprint[yaml_loc.target].output["models"].append(yaml_data) - blueprint[yaml_loc.target].supersede.setdefault( - yaml_loc.current, - [], - ).append(node) - break - else: - assert isinstance(node, SourceDefinition) - for source in existing_doc.get("sources", []): - if source["name"] == node.source_name: - for yaml_data in source["tables"]: - if yaml_data["name"] == node.name: - _ = augment_existing_yaml_data( - context, t.cast(dict[str, t.Any], yaml_data), node - ) - with bp_mutex: - if not any( - s["name"] == node.source_name - for s in blueprint[yaml_loc.target].output["sources"] - ): - blueprint[yaml_loc.target].output["sources"].append(source) - for existing_sources in blueprint[yaml_loc.target].output["sources"]: - if existing_sources["name"] == node.source_name: - for existing_tables in existing_sources["tables"]: - if existing_tables["name"] == node.name: - existing_tables.update(yaml_data) - break - blueprint[yaml_loc.target].supersede.setdefault( - yaml_loc.current, [] - ).append(node) - break - - -def draft_project_structure_update_plan( - context: YamlRefactorContext, -) -> dict[Path, SchemaFileMigration]: - """Draft a structure update plan for the dbt project.""" - blueprint: dict[Path, SchemaFileMigration] = {} - bp_mutex = threading.Lock() - logger.info("Building structure update plan.") - folder_map = build_schema_folder_mapping(context) - futs: list[Future[None]] = [] - for uid, schema_loc in folder_map.items(): - if not schema_loc.is_valid: - futs.append( - context.pool.submit( - _draft_structure_for_node, context, schema_loc, uid, blueprint, bp_mutex + path.parent.mkdir(parents=True, exist_ok=True) + context.yaml_handler.dump(data, path) + context.register_mutations(1) + + +def _generate_minimal_model_yaml(node: ModelNode) -> dict[str, t.Any]: + """Generate a minimal model yaml for a dbt model node.""" + return {"name": node.name, "columns": []} + + +def _generate_minimal_source_yaml(node: SourceDefinition) -> dict[str, t.Any]: + """Generate a minimal source yaml for a dbt source node.""" + return {"name": node.source_name, "tables": [{"name": node.name, "columns": []}]} + + +def _create_operations_for_node( + context: YamlRefactorContext, uid: str, loc: SchemaFileLocation +) -> list[RestructureOperation]: + """Create restructure operations for a dbt model or source node.""" + node = context.project.manifest.nodes.get(uid) or context.project.manifest.sources.get(uid) + if not node: + logger.warning(f"Node {uid} not found in manifest.") + return [] + + # If loc.current is None => we are generating a brand new file + # If loc.current => we unify it with the new location + ops: list[RestructureOperation] = [] + + if loc.current is None: + if loc.node_type == NodeType.Model: + assert isinstance(node, ModelNode) + minimal = _generate_minimal_model_yaml(node) + ops.append( + RestructureOperation( + file_path=loc.target, + content={"version": 2, "models": [minimal]}, ) ) - _ = wait(futs) - return blueprint - + else: + assert isinstance(node, SourceDefinition) + minimal_source = _generate_minimal_source_yaml(node) + ops.append( + RestructureOperation( + file_path=loc.target, + content={"version": 2, "sources": [minimal_source]}, + ) + ) + else: + existing = _read_yaml(context, loc.current) + injectable: dict[str, t.Any] = {"version": 2} + injectable.setdefault("models", []) + injectable.setdefault("sources", []) + if loc.node_type == NodeType.Model: + assert isinstance(node, ModelNode) + for obj in existing.get("models", []): + if obj["name"] == node.name: + injectable["models"].append(obj) + break + else: + assert isinstance(node, SourceDefinition) + for src in existing.get("sources", []): + if src["name"] == node.source_name: + injectable["sources"].append(src) + break + ops.append( + RestructureOperation( + file_path=loc.target, + content=injectable, + superseded_paths={loc.current: [node]}, + ) + ) + return ops -def pretty_print_restructure_plan(blueprint: dict[Path, SchemaFileMigration]) -> None: - """Pretty print the restructure plan for the dbt project. (intended for rich.console)""" - import pprint - summary = [] - for plan_path, migration_obj in blueprint.items(): - if not migration_obj.supersede: - summary.append((["CREATE"], "->", plan_path.name)) - else: - files_superseded = [p.name for p in migration_obj.supersede.keys()] or ["CREATE"] - summary.append((files_superseded, "->", plan_path.name)) +def draft_restructure_delta_plan(context: YamlRefactorContext) -> RestructureDeltaPlan: + """Draft a restructure plan for the dbt project.""" + plan = RestructureDeltaPlan() + lock = threading.Lock() - # logger.info(summary) - pprint.pprint(t.cast(list[t.Any], summary)) + def _job(uid: str, loc: SchemaFileLocation) -> None: + ops = _create_operations_for_node(context, uid, loc) + with lock: + plan.operations.extend(ops) + futs: list[Future[None]] = [] + for uid, loc in build_yaml_file_mapping(context).items(): + if not loc.is_valid: + futs.append(context.pool.submit(_job, uid, loc)) + done, _ = wait(futs, return_when=FIRST_EXCEPTION) + for fut in done: + exc = fut.exception() + if exc: + raise exc + return plan + + +def pretty_print_plan(plan: RestructureDeltaPlan) -> None: + """Pretty print the restructure plan for the dbt project.""" + for op in plan.operations: + logger.info(f"Processing {op.content}") + if not op.superseded_paths: + logger.info(f"CREATE or MERGE => {op.file_path}") + else: + old_paths = [p.name for p in op.superseded_paths.keys()] or ["UNKNOWN"] + logger.info(f"{old_paths} -> {op.file_path}") -def cleanup_blueprint( - blueprint: dict[Path, SchemaFileMigration], -) -> dict[Path, SchemaFileMigration]: - """Cleanup the blueprint by removing empty models and sources, mutating it in place.""" - for path_key in list(blueprint.keys()): - out_dict = blueprint[path_key].output - if "models" in out_dict and not out_dict["models"]: - del out_dict["models"] - if "sources" in out_dict and not out_dict["sources"]: - del out_dict["sources"] - if not out_dict.get("models") and not out_dict.get("sources"): - del blueprint[path_key] - return blueprint +def _remove_models(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> None: + """Clean up the existing yaml doc by removing models superseded by the restructure plan.""" + to_remove = {n.name for n in nodes if n.resource_type == NodeType.Model} + keep_models = [] + for model_block in existing_doc.get("models", []): + if model_block.get("name") not in to_remove: + keep_models.append(model_block) + existing_doc["models"] = keep_models -def commit_project_restructure_to_disk( - context: YamlRefactorContext, - blueprint: dict[Path, SchemaFileMigration] | None = None, -) -> int: - if not blueprint: - blueprint = draft_project_structure_update_plan(context) - blueprint = cleanup_blueprint(blueprint) - if not blueprint: - logger.info("Project structure is already conformed.") - return 0 +def _remove_sources(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> None: + """Clean up the existing yaml doc by removing sources superseded by the restructure plan.""" + to_remove_sources = { + (n.source_name, n.name) for n in nodes if n.resource_type == NodeType.Source + } + keep_sources = [] + for src_block in existing_doc.get("sources", []): + keep_tables = [] + for tbl in src_block.get("tables", []): + if (src_block["name"], tbl["name"]) not in to_remove_sources: + keep_tables.append(tbl) + if keep_tables: + src_block["tables"] = keep_tables + keep_sources.append(src_block) + existing_doc["sources"] = keep_sources + + +def apply_restructure_plan(context: YamlRefactorContext, plan: RestructureDeltaPlan) -> None: + """Apply the restructure plan for the dbt project.""" + if not plan.operations: + logger.info("No changes needed.") + return - pretty_print_restructure_plan(blueprint) - change_offset = context.mutation_count + for op in plan.operations: + output_doc: dict[str, t.Any] = {"version": 2} + if op.file_path.exists(): + existing_data = _read_yaml(context, op.file_path) + output_doc.update(existing_data) + + for key, val in op.content.items(): + if isinstance(val, list): + output_doc.setdefault(key, []).extend(val) + elif isinstance(val, dict): + output_doc.setdefault(key, {}).update(val) + else: + output_doc[key] = val - for target, struct in blueprint.items(): - if not target.exists(): - logger.info(f"Creating schema file {target}") + if not context.settings.dry_run: + _write_yaml(context, op.file_path, output_doc) - if not context.settings.dry_run: - target.parent.mkdir(parents=True, exist_ok=True) - target.touch() + for path, nodes in op.superseded_paths.items(): + if path.is_file(): + existing_data = _read_yaml(context, path) - context.yaml_handler.dump(struct.output, target) - context.register_mutations(1) - else: - logger.info(f"Updating schema file {target}") - - existing: dict[str, t.Any] = context.yaml_handler.load(target) - if not existing: - existing = {"version": 2} - - if "version" not in existing: - existing["version"] = 2 - if "models" in struct.output: - existing.setdefault("models", []).extend(struct.output["models"]) - if "sources" in struct.output: - existing.setdefault("sources", []).extend(struct.output["sources"]) - - if not context.settings.dry_run: - context.yaml_handler.dump(existing, target) - context.register_mutations(1) - - for mut_path, nodes in struct.supersede.items(): - mut_schema = context.yaml_handler.load(mut_path) - - to_remove_models = {n.name for n in nodes if n.resource_type == NodeType.Model} - to_remove_sources = { - (n.source_name, n.name) for n in nodes if n.resource_type == NodeType.Source - } - - keep: list[t.Any] = [] - for model in mut_schema.get("models", []): - if model["name"] not in to_remove_models: - keep.append(model) - mut_schema["models"] = keep - - keep_sources: list[t.Any] = [] - for source in mut_schema.get("sources", []): - keep = [] - for table in source.get("tables", []): - if (source["name"], table["name"]) not in to_remove_sources: - keep.append(table) - if keep: # At least one table remains - source["tables"] = keep - keep_sources.append(source) - mut_schema["sources"] = keep_sources - - if not mut_schema.get("models") and not mut_schema.get("sources"): - logger.info(f"Superseding entire file {mut_path}") - if not context.settings.dry_run: - mut_path.unlink(missing_ok=True) - if mut_path.parent.exists() and not any(mut_path.parent.iterdir()): - mut_path.parent.rmdir() - else: - if not context.settings.dry_run: - context.yaml_handler.dump(t.cast(dict[str, t.Any], mut_schema), mut_path) - context.register_mutations(1) - logger.info(f"Migrated doc from {mut_path} -> {target}") + if "models" in existing_data: + _remove_models(existing_data, nodes) + if "sources" in existing_data: + _remove_sources(existing_data, nodes) - return context.mutation_count - change_offset + if (not existing_data.get("models")) and (not existing_data.get("sources")): + if not context.settings.dry_run: + path.unlink(missing_ok=True) + if path.parent.exists() and not any(path.parent.iterdir()): + path.parent.rmdir() + context.register_mutations(1) + logger.info(f"Superseded entire file {path}") + else: + if not context.settings.dry_run: + _write_yaml(context, path, existing_data) + logger.info(f"Migrated doc from {path} -> {op.file_path}") def propagate_documentation_downstream( context: YamlRefactorContext, force_inheritance: bool = False ) -> None: - folder_map = build_schema_folder_mapping(context) + folder_map = build_yaml_file_mapping(context) futures = [] with context.project.adapter.connection_named("dbt-osmosis"): for unique_id, node in filter_models(context): @@ -1246,5 +1219,10 @@ def run_example_compilation_flow() -> None: c.vars = {"dbt-osmosis": {}} project = create_dbt_project_context(c) yaml_context = YamlRefactorContext(project) + plan = draft_restructure_delta_plan(yaml_context) + # print("Plan =>", plan) + pretty_print_plan(plan) + apply_restructure_plan(yaml_context, plan) + exit(0) plan = draft_project_structure_update_plan(yaml_context) _ = commit_project_restructure_to_disk(yaml_context, plan) From fe91793245aa38d121e0c89ebe554360d259e3df Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 00:31:01 -0700 Subject: [PATCH 10/46] chore: remove vendored mod --- src/dbt_osmosis/core/osmosis.py | 6 +- src/dbt_osmosis/vendored/__init__.py | 0 .../vendored/dbt_core_interface/VENDORED.md | 3 - .../vendored/dbt_core_interface/__init__.py | 3 - .../vendored/dbt_core_interface/project.py | 6510 ----------------- 5 files changed, 2 insertions(+), 6520 deletions(-) delete mode 100644 src/dbt_osmosis/vendored/__init__.py delete mode 100644 src/dbt_osmosis/vendored/dbt_core_interface/VENDORED.md delete mode 100644 src/dbt_osmosis/vendored/dbt_core_interface/__init__.py delete mode 100644 src/dbt_osmosis/vendored/dbt_core_interface/project.py diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 9e4cc4e2..66af45de 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -483,12 +483,10 @@ def f(node: ResultNode) -> bool: yield uid, dbt_node -def normalize_column_name(column: str, credentials_type: str, to_lower: bool = False) -> str: +def normalize_column_name(column: str, credentials_type: str) -> str: """Apply case normalization to a column name based on the credentials type.""" if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): return column - if to_lower: - return column.lower() if credentials_type == "snowflake": return column.upper() return column @@ -503,7 +501,7 @@ def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: return col.dtype -def _get_catalog_key_for_node(node: ResultNode) -> CatalogKey: +def get_catalog_key_for_node(node: ResultNode) -> CatalogKey: """Make an appropriate catalog key for a dbt node.""" if node.resource_type == NodeType.Source: return CatalogKey(node.database, node.schema, node.identifier or node.name) diff --git a/src/dbt_osmosis/vendored/__init__.py b/src/dbt_osmosis/vendored/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/src/dbt_osmosis/vendored/dbt_core_interface/VENDORED.md b/src/dbt_osmosis/vendored/dbt_core_interface/VENDORED.md deleted file mode 100644 index b8d799d6..00000000 --- a/src/dbt_osmosis/vendored/dbt_core_interface/VENDORED.md +++ /dev/null @@ -1,3 +0,0 @@ -# Source - -`dbt-core-interface` is vendored from https://github.com/z3z1ma/dbt-core-interface diff --git a/src/dbt_osmosis/vendored/dbt_core_interface/__init__.py b/src/dbt_osmosis/vendored/dbt_core_interface/__init__.py deleted file mode 100644 index 7986e7ce..00000000 --- a/src/dbt_osmosis/vendored/dbt_core_interface/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -"""Dbt Core Interface.""" - -from .project import * # noqa: F401, F403 diff --git a/src/dbt_osmosis/vendored/dbt_core_interface/project.py b/src/dbt_osmosis/vendored/dbt_core_interface/project.py deleted file mode 100644 index 9e8e674b..00000000 --- a/src/dbt_osmosis/vendored/dbt_core_interface/project.py +++ /dev/null @@ -1,6510 +0,0 @@ -#!/usr/bin/env python -"""The interface for interacting with dbt-core. - -We package the interface as a single python module that can be imported -and used in other python projects. You do not need to include dbt-core-interface -as a dependency in your project if you do not want to. You can simply copy -the dbt_core_interface folder into your project and import it from there. -""" - -# region dbt-core-interface imports & monkey patches - -if 1: # this stops ruff from complaining about the import order - import dbt.adapters.factory - - # See ... for more info on this monkey patch - dbt.adapters.factory.get_adapter = lambda config: config.adapter # type: ignore - -import _thread as thread -import base64 -import calendar -import cgi -import configparser -import decimal -import email.utils -import functools -import hashlib -import hmac -import http.client as httplib -import itertools -import json -import logging -import mimetypes -import os -import pickle -import re -import sys -import tempfile -import threading -import time -import uuid -import warnings -import weakref -from collections import OrderedDict, UserDict -from collections.abc import MutableMapping as DictMixin -from contextlib import contextmanager, redirect_stdout -from copy import copy -from dataclasses import asdict, dataclass, field -from datetime import date as datedate -from datetime import datetime, timedelta -from enum import Enum -from functools import lru_cache, wraps -from http.cookies import CookieError, Morsel, SimpleCookie -from inspect import getfullargspec -from io import BytesIO -from json import dumps as json_dumps -from json import loads as json_lds -from pathlib import Path -from tempfile import NamedTemporaryFile -from traceback import format_exc, print_exc -from types import FunctionType -from types import ModuleType as new_module # noqa -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generator, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, - cast, -) -from unicodedata import normalize -from urllib.parse import SplitResult as UrlSplitResult -from urllib.parse import quote as urlquote -from urllib.parse import unquote as urlunquote -from urllib.parse import urlencode, urljoin - -import dbt.version -import yaml - -# We maintain the smallest possible surface area of dbt imports -from dbt.adapters.factory import get_adapter_class_by_name - -try: - # dbt >= 1.8 - from dbt_common.clients.system import get_env, make_directory - from dbt_common.context import set_invocation_context -except ImportError: - # dbt < 1.8 - from dbt.clients.system import make_directory - -from dbt.config.runtime import RuntimeConfig -from dbt.contracts.graph.nodes import ColumnInfo, ManifestNode -from dbt.flags import set_from_args -from dbt.node_types import NodeType -from dbt.parser.manifest import PARTIAL_PARSE_FILE_NAME, ManifestLoader, process_node -from dbt.parser.sql import SqlBlockParser, SqlMacroParser -from dbt.task.sql import SqlCompileRunner -from dbt.tracking import disable_tracking - -if TYPE_CHECKING: - # These imports are only used for type checking - from agate import Table # type: ignore # No stubs for agate - from dbt.adapters.base import BaseAdapter, BaseRelation # type: ignore - - try: - # dbt >= 1.8 - from dbt.adapters.contracts.connection import AdapterResponse - from dbt_common.semver import VersionSpecifier - - except ImportError: - # dbt < 1.8 - from dbt.contracts.connection import AdapterResponse - from dbt.semver import VersionSpecifier - from dbt.contracts.results import ExecutionResult, RunExecutionResult - from dbt.task.runnable import ManifestTask - -# dbt-core-interface is designed for non-standard use. There is no -# reason to track usage of this package. -disable_tracking() - -urlunquote = functools.partial(urlunquote, encoding="latin1") - -RAW_CODE = "raw_code" -COMPILED_CODE = "compiled_code" - - -def default_project_dir() -> Path: - if "DBT_PROJECT_DIR" in os.environ: - return Path(os.environ["DBT_PROJECT_DIR"]).resolve() - paths = list(Path.cwd().parents) - paths.insert(0, Path.cwd()) - return next((x for x in paths if (x / "dbt_project.yml").exists()), Path.cwd()) - - -def default_profiles_dir() -> Path: - if "DBT_PROFILES_DIR" in os.environ: - return Path(os.environ["DBT_PROFILES_DIR"]).resolve() - return Path.cwd() if (Path.cwd() / "profiles.yml").exists() else Path.home() / ".dbt" - - -DEFAULT_PROFILES_DIR = str(default_profiles_dir()) -DEFAULT_PROJECT_DIR = str(default_project_dir()) - - -def write_manifest_for_partial_parse(self: ManifestLoader): - """Monkey patch for dbt manifest loader.""" - path = os.path.join( - self.root_project.project_root, - self.root_project.target_path, - PARTIAL_PARSE_FILE_NAME, - ) - try: - if self.manifest.metadata.dbt_version != dbt.version.__version__: - self.manifest.metadata.dbt_version = dbt.version.__version__ - manifest_msgpack = self.manifest.to_msgpack() - make_directory(os.path.dirname(path)) - with open(path, "wb") as fp: - fp.write(manifest_msgpack) - except Exception: - raise - - -__all__ = [ - "DbtProject", - "DbtProjectContainer", - "DbtAdapterExecutionResult", - "DbtAdapterCompilationResult", - "DbtManifestProxy", - "DbtConfiguration", - "DEFAULT_PROFILES_DIR", - "DEFAULT_PROJECT_DIR", - "ServerRunResult", - "ServerCompileResult", - "ServerResetResult", - "ServerRegisterResult", - "ServerUnregisterResult", - "ServerErrorCode", - "ServerError", - "ServerErrorContainer", - "ServerPlugin", - "run_server", - "default_project_dir", - "default_profiles_dir", - "ColumnInfo", - "ManifestNode", -] - -T = TypeVar("T") -JINJA_CONTROL_SEQUENCES = ["{{", "}}", "{%", "%}", "{#", "#}"] - -LOGGER = logging.getLogger(__name__) - -__version__ = dbt.version.__version__ - -# endregion - -# region dbt-core-interface core - - -class DbtCommand(str, Enum): - """The dbt commands we support.""" - - RUN = "run" - BUILD = "build" - TEST = "test" - SEED = "seed" - RUN_OPERATION = "run-operation" - LIST = "list" - SNAPSHOT = "snapshot" - - -@dataclass -class DbtConfiguration: - """The configuration for dbt-core.""" - - project_dir: str = DEFAULT_PROJECT_DIR - profiles_dir: str = DEFAULT_PROFILES_DIR - profile: Optional[str] = None - target: Optional[str] = None - threads: int = 1 - single_threaded: bool = True - _vars: str = "{}" - # Mutes unwanted dbt output - quiet: bool = True - # We need single threaded, simple, jinja parsing -- no rust/pickling - use_experimental_parser: bool = False - static_parser: bool = False - partial_parse: bool = False - # A required attribute for dbt, not used by our interface - dependencies: List[str] = field(default_factory=list) - which: str = None - DEBUG: bool = False - REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES: bool = False - - def __post_init__(self) -> None: - """Post init hook to set single_threaded and remove target if not provided.""" - if self.target is None: - del self.target - self.single_threaded = self.threads == 1 - - @property - def vars(self) -> str: - """Access the vars attribute as a string.""" - return self._vars - - @vars.setter - def vars(self, v: Union[str, Dict[str, Any]]) -> None: - """Set the vars attribute as a string or dict. - - If dict then it will be converted to a string which is what dbt expects. - """ - if isinstance(v, str): - v = yaml.safe_load(v) - self._vars = v - - -class DbtManifestProxy(UserDict): # type: ignore - """Proxy for manifest dictionary object. - - If we need mutation then we should create a copy of the dict or interface with the dbt-core manifest object instead. - """ - - def _readonly(self, *args: Any, **kwargs: Any) -> None: - raise RuntimeError("Cannot modify DbtManifestProxy") - - __setitem__ = _readonly - __delitem__ = _readonly - pop = _readonly # type: ignore - popitem = _readonly # type: ignore - clear = _readonly - update = _readonly # type: ignore - setdefault = _readonly # type: ignore - - -@dataclass -class DbtAdapterExecutionResult: - """Interface for execution results. - - This keeps us 1 layer removed from dbt interfaces which may change. - """ - - adapter_response: "AdapterResponse" - table: "Table" - raw_code: str - compiled_code: str - - -@dataclass -class DbtAdapterCompilationResult: - """Interface for compilation results. - - This keeps us 1 layer removed from dbt interfaces which may change. - """ - - raw_code: str - compiled_code: str - node: "ManifestNode" - injected_code: Optional[str] = None - - -class DbtTaskConfiguration: - """A container for task configuration with sane defaults. - - Users should enforce an interface for their tasks via a factory method that returns an instance of this class. - """ - - def __init__(self, profile: str, target: str, **kwargs: Any) -> None: - """Initialize the task configuration.""" - self.profile: str = profile - self.target: str = target - self.kwargs: Dict[str, Any] = kwargs or {} - self.threads: int = kwargs.get("threads", 1) - self.single_threaded: bool = kwargs.get("single_threaded", self.threads == 1) - self.state_id: Optional[str] = kwargs.get("state_id") - self.version_check: bool = kwargs.get("version_check", False) - self.resource_types: Optional[List[str]] = kwargs.get("resource_types") - self.models: Union[None, str, List[str]] = kwargs.get("models") - self.select: Union[None, str, List[str]] = kwargs.get("select") - self.exclude: Union[None, str, List[str]] = kwargs.get("exclude") - self.selector_name: Optional[str] = kwargs.get("selector_name") - self.state: Optional[str] = kwargs.get("state") - self.defer: bool = kwargs.get("defer", False) - self.fail_fast: bool = kwargs.get("fail_fast", False) - self.full_refresh: bool = kwargs.get("full_refresh", False) - self.store_failures: bool = kwargs.get("store_failures", False) - self.indirect_selection: bool = kwargs.get("indirect_selection", False) - self.data: bool = kwargs.get("data", False) - self.schema: bool = kwargs.get("schema", False) - self.show: bool = kwargs.get("show", False) - self.output: str = kwargs.get("output", "name") - self.output_keys: Union[None, str, List[str]] = kwargs.get("output_keys") - self.macro: Optional[str] = kwargs.get("macro") - self.args: str = kwargs.get("args", "{}") - self.quiet: bool = kwargs.get("quiet", True) - self.defer_state: Path = kwargs.get("defer_state", None) - self.exclude_resource_types: List[str] = kwargs.get("exclude_resource_types", None) - self.selector: str = kwargs.get("selector", None) - self.write_json: bool = kwargs.get("write_json", False) - self.include_saved_query: bool = kwargs.get("include_saved_query", False) - - @classmethod - def from_runtime_config(cls, config: RuntimeConfig, **kwargs: Any) -> "DbtTaskConfiguration": - """Create a task configuration container from a DbtProject's runtime config. - - This is a good example of where static typing is not necessary. Developers can just - pass in whatever they want and it will be passed through to the task configuration container. - Users of the library are free to pass in any mapping derived from their own implementation for - their own custom task. - """ - threads = kwargs.pop("threads", config.threads) - kwargs.pop("single_threaded", None) # This is a derived property - return cls( - config.profile_name, - config.target_name, - threads=threads, - single_threaded=threads == 1, - **kwargs, - ) - - -class DbtProject: - """Container for a dbt project. - - The dbt attribute is the primary interface for dbt-core. The adapter attribute is the primary interface for the dbt adapter. - """ - - ADAPTER_TTL = 3600 - - def __init__( - self, - target: Optional[str] = None, - profiles_dir: str = DEFAULT_PROFILES_DIR, - project_dir: str = DEFAULT_PROJECT_DIR, - threads: int = 1, - vars: Optional[str] = None, - profile: Optional[str] = None, - ) -> None: - """Initialize the DbtProject.""" - self.base_config = DbtConfiguration( - threads=threads, - target=target, - profiles_dir=profiles_dir or DEFAULT_PROFILES_DIR, - project_dir=project_dir or DEFAULT_PROJECT_DIR, - profile=profile, - ) - if hasattr(sys.modules[__name__], "set_invocation_context"): - set_invocation_context(get_env()) - if vars is None: - vars = "{}" - self.base_config.vars = vars - - # Mutexes - self.adapter_mutex = threading.Lock() - self.parsing_mutex = threading.Lock() - self.manifest_mutation_mutex = threading.Lock() - - # First time initialization - self.parse_project(init=True) - - # Utilities - self._sql_parser: Optional[SqlBlockParser] = None - self._macro_parser: Optional[SqlMacroParser] = None - - @classmethod - def from_config(cls, config: DbtConfiguration) -> "DbtProject": - """Instatiate the DbtProject directly from a DbtConfiguration instance.""" - return cls( - target=config.target, - profiles_dir=config.profiles_dir, - project_dir=config.project_dir, - threads=config.threads, - ) - - def get_adapter_cls(self) -> Type["BaseAdapter"]: - """Get the adapter class associated with the dbt profile.""" - return get_adapter_class_by_name(self.config.credentials.type) - - def initialize_adapter(self) -> None: - """Initialize a dbt adapter.""" - if hasattr(self, "_adapter"): - # Clean up any existing connections, err on the side of runtime - # resiliency, don't let this fail. Maybe there is a world where - # it really matters, but I don't think so. Someone can make the case. - try: - self._adapter.connections.cleanup_all() - except Exception as e: - LOGGER.debug(f"Failed to cleanup adapter connections: {e}") - # The adapter.setter verifies connection, resets TTL, and updates adapter ref on config - # this is thread safe by virtue of the adapter_mutex on the adapter.setter - try: - self.adapter = self.get_adapter_cls()(self.config) - except TypeError: - from dbt.mp_context import get_mp_context - - self.adapter = self.get_adapter_cls()(self.config, get_mp_context()) - - try: - from dbt.context.providers import generate_runtime_macro_context - - self.adapter.set_macro_context_generator(generate_runtime_macro_context) - except Exception: - pass - - @property - def adapter(self) -> "BaseAdapter": - """dbt-core adapter with TTL and automatic reinstantiation. - - This supports long running processes that may have their connection to the database terminated by - the database server. It is transparent to the user. - """ - if time.time() - self._adapter_created_at > self.ADAPTER_TTL: - self.initialize_adapter() - return self._adapter - - @adapter.setter - def adapter(self, adapter: "BaseAdapter") -> None: - """Verify connection and reset TTL on adapter set, update adapter prop ref on config.""" - # Ensure safe concurrent access to the adapter - # Currently we choose to drop attempted mutations while an existing mutation is in progress - # This is a tradeoff between safety and performance, we could also choose to block - if self.adapter_mutex.acquire(blocking=False): - try: - self._adapter = adapter - self._adapter.connections.set_connection_name() - self._adapter_created_at = time.time() - self.config.adapter = self.adapter # type: ignore - finally: - self.adapter_mutex.release() - - def parse_project(self, init: bool = False) -> None: - """Parse project on disk. - - Uses the config from `DbtConfiguration` in args attribute, verifies connection to adapters database, - mutates config, adapter, and dbt attributes. Thread-safe. From an efficiency perspective, this is a - relatively expensive operation, so we want to avoid doing it more than necessary. - """ - # Threads will wait here if another thread is parsing the project - # however, it probably makes sense to not parse the project once the waiter - # has acquired the lock, TODO: Lets implement a debounce-like buffer here - with self.parsing_mutex: - if init: - set_from_args(self.base_config, self.base_config) - # We can think of `RuntimeConfig` as a dbt-core "context" object - # where a `Project` meets a `Profile` and is a superset of them both - self.config = RuntimeConfig.from_args(self.base_config) - self.initialize_adapter() - - _project_parser = ManifestLoader( - self.config, - self.config.load_dependencies(), - self.adapter.connections.set_query_header, - ) - - self.manifest = _project_parser.load() - self.manifest.build_flat_graph() - _project_parser.save_macros_to_adapter(self.adapter) - - self._sql_parser = None - self._macro_parser = None - - def safe_parse_project(self, reinit: bool = False) -> None: - """Safe version of parse_project that will not mutate the config if parsing fails.""" - if reinit: - self.clear_internal_caches() - _config_pointer = copy(self.config) - try: - self.parse_project(init=reinit) - except Exception as parse_error: - self.config = _config_pointer - raise parse_error - self.write_manifest_artifact() - - def _verify_connection(self, adapter: "BaseAdapter") -> "BaseAdapter": - """Verification for adapter + profile. - - Used as a passthrough, this also seeds the master connection. - """ - try: - adapter.connections.set_connection_name() - adapter.debug_query() - except Exception as query_exc: - raise RuntimeError("Could not connect to Database") from query_exc - else: - return adapter - - def adapter_probe(self) -> bool: - """Check adapter connection, useful for long running procsesses.""" - if not hasattr(self, "adapter") or self.adapter is None: - return False - try: - with self.adapter.connection_named("osmosis-heartbeat"): - self.adapter.debug_query() - except Exception: - # TODO: Should we preemptively reinitialize the adapter here? or leave it to userland to handle? - return False - return True - - def fn_threaded_conn(self, fn: Callable[..., T], *args: Any, **kwargs: Any) -> Callable[..., T]: - """For jobs which are intended to be submitted to a thread pool.""" - - @wraps(fn) - def _with_conn() -> T: - self.adapter.connections.set_connection_name() - return fn(*args, **kwargs) - - return _with_conn - - def generate_runtime_model_context(self, node: "ManifestNode") -> Dict[str, Any]: - """Wrap dbt context provider.""" - # Purposefully deferred due to its many dependencies - from dbt.context.providers import generate_runtime_model_context - - return generate_runtime_model_context(node, self.config, self.manifest) - - @property - def project_name(self) -> str: - """dbt project name.""" - return self.config.project_name - - @property - def project_root(self) -> str: - """dbt project root.""" - return self.config.project_root - - @property - def manifest_dict(self) -> DbtManifestProxy: - """dbt manifest dict.""" - return DbtManifestProxy(self.manifest.flat_graph) - - def write_manifest_artifact(self) -> None: - """Write a manifest.json to disk. - - Because our project is in memory, this is useful for integrating with other tools that - expect a manifest.json to be present in the target directory. - """ - artifact_path = os.path.join( - self.config.project_root, self.config.target_path, "manifest.json" - ) - self.manifest.write(artifact_path) - - def clear_internal_caches(self) -> None: - """Clear least recently used caches and reinstantiable container objects.""" - self.compile_code.cache_clear() - self.unsafe_compile_code.cache_clear() - - def get_ref_node(self, target_model_name: str) -> "ManifestNode": - """Get a `ManifestNode` from a dbt project model name. - - This is the same as one would in a {{ ref(...) }} macro call. - """ - return cast( - "ManifestNode", - self.manifest.resolve_ref( - target_model_name=target_model_name, - target_model_package=None, - current_project=self.config.project_name, - node_package=self.config.project_name, - ), - ) - - def get_source_node(self, target_source_name: str, target_table_name: str) -> "ManifestNode": - """Get a `ManifestNode` from a dbt project source name and table name. - - This is the same as one would in a {{ source(...) }} macro call. - """ - return cast( - "ManifestNode", - self.manifest.resolve_source( - target_source_name=target_source_name, - target_table_name=target_table_name, - current_project=self.config.project_name, - node_package=self.config.project_name, - ), - ) - - def get_node_by_path(self, path: str) -> Optional["ManifestNode"]: - """Find an existing node given relative file path. - - TODO: We can include Path obj support and make this more robust. - """ - for node in self.manifest.nodes.values(): - if node.original_file_path == path: - return node - return None - - @contextmanager - def generate_server_node( - self, sql: str, node_name: str = "anonymous_node" - ) -> Generator["ManifestNode", None, None]: - """Get a transient node for SQL execution against adapter. - - This is a context manager that will clear the node after execution and leverages a mutex during manifest mutation. - """ - with self.manifest_mutation_mutex: - self._clear_node(node_name) - sql_node = self.sql_parser.parse_remote(sql, node_name) - process_node(self.config, self.manifest, sql_node) - yield sql_node - self._clear_node(node_name) - - def unsafe_generate_server_node( - self, sql: str, node_name: str = "anonymous_node" - ) -> "ManifestNode": - """Get a transient node for SQL execution against adapter. - - This is faster than `generate_server_node` but does not clear the node after execution. - That is left to the caller. It is also not thread safe in and of itself and requires the caller to - manage jitter or mutexes. - """ - self._clear_node(node_name) - sql_node = self.sql_parser.parse_remote(sql, node_name) - process_node(self.config, self.manifest, sql_node) - return sql_node - - def inject_macro(self, macro_contents: str) -> None: - """Inject a macro into the project. - - This is useful for testing macros in isolation. It offers unique ways to integrate with dbt. - """ - macro_overrides = {} - for node in self.macro_parser.parse_remote(macro_contents): - macro_overrides[node.unique_id] = node - self.manifest.macros.update(macro_overrides) - - def get_macro_function(self, macro_name: str) -> Callable[..., Any]: - """Get macro as a function which behaves like a Python function.""" - - def _macro_fn(**kwargs: Any) -> Any: - return self.adapter.execute_macro(macro_name, self.manifest, kwargs=kwargs) - - return _macro_fn - - def execute_macro(self, macro: str, **kwargs: Any) -> Any: - """Wrap adapter execute_macro. Execute a macro like a python function.""" - return self.get_macro_function(macro)(**kwargs) - - def adapter_execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False - ) -> Tuple["AdapterResponse", "Table"]: - """Wrap adapter.execute. Execute SQL against database. - - This is more on-the-rails than `execute_code` which intelligently handles jinja compilation provides a proxy result. - """ - return cast( - Tuple["AdapterResponse", "Table"], - self.adapter.execute(sql, auto_begin, fetch), - ) - - def execute_code(self, raw_code: str) -> DbtAdapterExecutionResult: - """Execute dbt SQL statement against database. - - This is a proxy for `adapter_execute` and the the recommended method for executing SQL against the database. - """ - # If no jinja chars then these are synonymous - compiled_code = str(raw_code) - if has_jinja(raw_code): - # Jinja found, compile it - compiled_code = self.compile_code(raw_code).compiled_code - return DbtAdapterExecutionResult( - *self.adapter_execute(compiled_code, fetch=True), - raw_code, - compiled_code, - ) - - def execute_from_node(self, node: "ManifestNode") -> DbtAdapterExecutionResult: - """Execute dbt SQL statement against database from a "ManifestNode".""" - raw_code: str = getattr(node, RAW_CODE) - compiled_code: Optional[str] = getattr(node, COMPILED_CODE, None) - if compiled_code: - # Node is compiled, execute the SQL - return self.execute_code(compiled_code) - # Node not compiled - if has_jinja(raw_code): - # Node has jinja in its SQL, compile it - compiled_code = self.compile_from_node(node).compiled_code - # Execute the SQL - return self.execute_code(compiled_code or raw_code) - - @lru_cache(maxsize=100) # noqa: B019 - def compile_code(self, raw_code: str) -> DbtAdapterCompilationResult: - """Create a node with `generate_server_node` method. Compile generated node. - - Has a retry built in because even uuidv4 cannot gaurantee uniqueness at the speed - in which we can call this function concurrently. A retry significantly increases the stability. - """ - temp_node_id = str(uuid.uuid4()) - with self.generate_server_node(raw_code, temp_node_id) as node: - return self.compile_from_node(node) - - @lru_cache(maxsize=100) # noqa: B019 - def unsafe_compile_code(self, raw_code: str, retry: int = 3) -> DbtAdapterCompilationResult: - """Create a node with `unsafe_generate_server_node` method. Compiles the generated node. - - Has a retry built in because even uuid4 cannot gaurantee uniqueness at the speed - in which we can call this function concurrently. A retry significantly increases the - stability. This is certainly the fastest way to compile SQL but it is yet to be benchmarked. - """ - temp_node_id = str(uuid.uuid4()) - try: - node = self.compile_from_node(self.unsafe_generate_server_node(raw_code, temp_node_id)) - except Exception as compilation_error: - if retry > 0: - return self.compile_code(raw_code, retry - 1) - raise compilation_error - else: - return node - finally: - self._clear_node(temp_node_id) - - def compile_from_node(self, node: "ManifestNode") -> DbtAdapterCompilationResult: - """Compiles existing node. ALL compilation passes through this code path. - - Raw SQL is marshalled by the caller into a mock node before being passed into this method. - Existing nodes can be passed in here directly. - """ - compiled_node = SqlCompileRunner( - self.config, self.adapter, node=node, node_index=1, num_nodes=1 - ).compile(self.manifest) - return DbtAdapterCompilationResult( - getattr(compiled_node, RAW_CODE), - getattr(compiled_node, COMPILED_CODE), - compiled_node, - ) - - def _clear_node(self, name: str = "anonymous_node") -> None: - """Clear remote node from dbt project.""" - self.manifest.nodes.pop(f"{NodeType.SqlOperation}.{self.project_name}.{name}", None) - - def get_relation(self, database: str, schema: str, name: str) -> Optional["BaseRelation"]: - """Wrap for `adapter.get_relation`.""" - return self.adapter.get_relation(database, schema, name) - - def relation_exists(self, database: str, schema: str, name: str) -> bool: - """Interface for checking if a relation exists in the database.""" - return self.adapter.get_relation(database, schema, name) is not None - - def node_exists(self, node: "ManifestNode") -> bool: - """Interface for checking if a node exists in the database.""" - return self.adapter.get_relation(self.create_relation_from_node(node)) is not None - - def create_relation(self, database: str, schema: str, name: str) -> "BaseRelation": - """Wrap `adapter.Relation.create`.""" - return self.adapter.Relation.create(database, schema, name) - - def create_relation_from_node(self, node: "ManifestNode") -> "BaseRelation": - """Wrap `adapter.Relation.create_from`.""" - return self.adapter.Relation.create_from(self.config, node) - - def get_or_create_relation( - self, database: str, schema: str, name: str - ) -> Tuple["BaseRelation", bool]: - """Get relation or create if not exists. - - Returns tuple of relation and boolean result of whether it existed ie: (relation, did_exist). - """ - ref = self.get_relation(database, schema, name) - return ( - (ref, True) - if ref is not None - else (self.create_relation(database, schema, name), False) - ) - - def create_schema(self, node: "ManifestNode") -> None: - """Create a schema in the database leveraging dbt-core's builtin macro.""" - self.execute_macro( - "create_schema", - kwargs={"relation": self.create_relation_from_node(node)}, - ) - - def get_columns_in_node(self, node: "ManifestNode") -> List[Any]: - """Wrap `adapter.get_columns_in_relation`.""" - return (self.adapter.get_columns_in_relation(self.create_relation_from_node(node)),) - - def get_columns(self, node: "ManifestNode") -> List[str]: - """Get a list of columns from a compiled node.""" - columns = [] - try: - columns.extend([c.name for c in self.get_columns_in_node(node)]) - except Exception: - # TODO: does this fallback make sense? - original_sql = str(getattr(node, RAW_CODE)) - setattr(node, RAW_CODE, f"SELECT * FROM ({original_sql}) WHERE 1=0") - result = self.execute_from_node(node) - setattr(node, RAW_CODE, original_sql), delattr(node, COMPILED_CODE) - node.compiled = False - columns.extend(result.table.column_names) - return columns - - def materialize( - self, node: "ManifestNode", temporary: bool = True - ) -> Tuple["AdapterResponse", None]: - """Materialize a table in the database. - - TODO: This is not fully baked. The API is stable but the implementation is not. - """ - return self.adapter_execute( - # Returns CTAS string so send to adapter.execute - self.execute_macro( - "create_table_as", - kwargs={ - "sql": getattr(node, COMPILED_CODE), - "relation": self.create_relation_from_node(node), - "temporary": temporary, - }, - ), - auto_begin=True, - ) - - @property - def sql_parser(self) -> SqlBlockParser: - """A dbt-core SQL parser capable of parsing and adding nodes to the manifest via `parse_remote` which will also return the added node to the caller. - - Note that post-parsing this still typically requires calls to `_process_nodes_for_ref` - and `_process_sources_for_ref` from the `dbt.parser.manifest` module in order to compile. - We have higher level methods that handle this for you. - """ - if self._sql_parser is None: - self._sql_parser = SqlBlockParser(self.config, self.manifest, self.config) - return self._sql_parser - - @property - def macro_parser(self) -> SqlMacroParser: - """A dbt-core macro parser. Parse macros with `parse_remote` and add them to the manifest. - - We have a higher level method `inject_macro` that handles this for you. - """ - if self._macro_parser is None: - self._macro_parser = SqlMacroParser(self.config, self.manifest) - return self._macro_parser - - def get_task_config(self, **kwargs) -> DbtTaskConfiguration: - """Get a dbt-core task configuration.""" - threads = kwargs.pop("threads", self.config.threads) - return DbtTaskConfiguration.from_runtime_config( - config=self.config, threads=threads, **kwargs - ) - - def get_task_cls(self, typ: DbtCommand) -> Type["ManifestTask"]: - """Get a dbt-core task class by type. - - This could be overridden to add custom tasks such as linting, etc. - so long as they are subclasses of `GraphRunnableTask`. - """ - # These are purposefully deferred imports - from dbt.task.build import BuildTask - from dbt.task.list import ListTask - from dbt.task.run import RunTask - from dbt.task.run_operation import RunOperationTask - from dbt.task.seed import SeedTask - from dbt.task.snapshot import SnapshotTask - from dbt.task.test import TestTask - - return { - DbtCommand.RUN: RunTask, - DbtCommand.BUILD: BuildTask, - DbtCommand.TEST: TestTask, - DbtCommand.SEED: SeedTask, - DbtCommand.LIST: ListTask, - DbtCommand.SNAPSHOT: SnapshotTask, - DbtCommand.RUN_OPERATION: RunOperationTask, - }[typ] - - def get_task(self, typ: DbtCommand, args: DbtTaskConfiguration) -> "ManifestTask": - """Get a dbt-core task by type.""" - try: - # DBT 1.8 requires manifest as 2-nd positional argument - task = self.get_task_cls(typ)(args, self.config, self.manifest) - except Exception as e: - task = self.get_task_cls(typ)(args, self.config) - # Render this a no-op on this class instance so that the tasks `run` - # method plumbing will defer to our existing in memory manifest. - task.load_manifest = lambda *args, **kwargs: None # type: ignore - task.manifest = self.manifest - return task - - def list( - self, - select: Optional[Union[str, List[str]]] = None, - exclude: Optional[Union[str, List[str]]] = None, - **kwargs: Dict[str, Any], - ) -> "ExecutionResult": - """List resources in the dbt project.""" - select, exclude = marshall_selection_args(select, exclude) - with redirect_stdout(None): - return self.get_task( # type: ignore - DbtCommand.LIST, - self.get_task_config(select=select, exclude=exclude, **kwargs), - ).run() - - def run( - self, - select: Optional[Union[str, List[str]]] = None, - exclude: Optional[Union[str, List[str]]] = None, - **kwargs: Dict[str, Any], - ) -> "RunExecutionResult": - """Run models in the dbt project.""" - select, exclude = marshall_selection_args(select, exclude) - with redirect_stdout(None): - return cast( - "RunExecutionResult", - self.get_task( - DbtCommand.RUN, - self.get_task_config(select=select, exclude=exclude, **kwargs), - ).run(), - ) - - def test( - self, - select: Optional[Union[str, List[str]]] = None, - exclude: Optional[Union[str, List[str]]] = None, - **kwargs: Dict[str, Any], - ) -> "ExecutionResult": - """Test models in the dbt project.""" - select, exclude = marshall_selection_args(select, exclude) - with redirect_stdout(None): - return self.get_task( # type: ignore - DbtCommand.TEST, - self.get_task_config(select=select, exclude=exclude, **kwargs), - ).run() - - def build( - self, - select: Optional[Union[str, List[str]]] = None, - exclude: Optional[Union[str, List[str]]] = None, - **kwargs: Dict[str, Any], - ) -> "ExecutionResult": - """Build resources in the dbt project.""" - select, exclude = marshall_selection_args(select, exclude) - with redirect_stdout(None): - return self.get_task( - DbtCommand.BUILD, - self.get_task_config(select=select, exclude=exclude, **kwargs), - ).run() - - -def marshall_selection_args( - select: Optional[Union[str, List[str]]] = None, - exclude: Optional[Union[str, List[str], None]] = None, -) -> Tuple[Union[str, List[str]], Union[str, List[str]]]: - """Marshall selection arguments to a list of strings.""" - if select is None: - select = [] - if exclude is None: - exclude = [] - if isinstance(select, (tuple, set, frozenset)): - select = list(select) - if isinstance(exclude, (tuple, set, frozenset)): - exclude = list(exclude) - # Permit standalone strings such as "my_model+ @some_other_model" - # as well as lists of strings such as ["my_model+", "@some_other_model"] - if not isinstance(select, list): - select = [select] - if not isinstance(exclude, list): - exclude = [exclude] - return select, exclude - - -class DbtProjectContainer: - """Manages multiple DbtProjects. - - A DbtProject corresponds to a single project. This interface is used - dbt projects in a single process. It enables basic multitenant servers. - """ - - def __init__(self) -> None: - """Initialize the container.""" - self._projects: Dict[str, DbtProject] = OrderedDict() - self._default_project: Optional[str] = None - - def get_project(self, project_name: str) -> Optional[DbtProject]: - """Primary interface to get a project and execute code.""" - return self._projects.get(project_name) - - def get_project_by_root_dir(self, root_dir: str) -> Optional[DbtProject]: - """Get a project by its root directory.""" - root_dir = os.path.abspath(os.path.normpath(root_dir)) - for project in self._projects.values(): - if os.path.abspath(project.project_root) == root_dir: - return project - return None - - def get_default_project(self) -> Optional[DbtProject]: - """Get the default project which is the earliest project inserted into the container.""" - default_project = self._default_project - if not default_project: - return None - return self._projects.get(default_project) - - def add_project( - self, - target: Optional[str] = None, - profiles_dir: str = DEFAULT_PROFILES_DIR, - project_dir: Optional[str] = None, - threads: int = 1, - vars: str = "{}", - name_override: str = "", - ) -> DbtProject: - """Add a DbtProject with arguments.""" - project = DbtProject(target, profiles_dir, project_dir, threads, vars) - project_name = name_override or project.config.project_name - if self._default_project is None: - self._default_project = project_name - self._projects[project_name] = project - return project - - def add_parsed_project(self, project: DbtProject) -> DbtProject: - """Add an already instantiated DbtProject.""" - self._projects.setdefault(project.config.project_name, project) - if self._default_project is None: - self._default_project = project.config.project_name - return project - - def add_project_from_args(self, config: DbtConfiguration) -> DbtProject: - """Add a DbtProject from a DbtConfiguration.""" - project = DbtProject.from_config(config) - self._projects.setdefault(project.config.project_name, project) - return project - - def drop_project(self, project_name: str) -> None: - """Drop a DbtProject.""" - project = self.get_project(project_name) - if project is None: - return - # Encourage garbage collection - project.clear_internal_caches() - project.adapter.connections.cleanup_all() - self._projects.pop(project_name) - if self._default_project == project_name: - if len(self) > 0: - self._default_project = list(self._projects.keys())[0] - else: - self._default_project = None - - def drop_all_projects(self) -> None: - """Drop all DbtProject's in the container.""" - self._default_project = None - for project in self._projects: - self.drop_project(project) - - def reparse_all_projects(self) -> None: - """Reparse all projects.""" - for project in self: - project.safe_parse_project() - - def registered_projects(self) -> List[str]: - """Grab all registered project names.""" - return list(self._projects.keys()) - - def __len__(self) -> int: - """Allow len(DbtProjectContainer).""" - return len(self._projects) - - def __getitem__(self, project: str) -> DbtProject: - """Allow DbtProjectContainer['jaffle_shop'].""" - maybe_project = self.get_project(project) - if maybe_project is None: - raise KeyError(project) - return maybe_project - - def __setitem__(self, name: str, project: DbtProject) -> None: - """Allow DbtProjectContainer['jaffle_shop'] = DbtProject.""" - if self._default_project is None: - self._default_project = name - self._projects[name] = project - - def __delitem__(self, project: str) -> None: - """Allow del DbtProjectContainer['jaffle_shop'].""" - self.drop_project(project) - - def __iter__(self) -> Generator[DbtProject, None, None]: - """Allow project for project in DbtProjectContainer.""" - for project in self._projects: - maybe_project = self.get_project(project) - if maybe_project is None: - continue - yield maybe_project - - def __contains__(self, project: str) -> bool: - """Allow 'jaffle_shop' in DbtProjectContainer.""" - return project in self._projects - - def __repr__(self) -> str: - """Canonical string representation of DbtProjectContainer instance.""" - return "\n".join( - f"Project: {project.project_name}, Dir: {project.project_root}" for project in self - ) - - -def has_jinja(query: str) -> bool: - """Check if a query contains any Jinja control sequences.""" - return any(seq in query for seq in JINJA_CONTROL_SEQUENCES) - - -def semvar_to_tuple(semvar: "VersionSpecifier") -> Tuple[int, int, int]: - """Convert a semvar to a tuple of ints.""" - return (int(semvar.major or 0), int(semvar.minor or 0), int(semvar.patch or 0)) - - -# endregion - -# region bottle.py - - -def _cli_parse(args): # pragma: no coverage - from argparse import ArgumentParser - - parser = ArgumentParser(prog=args[0], usage="%(prog)s [options] package.module:app") - opt = parser.add_argument - opt("--version", action="store_true", help="show version number.") - opt("-b", "--bind", metavar="ADDRESS", help="bind socket to ADDRESS.") - opt("-s", "--server", default="wsgiref", help="use SERVER as backend.") - opt("-p", "--plugin", action="append", help="install additional plugin/s.") - opt("-c", "--conf", action="append", metavar="FILE", help="load config values from FILE.") - opt("-C", "--param", action="append", metavar="NAME=VALUE", help="override config values.") - opt("--debug", action="store_true", help="start server in debug mode.") - opt("--reload", action="store_true", help="auto-reload on file changes.") - opt("app", help="WSGI app entry point.", nargs="?") - - cli_args = parser.parse_args(args[1:]) - - return cli_args, parser - - -py = sys.version_info -py3k = py.major > 2 - - -def getargspec(func): - spec = getfullargspec(func) - kwargs = makelist(spec[0]) + makelist(spec.kwonlyargs) - return kwargs, spec[1], spec[2], spec[3] - - -basestring = str -unicode = str -json_loads = lambda s: json_lds(touni(s)) -callable = lambda x: hasattr(x, "__call__") -imap = map - - -def _raise(*a): - raise a[0](a[1]).with_traceback(a[2]) - - -# Some helpers for string/byte handling -def tob(s, enc="utf8"): - if isinstance(s, unicode): - return s.encode(enc) - return b"" if s is None else bytes(s) - - -def touni(s, enc="utf8", err="strict"): - if isinstance(s, bytes): - return s.decode(enc, err) - return unicode("" if s is None else s) - - -tonat = touni if py3k else tob - - -def _stderr(*args): - try: - print(*args, file=sys.stderr) - except (OSError, AttributeError): - pass # Some environments do not allow printing (mod_wsgi) - - -# A bug in functools causes it to break if the wrapper is an instance method -def update_wrapper(wrapper, wrapped, *a, **ka): - try: - functools.update_wrapper(wrapper, wrapped, *a, **ka) - except AttributeError: - pass - - -# These helpers are used at module level and need to be defined first. -# And yes, I know PEP-8, but sometimes a lower-case classname makes more sense. - - -def depr(major, minor, cause, fix): - text = ( - "Warning: Use of deprecated feature or API. (Deprecated in Bottle-%d.%d)\n" - "Cause: %s\n" - "Fix: %s\n" % (major, minor, cause, fix) - ) - if DEBUG == "strict": - raise DeprecationWarning(text) - warnings.warn(text, DeprecationWarning, stacklevel=3) - return DeprecationWarning(text) - - -def makelist(data): # This is just too handy - if isinstance(data, (tuple, list, set, dict)): - return list(data) - elif data: - return [data] - else: - return [] - - -class DictProperty: - """Property that maps to a key in a local dict-like attribute.""" - - def __init__(self, attr, key=None, read_only=False): - self.attr, self.key, self.read_only = attr, key, read_only - - def __call__(self, func): - functools.update_wrapper(self, func, updated=[]) - self.getter, self.key = func, self.key or func.__name__ - return self - - def __get__(self, obj, cls): - if obj is None: - return self - key, storage = self.key, getattr(obj, self.attr) - if key not in storage: - storage[key] = self.getter(obj) - return storage[key] - - def __set__(self, obj, value): - if self.read_only: - raise AttributeError("Read-Only property.") - getattr(obj, self.attr)[self.key] = value - - def __delete__(self, obj): - if self.read_only: - raise AttributeError("Read-Only property.") - del getattr(obj, self.attr)[self.key] - - -class cached_property: - """A property that is only computed once per instance and then replaces - itself with an ordinary attribute. Deleting the attribute resets the - property.""" - - def __init__(self, func): - update_wrapper(self, func) - self.func = func - - def __get__(self, obj, cls): - if obj is None: - return self - value = obj.__dict__[self.func.__name__] = self.func(obj) - return value - - -class lazy_attribute: - """A property that caches itself to the class object.""" - - def __init__(self, func): - functools.update_wrapper(self, func, updated=[]) - self.getter = func - - def __get__(self, obj, cls): - value = self.getter(cls) - setattr(cls, self.__name__, value) - return value - - -class BottleException(Exception): - """A base class for exceptions used by bottle.""" - - pass - - -class RouteError(BottleException): - """This is a base class for all routing related exceptions""" - - -class RouteReset(BottleException): - """If raised by a plugin or request handler, the route is reset and all - plugins are re-applied.""" - - -class RouterUnknownModeError(RouteError): - pass - - -class RouteSyntaxError(RouteError): - """The route parser found something not supported by this router.""" - - -class RouteBuildError(RouteError): - """The route could not be built.""" - - -def _re_flatten(p): - """Turn all capturing groups in a regular expression pattern into - non-capturing groups.""" - if "(" not in p: - return p - return re.sub( - r"(\\*)(\(\?P<[^>]+>|\((?!\?))", - lambda m: m.group(0) if len(m.group(1)) % 2 else m.group(1) + "(?:", - p, - ) - - -class Router: - """A Router is an ordered collection of route->target pairs. It is used to - efficiently match WSGI requests against a number of routes and return - the first target that satisfies the request. The target may be anything, - usually a string, ID or callable object. A route consists of a path-rule - and a HTTP method. - - The path-rule is either a static path (e.g. `/contact`) or a dynamic - path that contains wildcards (e.g. `/wiki/`). The wildcard syntax - and details on the matching order are described in docs:`routing`. - """ - - default_pattern = "[^/]+" - default_filter = "re" - - #: The current CPython regexp implementation does not allow more - #: than 99 matching groups per regular expression. - _MAX_GROUPS_PER_PATTERN = 99 - - def __init__(self, strict=False): - self.rules = [] # All rules in order - self._groups = {} # index of regexes to find them in dyna_routes - self.builder = {} # Data structure for the url builder - self.static = {} # Search structure for static routes - self.dyna_routes = {} - self.dyna_regexes = {} # Search structure for dynamic routes - #: If true, static routes are no longer checked first. - self.strict_order = strict - self.filters = { - "re": lambda conf: (_re_flatten(conf or self.default_pattern), None, None), - "int": lambda conf: (r"-?\d+", int, lambda x: str(int(x))), - "float": lambda conf: (r"-?[\d.]+", float, lambda x: str(float(x))), - "path": lambda conf: (r".+?", None, None), - } - - def add_filter(self, name, func): - """Add a filter. The provided function is called with the configuration - string as parameter and must return a (regexp, to_python, to_url) tuple. - The first element is a string, the last two are callables or None.""" - self.filters[name] = func - - rule_syntax = re.compile( - "(\\\\*)" - "(?:(?::([a-zA-Z_][a-zA-Z_0-9]*)?()(?:#(.*?)#)?)" - "|(?:<([a-zA-Z_][a-zA-Z_0-9]*)?(?::([a-zA-Z_]*)" - "(?::((?:\\\\.|[^\\\\>])+)?)?)?>))" - ) - - def _itertokens(self, rule): - offset, prefix = 0, "" - for match in self.rule_syntax.finditer(rule): - prefix += rule[offset : match.start()] - g = match.groups() - if g[2] is not None: - depr(0, 13, "Use of old route syntax.", "Use instead of :name in routes.") - if len(g[0]) % 2: # Escaped wildcard - prefix += match.group(0)[len(g[0]) :] - offset = match.end() - continue - if prefix: - yield prefix, None, None - name, filtr, conf = g[4:7] if g[2] is None else g[1:4] - yield name, filtr or "default", conf or None - offset, prefix = match.end(), "" - if offset <= len(rule) or prefix: - yield prefix + rule[offset:], None, None - - def add(self, rule, method, target, name=None): - """Add a new rule or replace the target for an existing rule.""" - anons = 0 # Number of anonymous wildcards found - keys = [] # Names of keys - pattern = "" # Regular expression pattern with named groups - filters = [] # Lists of wildcard input filters - builder = [] # Data structure for the URL builder - is_static = True - - for key, mode, conf in self._itertokens(rule): - if mode: - is_static = False - if mode == "default": - mode = self.default_filter - mask, in_filter, out_filter = self.filters[mode](conf) - if not key: - pattern += "(?:%s)" % mask - key = "anon%d" % anons - anons += 1 - else: - pattern += f"(?P<{key}>{mask})" - keys.append(key) - if in_filter: - filters.append((key, in_filter)) - builder.append((key, out_filter or str)) - elif key: - pattern += re.escape(key) - builder.append((None, key)) - - self.builder[rule] = builder - if name: - self.builder[name] = builder - - if is_static and not self.strict_order: - self.static.setdefault(method, {}) - self.static[method][self.build(rule)] = (target, None) - return - - try: - re_pattern = re.compile("^(%s)$" % pattern) - re_match = re_pattern.match - except re.error as e: - raise RouteSyntaxError(f"Could not add Route: {rule} ({e})") - - if filters: - - def getargs(path): - url_args = re_match(path).groupdict() - for name, wildcard_filter in filters: - try: - url_args[name] = wildcard_filter(url_args[name]) - except ValueError: - raise HTTPError(400, "Path has wrong format.") - return url_args - - elif re_pattern.groupindex: - - def getargs(path): - return re_match(path).groupdict() - - else: - getargs = None - - flatpat = _re_flatten(pattern) - whole_rule = (rule, flatpat, target, getargs) - - if (flatpat, method) in self._groups: - if DEBUG: - msg = "Route <%s %s> overwrites a previously defined route" - warnings.warn(msg % (method, rule), RuntimeWarning) - self.dyna_routes[method][self._groups[flatpat, method]] = whole_rule - else: - self.dyna_routes.setdefault(method, []).append(whole_rule) - self._groups[flatpat, method] = len(self.dyna_routes[method]) - 1 - - self._compile(method) - - def _compile(self, method): - all_rules = self.dyna_routes[method] - comborules = self.dyna_regexes[method] = [] - maxgroups = self._MAX_GROUPS_PER_PATTERN - for x in range(0, len(all_rules), maxgroups): - some = all_rules[x : x + maxgroups] - combined = (flatpat for (_, flatpat, _, _) in some) - combined = "|".join("(^%s$)" % flatpat for flatpat in combined) - combined = re.compile(combined).match - rules = [(target, getargs) for (_, _, target, getargs) in some] - comborules.append((combined, rules)) - - def build(self, _name, *anons, **query): - """Build an URL by filling the wildcards in a rule.""" - builder = self.builder.get(_name) - if not builder: - raise RouteBuildError("No route with that name.", _name) - try: - for i, value in enumerate(anons): - query["anon%d" % i] = value - url = "".join([f(query.pop(n)) if n else f for (n, f) in builder]) - return url if not query else url + "?" + urlencode(query) - except KeyError as E: - raise RouteBuildError("Missing URL argument: %r" % E.args[0]) - - def match(self, environ): - """Return a (target, url_args) tuple or raise HTTPError(400/404/405).""" - verb = environ["REQUEST_METHOD"].upper() - path = environ["PATH_INFO"] or "/" - - methods = ("PROXY", "HEAD", "GET", "ANY") if verb == "HEAD" else ("PROXY", verb, "ANY") - - for method in methods: - if method in self.static and path in self.static[method]: - target, getargs = self.static[method][path] - return target, getargs(path) if getargs else {} - elif method in self.dyna_regexes: - for combined, rules in self.dyna_regexes[method]: - match = combined(path) - if match: - target, getargs = rules[match.lastindex - 1] - return target, getargs(path) if getargs else {} - - # No matching route found. Collect alternative methods for 405 response - allowed = set() - nocheck = set(methods) - for method in set(self.static) - nocheck: - if path in self.static[method]: - allowed.add(method) - for method in set(self.dyna_regexes) - allowed - nocheck: - for combined, rules in self.dyna_regexes[method]: - match = combined(path) - if match: - allowed.add(method) - if allowed: - allow_header = ",".join(sorted(allowed)) - raise HTTPError(405, "Method not allowed.", Allow=allow_header) - - # No matching route and no alternative method found. We give up - raise HTTPError(404, "Not found: " + repr(path)) - - -class Route: - """This class wraps a route callback along with route specific metadata and - configuration and applies Plugins on demand. It is also responsible for - turning an URL path rule into a regular expression usable by the Router. - """ - - def __init__( - self, app, rule, method, callback, name=None, plugins=None, skiplist=None, **config - ): - #: The application this route is installed to. - self.app = app - #: The path-rule string (e.g. ``/wiki/``). - self.rule = rule - #: The HTTP method as a string (e.g. ``GET``). - self.method = method - #: The original callback with no plugins applied. Useful for introspection. - self.callback = callback - #: The name of the route (if specified) or ``None``. - self.name = name or None - #: A list of route-specific plugins (see :meth:`Bottle.route`). - self.plugins = plugins or [] - #: A list of plugins to not apply to this route (see :meth:`Bottle.route`). - self.skiplist = skiplist or [] - #: Additional keyword arguments passed to the :meth:`Bottle.route` - #: decorator are stored in this dictionary. Used for route-specific - #: plugin configuration and meta-data. - self.config = app.config._make_overlay() - self.config.load_dict(config) - - @cached_property - def call(self): - """The route callback with all plugins applied. This property is - created on demand and then cached to speed up subsequent requests.""" - return self._make_callback() - - def reset(self): - """Forget any cached values. The next time :attr:`call` is accessed, - all plugins are re-applied.""" - self.__dict__.pop("call", None) - - def prepare(self): - """Do all on-demand work immediately (useful for debugging).""" - self.call - - def all_plugins(self): - """Yield all Plugins affecting this route.""" - unique = set() - for p in reversed(self.app.plugins + self.plugins): - if True in self.skiplist: - break - name = getattr(p, "name", False) - if name and (name in self.skiplist or name in unique): - continue - if p in self.skiplist or type(p) in self.skiplist: - continue - if name: - unique.add(name) - yield p - - def _make_callback(self): - callback = self.callback - for plugin in self.all_plugins(): - try: - if hasattr(plugin, "apply"): - callback = plugin.apply(callback, self) - else: - callback = plugin(callback) - except RouteReset: # Try again with changed configuration. - return self._make_callback() - if callback is not self.callback: - update_wrapper(callback, self.callback) - return callback - - def get_undecorated_callback(self): - """Return the callback. If the callback is a decorated function, try to - recover the original function.""" - func = self.callback - func = getattr(func, "__func__" if py3k else "im_func", func) - closure_attr = "__closure__" if py3k else "func_closure" - while hasattr(func, closure_attr) and getattr(func, closure_attr): - attributes = getattr(func, closure_attr) - func = attributes[0].cell_contents - - # in case of decorators with multiple arguments - if not isinstance(func, FunctionType): - # pick first FunctionType instance from multiple arguments - func = filter( - lambda x: isinstance(x, FunctionType), - map(lambda x: x.cell_contents, attributes), - ) - func = list(func)[0] # py3 support - return func - - def get_callback_args(self): - """Return a list of argument names the callback (most likely) accepts - as keyword arguments. If the callback is a decorated function, try - to recover the original function before inspection.""" - return getargspec(self.get_undecorated_callback())[0] - - def get_config(self, key, default=None): - """Lookup a config field and return its value, first checking the - route.config, then route.app.config.""" - depr( - 0, - 13, - "Route.get_config() is deprecated.", - "The Route.config property already includes values from the" - " application config for missing keys. Access it directly.", - ) - return self.config.get(key, default) - - def __repr__(self): - cb = self.get_undecorated_callback() - return f"<{self.method} {self.rule} -> {cb.__module__}:{cb.__name__}>" - - -class Bottle: - """Each Bottle object represents a single, distinct web application and - consists of routes, callbacks, plugins, resources and configuration. - Instances are callable WSGI applications. - - :param catchall: If true (default), handle all exceptions. Turn off to - let debugging middleware handle exceptions. - """ - - @lazy_attribute - def _global_config(cls): - cfg = ConfigDict() - cfg.meta_set("catchall", "validate", bool) - return cfg - - def __init__(self, **kwargs): - #: A :class:`ConfigDict` for app specific configuration. - self.config = self._global_config._make_overlay() - self.config._add_change_listener(functools.partial(self.trigger_hook, "config")) - - self.config.update({"catchall": True}) - - if kwargs.get("catchall") is False: - depr( - 0, - 13, - "Bottle(catchall) keyword argument.", - "The 'catchall' setting is now part of the app " - "configuration. Fix: `app.config['catchall'] = False`", - ) - self.config["catchall"] = False - if kwargs.get("autojson") is False: - depr( - 0, - 13, - "Bottle(autojson) keyword argument.", - "The 'autojson' setting is now part of the app " - "configuration. Fix: `app.config['json.enable'] = False`", - ) - self.config["json.disable"] = True - - self._mounts = [] - - #: A :class:`ResourceManager` for application files - self.resources = ResourceManager() - - self.routes = [] # List of installed :class:`Route` instances. - self.router = Router() # Maps requests to :class:`Route` instances. - self.error_handler = {} - - # Core plugins - self.plugins = [] # List of installed plugins. - self.install(JSONPlugin()) - self.install(TemplatePlugin()) - - #: If true, most exceptions are caught and returned as :exc:`HTTPError` - catchall = DictProperty("config", "catchall") - - __hook_names = "before_request", "after_request", "app_reset", "config" - __hook_reversed = {"after_request"} - - @cached_property - def _hooks(self): - return {name: [] for name in self.__hook_names} - - def add_hook(self, name, func): - """Attach a callback to a hook. Three hooks are currently implemented: - - before_request - Executed once before each request. The request context is - available, but no routing has happened yet. - after_request - Executed once after each request regardless of its outcome. - app_reset - Called whenever :meth:`Bottle.reset` is called. - """ - if name in self.__hook_reversed: - self._hooks[name].insert(0, func) - else: - self._hooks[name].append(func) - - def remove_hook(self, name, func): - """Remove a callback from a hook.""" - if name in self._hooks and func in self._hooks[name]: - self._hooks[name].remove(func) - return True - - def trigger_hook(self, __name, *args, **kwargs): - """Trigger a hook and return a list of results.""" - return [hook(*args, **kwargs) for hook in self._hooks[__name][:]] - - def hook(self, name): - """Return a decorator that attaches a callback to a hook. See - :meth:`add_hook` for details.""" - - def decorator(func): - self.add_hook(name, func) - return func - - return decorator - - def _mount_wsgi(self, prefix, app, **options): - segments = [p for p in prefix.split("/") if p] - if not segments: - raise ValueError('WSGI applications cannot be mounted to "/".') - path_depth = len(segments) - - def mountpoint_wrapper(): - try: - request.path_shift(path_depth) - rs = HTTPResponse([]) - - def start_response(status, headerlist, exc_info=None): - if exc_info: - _raise(*exc_info) - if py3k: - # Errors here mean that the mounted WSGI app did not - # follow PEP-3333 (which requires latin1) or used a - # pre-encoding other than utf8 :/ - status = status.encode("latin1").decode("utf8") - headerlist = [ - (k, v.encode("latin1").decode("utf8")) for (k, v) in headerlist - ] - rs.status = status - for name, value in headerlist: - rs.add_header(name, value) - return rs.body.append - - body = app(request.environ, start_response) - rs.body = itertools.chain(rs.body, body) if rs.body else body - return rs - finally: - request.path_shift(-path_depth) - - options.setdefault("skip", True) - options.setdefault("method", "PROXY") - options.setdefault("mountpoint", {"prefix": prefix, "target": app}) - options["callback"] = mountpoint_wrapper - - self.route("/%s/<:re:.*>" % "/".join(segments), **options) - if not prefix.endswith("/"): - self.route("/" + "/".join(segments), **options) - - def _mount_app(self, prefix, app, **options): - if app in self._mounts or "_mount.app" in app.config: - depr( - 0, - 13, - "Application mounted multiple times. Falling back to WSGI mount.", - "Clone application before mounting to a different location.", - ) - return self._mount_wsgi(prefix, app, **options) - - if options: - depr( - 0, - 13, - "Unsupported mount options. Falling back to WSGI mount.", - "Do not specify any route options when mounting bottle application.", - ) - return self._mount_wsgi(prefix, app, **options) - - if not prefix.endswith("/"): - depr( - 0, - 13, - "Prefix must end in '/'. Falling back to WSGI mount.", - "Consider adding an explicit redirect from '/prefix' to '/prefix/' in the" - " parent application.", - ) - return self._mount_wsgi(prefix, app, **options) - - self._mounts.append(app) - app.config["_mount.prefix"] = prefix - app.config["_mount.app"] = self - for route in app.routes: - route.rule = prefix + route.rule.lstrip("/") - self.add_route(route) - - def mount(self, prefix, app, **options): - """Mount an application (:class:`Bottle` or plain WSGI) to a specific - URL prefix. Example:: - - parent_app.mount('/prefix/', child_app) - - :param prefix: path prefix or `mount-point`. - :param app: an instance of :class:`Bottle` or a WSGI application. - - Plugins from the parent application are not applied to the routes - of the mounted child application. If you need plugins in the child - application, install them separately. - - While it is possible to use path wildcards within the prefix path - (:class:`Bottle` childs only), it is highly discouraged. - - The prefix path must end with a slash. If you want to access the - root of the child application via `/prefix` in addition to - `/prefix/`, consider adding a route with a 307 redirect to the - parent application. - """ - - if not prefix.startswith("/"): - raise ValueError("Prefix must start with '/'") - - if isinstance(app, Bottle): - return self._mount_app(prefix, app, **options) - else: - return self._mount_wsgi(prefix, app, **options) - - def merge(self, routes): - """Merge the routes of another :class:`Bottle` application or a list of - :class:`Route` objects into this application. The routes keep their - 'owner', meaning that the :data:`Route.app` attribute is not - changed.""" - if isinstance(routes, Bottle): - routes = routes.routes - for route in routes: - self.add_route(route) - - def install(self, plugin): - """Add a plugin to the list of plugins and prepare it for being - applied to all routes of this application. A plugin may be a simple - decorator or an object that implements the :class:`Plugin` API. - """ - if hasattr(plugin, "setup"): - plugin.setup(self) - if not callable(plugin) and not hasattr(plugin, "apply"): - raise TypeError("Plugins must be callable or implement .apply()") - self.plugins.append(plugin) - self.reset() - return plugin - - def uninstall(self, plugin): - """Uninstall plugins. Pass an instance to remove a specific plugin, a type - object to remove all plugins that match that type, a string to remove - all plugins with a matching ``name`` attribute or ``True`` to remove all - plugins. Return the list of removed plugins.""" - removed, remove = [], plugin - for i, plugin in list(enumerate(self.plugins))[::-1]: - if ( - remove is True - or remove is plugin - or remove is type(plugin) - or getattr(plugin, "name", True) == remove - ): - removed.append(plugin) - del self.plugins[i] - if hasattr(plugin, "close"): - plugin.close() - if removed: - self.reset() - return removed - - def reset(self, route=None): - """Reset all routes (force plugins to be re-applied) and clear all - caches. If an ID or route object is given, only that specific route - is affected.""" - if route is None: - routes = self.routes - elif isinstance(route, Route): - routes = [route] - else: - routes = [self.routes[route]] - for route in routes: - route.reset() - if DEBUG: - for route in routes: - route.prepare() - self.trigger_hook("app_reset") - - def close(self): - """Close the application and all installed plugins.""" - for plugin in self.plugins: - if hasattr(plugin, "close"): - plugin.close() - - def run(self, **kwargs): - """Calls :func:`run` with the same parameters.""" - run(self, **kwargs) - - def match(self, environ): - """Search for a matching route and return a (:class:`Route`, urlargs) - tuple. The second value is a dictionary with parameters extracted - from the URL. Raise :exc:`HTTPError` (404/405) on a non-match.""" - return self.router.match(environ) - - def get_url(self, routename, **kargs): - """Return a string that matches a named route""" - scriptname = request.environ.get("SCRIPT_NAME", "").strip("/") + "/" - location = self.router.build(routename, **kargs).lstrip("/") - return urljoin(urljoin("/", scriptname), location) - - def add_route(self, route): - """Add a route object, but do not change the :data:`Route.app` - attribute.""" - self.routes.append(route) - self.router.add(route.rule, route.method, route, name=route.name) - if DEBUG: - route.prepare() - - def route( - self, path=None, method="GET", callback=None, name=None, apply=None, skip=None, **config - ): - """A decorator to bind a function to a request URL. Example:: - - @app.route('/hello/') - def hello(name): - return 'Hello %s' % name - - The ```` part is a wildcard. See :class:`Router` for syntax - details. - - :param path: Request path or a list of paths to listen to. If no - path is specified, it is automatically generated from the - signature of the function. - :param method: HTTP method (`GET`, `POST`, `PUT`, ...) or a list of - methods to listen to. (default: `GET`) - :param callback: An optional shortcut to avoid the decorator - syntax. ``route(..., callback=func)`` equals ``route(...)(func)`` - :param name: The name for this route. (default: None) - :param apply: A decorator or plugin or a list of plugins. These are - applied to the route callback in addition to installed plugins. - :param skip: A list of plugins, plugin classes or names. Matching - plugins are not installed to this route. ``True`` skips all. - - Any additional keyword arguments are stored as route-specific - configuration and passed to plugins (see :meth:`Plugin.apply`). - """ - if callable(path): - path, callback = None, path - plugins = makelist(apply) - skiplist = makelist(skip) - - def decorator(callback): - if isinstance(callback, basestring): - callback = load(callback) - for rule in makelist(path) or yieldroutes(callback): - for verb in makelist(method): - verb = verb.upper() - route = Route( - self, - rule, - verb, - callback, - name=name, - plugins=plugins, - skiplist=skiplist, - **config, - ) - self.add_route(route) - return callback - - return decorator(callback) if callback else decorator - - def get(self, path=None, method="GET", **options): - """Equals :meth:`route`.""" - return self.route(path, method, **options) - - def post(self, path=None, method="POST", **options): - """Equals :meth:`route` with a ``POST`` method parameter.""" - return self.route(path, method, **options) - - def put(self, path=None, method="PUT", **options): - """Equals :meth:`route` with a ``PUT`` method parameter.""" - return self.route(path, method, **options) - - def delete(self, path=None, method="DELETE", **options): - """Equals :meth:`route` with a ``DELETE`` method parameter.""" - return self.route(path, method, **options) - - def patch(self, path=None, method="PATCH", **options): - """Equals :meth:`route` with a ``PATCH`` method parameter.""" - return self.route(path, method, **options) - - def error(self, code=500, callback=None): - """Register an output handler for a HTTP error code. Can - be used as a decorator or called directly :: - - def error_handler_500(error): - return 'error_handler_500' - - app.error(code=500, callback=error_handler_500) - - @app.error(404) - def error_handler_404(error): - return 'error_handler_404' - - """ - - def decorator(callback): - if isinstance(callback, basestring): - callback = load(callback) - self.error_handler[int(code)] = callback - return callback - - return decorator(callback) if callback else decorator - - def default_error_handler(self, res): - return tob( - template( - ERROR_PAGE_TEMPLATE, e=res, template_settings=dict(name="__ERROR_PAGE_TEMPLATE") - ) - ) - - def _handle(self, environ): - path = environ["bottle.raw_path"] = environ["PATH_INFO"] - if py3k: - environ["PATH_INFO"] = path.encode("latin1").decode("utf8", "ignore") - - environ["bottle.app"] = self - request.bind(environ) - response.bind() - - try: - while True: # Remove in 0.14 together with RouteReset - out = None - try: - self.trigger_hook("before_request") - route, args = self.router.match(environ) - environ["route.handle"] = route - environ["bottle.route"] = route - environ["route.url_args"] = args - out = route.call(**args) - break - except HTTPResponse as E: - out = E - break - except RouteReset: - depr( - 0, - 13, - "RouteReset exception deprecated", - "Call route.call() after route.reset() and return the result.", - ) - route.reset() - continue - finally: - if isinstance(out, HTTPResponse): - out.apply(response) - try: - self.trigger_hook("after_request") - except HTTPResponse as E: - out = E - out.apply(response) - except (KeyboardInterrupt, SystemExit, MemoryError): - raise - except Exception as E: - if not self.catchall: - raise - stacktrace = format_exc() - environ["wsgi.errors"].write(stacktrace) - environ["wsgi.errors"].flush() - environ["bottle.exc_info"] = sys.exc_info() - out = HTTPError(500, "Internal Server Error", E, stacktrace) - out.apply(response) - - return out - - def _cast(self, out, peek=None): - """Try to convert the parameter into something WSGI compatible and set - correct HTTP headers when possible. - Support: False, str, unicode, dict, HTTPResponse, HTTPError, file-like, - iterable of strings and iterable of unicodes - """ - - # Empty output is done here - if not out: - if "Content-Length" not in response: - response["Content-Length"] = 0 - return [] - # Join lists of byte or unicode strings. Mixed lists are NOT supported - if isinstance(out, (tuple, list)) and isinstance(out[0], (bytes, unicode)): - out = out[0][0:0].join(out) # b'abc'[0:0] -> b'' - # Encode unicode strings - if isinstance(out, unicode): - out = out.encode(response.charset) - # Byte Strings are just returned - if isinstance(out, bytes): - if "Content-Length" not in response: - response["Content-Length"] = len(out) - return [out] - # HTTPError or HTTPException (recursive, because they may wrap anything) - # TODO: Handle these explicitly in handle() or make them iterable. - if isinstance(out, HTTPError): - out.apply(response) - out = self.error_handler.get(out.status_code, self.default_error_handler)(out) - return self._cast(out) - if isinstance(out, HTTPResponse): - out.apply(response) - return self._cast(out.body) - - # File-like objects. - if hasattr(out, "read"): - if "wsgi.file_wrapper" in request.environ: - return request.environ["wsgi.file_wrapper"](out) - elif hasattr(out, "close") or not hasattr(out, "__iter__"): - return WSGIFileWrapper(out) - - # Handle Iterables. We peek into them to detect their inner type. - try: - iout = iter(out) - first = next(iout) - while not first: - first = next(iout) - except StopIteration: - return self._cast("") - except HTTPResponse as E: - first = E - except (KeyboardInterrupt, SystemExit, MemoryError): - raise - except Exception as error: - if not self.catchall: - raise - first = HTTPError(500, "Unhandled exception", error, format_exc()) - - # These are the inner types allowed in iterator or generator objects. - if isinstance(first, HTTPResponse): - return self._cast(first) - elif isinstance(first, bytes): - new_iter = itertools.chain([first], iout) - elif isinstance(first, unicode): - encoder = lambda x: x.encode(response.charset) - new_iter = imap(encoder, itertools.chain([first], iout)) - else: - msg = "Unsupported response type: %s" % type(first) - return self._cast(HTTPError(500, msg)) - if hasattr(out, "close"): - new_iter = _closeiter(new_iter, out.close) - return new_iter - - def wsgi(self, environ, start_response): - """The bottle WSGI-interface.""" - try: - out = self._cast(self._handle(environ)) - # rfc2616 section 4.3 - if response._status_code in (100, 101, 204, 304) or environ["REQUEST_METHOD"] == "HEAD": - if hasattr(out, "close"): - out.close() - out = [] - exc_info = environ.get("bottle.exc_info") - if exc_info is not None: - del environ["bottle.exc_info"] - start_response(response._wsgi_status_line(), response.headerlist, exc_info) - return out - except (KeyboardInterrupt, SystemExit, MemoryError): - raise - except Exception as E: - if not self.catchall: - raise - err = "

Critical error while processing request: %s

" % html_escape( - environ.get("PATH_INFO", "/") - ) - if DEBUG: - err += ( - "

Error:

\n
\n%s\n
\n

Traceback:

\n
\n%s\n
\n" - % (html_escape(repr(E)), html_escape(format_exc())) - ) - environ["wsgi.errors"].write(err) - environ["wsgi.errors"].flush() - headers = [("Content-Type", "text/html; charset=UTF-8")] - start_response("500 INTERNAL SERVER ERROR", headers, sys.exc_info()) - return [tob(err)] - - def __call__(self, environ, start_response): - """Each instance of :class:'Bottle' is a WSGI application.""" - return self.wsgi(environ, start_response) - - def __enter__(self): - """Use this application as default for all module-level shortcuts.""" - default_app.push(self) - return self - - def __exit__(self, exc_type, exc_value, traceback): - default_app.pop() - - def __setattr__(self, name, value): - if name in self.__dict__: - raise AttributeError("Attribute %s already defined. Plugin conflict?" % name) - self.__dict__[name] = value - - -class BaseRequest: - """A wrapper for WSGI environment dictionaries that adds a lot of - convenient access methods and properties. Most of them are read-only. - - Adding new attributes to a request actually adds them to the environ - dictionary (as 'bottle.request.ext.'). This is the recommended - way to store and access request-specific data. - """ - - __slots__ = ("environ",) - - #: Maximum size of memory buffer for :attr:`body` in bytes. - MEMFILE_MAX = 102400 - - def __init__(self, environ=None): - """Wrap a WSGI environ dictionary.""" - #: The wrapped WSGI environ dictionary. This is the only real attribute. - #: All other attributes actually are read-only properties. - self.environ = {} if environ is None else environ - self.environ["bottle.request"] = self - - @DictProperty("environ", "bottle.app", read_only=True) - def app(self): - """Bottle application handling this request.""" - raise RuntimeError("This request is not connected to an application.") - - @DictProperty("environ", "bottle.route", read_only=True) - def route(self): - """The bottle :class:`Route` object that matches this request.""" - raise RuntimeError("This request is not connected to a route.") - - @DictProperty("environ", "route.url_args", read_only=True) - def url_args(self): - """The arguments extracted from the URL.""" - raise RuntimeError("This request is not connected to a route.") - - @property - def path(self): - """The value of ``PATH_INFO`` with exactly one prefixed slash (to fix - broken clients and avoid the "empty path" edge case).""" - return "/" + self.environ.get("PATH_INFO", "").lstrip("/") - - @property - def method(self): - """The ``REQUEST_METHOD`` value as an uppercase string.""" - return self.environ.get("REQUEST_METHOD", "GET").upper() - - @DictProperty("environ", "bottle.request.headers", read_only=True) - def headers(self): - """A :class:`WSGIHeaderDict` that provides case-insensitive access to - HTTP request headers.""" - return WSGIHeaderDict(self.environ) - - def get_header(self, name, default=None): - """Return the value of a request header, or a given default value.""" - return self.headers.get(name, default) - - @DictProperty("environ", "bottle.request.cookies", read_only=True) - def cookies(self): - """Cookies parsed into a :class:`FormsDict`. Signed cookies are NOT - decoded. Use :meth:`get_cookie` if you expect signed cookies.""" - cookies = SimpleCookie(self.environ.get("HTTP_COOKIE", "")).values() - return FormsDict((c.key, c.value) for c in cookies) - - def get_cookie(self, key, default=None, secret=None, digestmod=hashlib.sha256): - """Return the content of a cookie. To read a `Signed Cookie`, the - `secret` must match the one used to create the cookie (see - :meth:`BaseResponse.set_cookie`). If anything goes wrong (missing - cookie or wrong signature), return a default value.""" - value = self.cookies.get(key) - if secret: - # See BaseResponse.set_cookie for details on signed cookies. - if value and value.startswith("!") and "?" in value: - sig, msg = map(tob, value[1:].split("?", 1)) - hash = hmac.new(tob(secret), msg, digestmod=digestmod).digest() - if _lscmp(sig, base64.b64encode(hash)): - dst = pickle.loads(base64.b64decode(msg)) - if dst and dst[0] == key: - return dst[1] - return default - return value or default - - @DictProperty("environ", "bottle.request.query", read_only=True) - def query(self): - """The :attr:`query_string` parsed into a :class:`FormsDict`. These - values are sometimes called "URL arguments" or "GET parameters", but - not to be confused with "URL wildcards" as they are provided by the - :class:`Router`.""" - get = self.environ["bottle.get"] = FormsDict() - pairs = _parse_qsl(self.environ.get("QUERY_STRING", "")) - for key, value in pairs: - get[key] = value - return get - - @DictProperty("environ", "bottle.request.forms", read_only=True) - def forms(self): - """Form values parsed from an `url-encoded` or `multipart/form-data` - encoded POST or PUT request body. The result is returned as a - :class:`FormsDict`. All keys and values are strings. File uploads - are stored separately in :attr:`files`.""" - forms = FormsDict() - forms.recode_unicode = self.POST.recode_unicode - for name, item in self.POST.allitems(): - if not isinstance(item, FileUpload): - forms[name] = item - return forms - - @DictProperty("environ", "bottle.request.params", read_only=True) - def params(self): - """A :class:`FormsDict` with the combined values of :attr:`query` and - :attr:`forms`. File uploads are stored in :attr:`files`.""" - params = FormsDict() - for key, value in self.query.allitems(): - params[key] = value - for key, value in self.forms.allitems(): - params[key] = value - return params - - @DictProperty("environ", "bottle.request.files", read_only=True) - def files(self): - """File uploads parsed from `multipart/form-data` encoded POST or PUT - request body. The values are instances of :class:`FileUpload`. - - """ - files = FormsDict() - files.recode_unicode = self.POST.recode_unicode - for name, item in self.POST.allitems(): - if isinstance(item, FileUpload): - files[name] = item - return files - - @DictProperty("environ", "bottle.request.json", read_only=True) - def json(self): - """If the ``Content-Type`` header is ``application/json`` or - ``application/json-rpc``, this property holds the parsed content - of the request body. Only requests smaller than :attr:`MEMFILE_MAX` - are processed to avoid memory exhaustion. - Invalid JSON raises a 400 error response. - """ - ctype = self.environ.get("CONTENT_TYPE", "").lower().split(";")[0] - if ctype in ("application/json", "application/json-rpc"): - b = self._get_body_string(self.MEMFILE_MAX) - if not b: - return None - try: - return json_loads(b) - except (ValueError, TypeError): - raise HTTPError(400, "Invalid JSON") - return None - - def _iter_body(self, read, bufsize): - maxread = max(0, self.content_length) - while maxread: - part = read(min(maxread, bufsize)) - if not part: - break - yield part - maxread -= len(part) - - @staticmethod - def _iter_chunked(read, bufsize): - err = HTTPError(400, "Error while parsing chunked transfer body.") - rn, sem, bs = tob("\r\n"), tob(";"), tob("") - while True: - header = read(1) - while header[-2:] != rn: - c = read(1) - header += c - if not c: - raise err - if len(header) > bufsize: - raise err - size, _, _ = header.partition(sem) - try: - maxread = int(tonat(size.strip()), 16) - except ValueError: - raise err - if maxread == 0: - break - buff = bs - while maxread > 0: - if not buff: - buff = read(min(maxread, bufsize)) - part, buff = buff[:maxread], buff[maxread:] - if not part: - raise err - yield part - maxread -= len(part) - if read(2) != rn: - raise err - - @DictProperty("environ", "bottle.request.body", read_only=True) - def _body(self): - try: - read_func = self.environ["wsgi.input"].read - except KeyError: - self.environ["wsgi.input"] = BytesIO() - return self.environ["wsgi.input"] - body_iter = self._iter_chunked if self.chunked else self._iter_body - body, body_size, is_temp_file = BytesIO(), 0, False - for part in body_iter(read_func, self.MEMFILE_MAX): - body.write(part) - body_size += len(part) - if not is_temp_file and body_size > self.MEMFILE_MAX: - body, tmp = NamedTemporaryFile(mode="w+b"), body - body.write(tmp.getvalue()) - del tmp - is_temp_file = True - self.environ["wsgi.input"] = body - body.seek(0) - return body - - def _get_body_string(self, maxread): - """Read body into a string. Raise HTTPError(413) on requests that are - too large.""" - if self.content_length > maxread: - raise HTTPError(413, "Request entity too large") - data = self.body.read(maxread + 1) - if len(data) > maxread: - raise HTTPError(413, "Request entity too large") - return data - - @property - def body(self): - """The HTTP request body as a seek-able file-like object. Depending on - :attr:`MEMFILE_MAX`, this is either a temporary file or a - :class:`io.BytesIO` instance. Accessing this property for the first - time reads and replaces the ``wsgi.input`` environ variable. - Subsequent accesses just do a `seek(0)` on the file object.""" - self._body.seek(0) - return self._body - - @property - def chunked(self): - """True if Chunked transfer encoding was.""" - return "chunked" in self.environ.get("HTTP_TRANSFER_ENCODING", "").lower() - - #: An alias for :attr:`query`. - GET = query - - @DictProperty("environ", "bottle.request.post", read_only=True) - def POST(self): - """The values of :attr:`forms` and :attr:`files` combined into a single - :class:`FormsDict`. Values are either strings (form values) or - instances of :class:`cgi.FieldStorage` (file uploads). - """ - post = FormsDict() - # We default to application/x-www-form-urlencoded for everything that - # is not multipart and take the fast path (also: 3.1 workaround) - if not self.content_type.startswith("multipart/"): - body = tonat(self._get_body_string(self.MEMFILE_MAX), "latin1") - for key, value in _parse_qsl(body): - post[key] = value - return post - - safe_env = {"QUERY_STRING": ""} # Build a safe environment for cgi - for key in ("REQUEST_METHOD", "CONTENT_TYPE", "CONTENT_LENGTH"): - if key in self.environ: - safe_env[key] = self.environ[key] - args = dict(fp=self.body, environ=safe_env, keep_blank_values=True) - - if py3k: - args["encoding"] = "utf8" - post.recode_unicode = False - data = cgi.FieldStorage(**args) - self["_cgi.FieldStorage"] = data # http://bugs.python.org/issue18394 - data = data.list or [] - for item in data: - if item.filename is None: - post[item.name] = item.value - else: - post[item.name] = FileUpload(item.file, item.name, item.filename, item.headers) - return post - - @property - def url(self): - """The full request URI including hostname and scheme. If your app - lives behind a reverse proxy or load balancer and you get confusing - results, make sure that the ``X-Forwarded-Host`` header is set - correctly.""" - return self.urlparts.geturl() - - @DictProperty("environ", "bottle.request.urlparts", read_only=True) - def urlparts(self): - """The :attr:`url` string as an :class:`urlparse.SplitResult` tuple. - The tuple contains (scheme, host, path, query_string and fragment), - but the fragment is always empty because it is not visible to the - server.""" - env = self.environ - http = env.get("HTTP_X_FORWARDED_PROTO") or env.get("wsgi.url_scheme", "http") - host = env.get("HTTP_X_FORWARDED_HOST") or env.get("HTTP_HOST") - if not host: - # HTTP 1.1 requires a Host-header. This is for HTTP/1.0 clients. - host = env.get("SERVER_NAME", "127.0.0.1") - port = env.get("SERVER_PORT") - if port and port != ("80" if http == "http" else "443"): - host += ":" + port - path = urlquote(self.fullpath) - return UrlSplitResult(http, host, path, env.get("QUERY_STRING"), "") - - @property - def fullpath(self): - """Request path including :attr:`script_name` (if present).""" - return urljoin(self.script_name, self.path.lstrip("/")) - - @property - def query_string(self): - """The raw :attr:`query` part of the URL (everything in between ``?`` - and ``#``) as a string.""" - return self.environ.get("QUERY_STRING", "") - - @property - def script_name(self): - """The initial portion of the URL's `path` that was removed by a higher - level (server or routing middleware) before the application was - called. This script path is returned with leading and tailing - slashes.""" - script_name = self.environ.get("SCRIPT_NAME", "").strip("/") - return "/" + script_name + "/" if script_name else "/" - - def path_shift(self, shift=1): - """Shift path segments from :attr:`path` to :attr:`script_name` and - vice versa. - - :param shift: The number of path segments to shift. May be negative - to change the shift direction. (default: 1) - """ - script, path = path_shift(self.environ.get("SCRIPT_NAME", "/"), self.path, shift) - self["SCRIPT_NAME"], self["PATH_INFO"] = script, path - - @property - def content_length(self): - """The request body length as an integer. The client is responsible to - set this header. Otherwise, the real length of the body is unknown - and -1 is returned. In this case, :attr:`body` will be empty.""" - return int(self.environ.get("CONTENT_LENGTH") or -1) - - @property - def content_type(self): - """The Content-Type header as a lowercase-string (default: empty).""" - return self.environ.get("CONTENT_TYPE", "").lower() - - @property - def is_xhr(self): - """True if the request was triggered by a XMLHttpRequest. This only - works with JavaScript libraries that support the `X-Requested-With` - header (most of the popular libraries do).""" - requested_with = self.environ.get("HTTP_X_REQUESTED_WITH", "") - return requested_with.lower() == "xmlhttprequest" - - @property - def is_ajax(self): - """Alias for :attr:`is_xhr`. "Ajax" is not the right term.""" - return self.is_xhr - - @property - def auth(self): - """HTTP authentication data as a (user, password) tuple. This - implementation currently supports basic (not digest) authentication - only. If the authentication happened at a higher level (e.g. in the - front web-server or a middleware), the password field is None, but - the user field is looked up from the ``REMOTE_USER`` environ - variable. On any errors, None is returned.""" - basic = parse_auth(self.environ.get("HTTP_AUTHORIZATION", "")) - if basic: - return basic - ruser = self.environ.get("REMOTE_USER") - if ruser: - return (ruser, None) - return None - - @property - def remote_route(self): - """A list of all IPs that were involved in this request, starting with - the client IP and followed by zero or more proxies. This does only - work if all proxies support the ```X-Forwarded-For`` header. Note - that this information can be forged by malicious clients.""" - proxy = self.environ.get("HTTP_X_FORWARDED_FOR") - if proxy: - return [ip.strip() for ip in proxy.split(",")] - remote = self.environ.get("REMOTE_ADDR") - return [remote] if remote else [] - - @property - def remote_addr(self): - """The client IP as a string. Note that this information can be forged - by malicious clients.""" - route = self.remote_route - return route[0] if route else None - - def copy(self): - """Return a new :class:`Request` with a shallow :attr:`environ` copy.""" - return Request(self.environ.copy()) - - def get(self, value, default=None): - return self.environ.get(value, default) - - def __getitem__(self, key): - return self.environ[key] - - def __delitem__(self, key): - self[key] = "" - del self.environ[key] - - def __iter__(self): - return iter(self.environ) - - def __len__(self): - return len(self.environ) - - def keys(self): - return self.environ.keys() - - def __setitem__(self, key, value): - """Change an environ value and clear all caches that depend on it.""" - - if self.environ.get("bottle.request.readonly"): - raise KeyError("The environ dictionary is read-only.") - - self.environ[key] = value - todelete = () - - if key == "wsgi.input": - todelete = ("body", "forms", "files", "params", "post", "json") - elif key == "QUERY_STRING": - todelete = ("query", "params") - elif key.startswith("HTTP_"): - todelete = ("headers", "cookies") - - for key in todelete: - self.environ.pop("bottle.request." + key, None) - - def __repr__(self): - return f"<{self.__class__.__name__}: {self.method} {self.url}>" - - def __getattr__(self, name): - """Search in self.environ for additional user defined attributes.""" - try: - var = self.environ["bottle.request.ext.%s" % name] - return var.__get__(self) if hasattr(var, "__get__") else var - except KeyError: - raise AttributeError("Attribute %r not defined." % name) - - def __setattr__(self, name, value): - if name == "environ": - return object.__setattr__(self, name, value) - key = "bottle.request.ext.%s" % name - if hasattr(self, name): - raise AttributeError("Attribute already defined: %s" % name) - self.environ[key] = value - - def __delattr__(self, name): - try: - del self.environ["bottle.request.ext.%s" % name] - except KeyError: - raise AttributeError("Attribute not defined: %s" % name) - - -def _hkey(key): - if "\n" in key or "\r" in key or "\0" in key: - raise ValueError("Header names must not contain control characters: %r" % key) - return key.title().replace("_", "-") - - -def _hval(value): - value = tonat(value) - if "\n" in value or "\r" in value or "\0" in value: - raise ValueError("Header value must not contain control characters: %r" % value) - return value - - -class HeaderProperty: - def __init__(self, name, reader=None, writer=None, default=""): - self.name, self.default = name, default - self.reader, self.writer = reader, writer - self.__doc__ = "Current value of the %r header." % name.title() - - def __get__(self, obj, _): - if obj is None: - return self - value = obj.get_header(self.name, self.default) - return self.reader(value) if self.reader else value - - def __set__(self, obj, value): - obj[self.name] = self.writer(value) if self.writer else value - - def __delete__(self, obj): - del obj[self.name] - - -class BaseResponse: - """Storage class for a response body as well as headers and cookies. - - This class does support dict-like case-insensitive item-access to - headers, but is NOT a dict. Most notably, iterating over a response - yields parts of the body and not the headers. - - :param body: The response body as one of the supported types. - :param status: Either an HTTP status code (e.g. 200) or a status line - including the reason phrase (e.g. '200 OK'). - :param headers: A dictionary or a list of name-value pairs. - - Additional keyword arguments are added to the list of headers. - Underscores in the header name are replaced with dashes. - """ - - default_status = 200 - default_content_type = "text/html; charset=UTF-8" - - # Header denylist for specific response codes - # (rfc2616 section 10.2.3 and 10.3.5) - bad_headers = { - 204: frozenset(("Content-Type", "Content-Length")), - 304: frozenset( - ( - "Allow", - "Content-Encoding", - "Content-Language", - "Content-Length", - "Content-Range", - "Content-Type", - "Content-Md5", - "Last-Modified", - ) - ), - } - - def __init__(self, body="", status=None, headers=None, **more_headers): - self._cookies = None - self._headers = {} - self.body = body - self.status = status or self.default_status - if headers: - if isinstance(headers, dict): - headers = headers.items() - for name, value in headers: - self.add_header(name, value) - if more_headers: - for name, value in more_headers.items(): - self.add_header(name, value) - - def copy(self, cls=None): - """Returns a copy of self.""" - cls = cls or BaseResponse - assert issubclass(cls, BaseResponse) - copy = cls() - copy.status = self.status - copy._headers = {k: v[:] for (k, v) in self._headers.items()} - if self._cookies: - cookies = copy._cookies = SimpleCookie() - for k, v in self._cookies.items(): - cookies[k] = v.value - cookies[k].update(v) # also copy cookie attributes - return copy - - def __iter__(self): - return iter(self.body) - - def close(self): - if hasattr(self.body, "close"): - self.body.close() - - @property - def status_line(self): - """The HTTP status line as a string (e.g. ``404 Not Found``).""" - return self._status_line - - @property - def status_code(self): - """The HTTP status code as an integer (e.g. 404).""" - return self._status_code - - def _set_status(self, status): - if isinstance(status, int): - code, status = status, _HTTP_STATUS_LINES.get(status) - elif " " in status: - if "\n" in status or "\r" in status or "\0" in status: - raise ValueError("Status line must not include control chars.") - status = status.strip() - code = int(status.split()[0]) - else: - raise ValueError("String status line without a reason phrase.") - if not 100 <= code <= 999: - raise ValueError("Status code out of range.") - self._status_code = code - self._status_line = str(status or ("%d Unknown" % code)) - - def _get_status(self): - return self._status_line - - status = property( - _get_status, - _set_status, - None, - """ A writeable property to change the HTTP response status. It accepts - either a numeric code (100-999) or a string with a custom reason - phrase (e.g. "404 Brain not found"). Both :data:`status_line` and - :data:`status_code` are updated accordingly. The return value is - always a status string. """, - ) - del _get_status, _set_status - - @property - def headers(self): - """An instance of :class:`HeaderDict`, a case-insensitive dict-like - view on the response headers.""" - hdict = HeaderDict() - hdict.dict = self._headers - return hdict - - def __contains__(self, name): - return _hkey(name) in self._headers - - def __delitem__(self, name): - del self._headers[_hkey(name)] - - def __getitem__(self, name): - return self._headers[_hkey(name)][-1] - - def __setitem__(self, name, value): - self._headers[_hkey(name)] = [_hval(value)] - - def get_header(self, name, default=None): - """Return the value of a previously defined header. If there is no - header with that name, return a default value.""" - return self._headers.get(_hkey(name), [default])[-1] - - def set_header(self, name, value): - """Create a new response header, replacing any previously defined - headers with the same name.""" - self._headers[_hkey(name)] = [_hval(value)] - - def add_header(self, name, value): - """Add an additional response header, not removing duplicates.""" - self._headers.setdefault(_hkey(name), []).append(_hval(value)) - - def iter_headers(self): - """Yield (header, value) tuples, skipping headers that are not - allowed with the current response status code.""" - return self.headerlist - - def _wsgi_status_line(self): - """WSGI conform status line (latin1-encodeable)""" - if py3k: - return self._status_line.encode("utf8").decode("latin1") - return self._status_line - - @property - def headerlist(self): - """WSGI conform list of (header, value) tuples.""" - out = [] - headers = list(self._headers.items()) - if "Content-Type" not in self._headers: - headers.append(("Content-Type", [self.default_content_type])) - if self._status_code in self.bad_headers: - bad_headers = self.bad_headers[self._status_code] - headers = [h for h in headers if h[0] not in bad_headers] - out += [(name, val) for (name, vals) in headers for val in vals] - if self._cookies: - for c in self._cookies.values(): - out.append(("Set-Cookie", _hval(c.OutputString()))) - if py3k: - out = [(k, v.encode("utf8").decode("latin1")) for (k, v) in out] - return out - - content_type = HeaderProperty("Content-Type") - content_length = HeaderProperty("Content-Length", reader=int, default=-1) - expires = HeaderProperty( - "Expires", - reader=lambda x: datetime.utcfromtimestamp(parse_date(x)), - writer=lambda x: http_date(x), - ) - - @property - def charset(self, default="UTF-8"): - """Return the charset specified in the content-type header (default: utf8).""" - if "charset=" in self.content_type: - return self.content_type.split("charset=")[-1].split(";")[0].strip() - return default - - def set_cookie(self, name, value, secret=None, digestmod=hashlib.sha256, **options): - """Create a new cookie or replace an old one. If the `secret` parameter is - set, create a `Signed Cookie` (described below). - - :param name: the name of the cookie. - :param value: the value of the cookie. - :param secret: a signature key required for signed cookies. - - Additionally, this method accepts all RFC 2109 attributes that are - supported by :class:`cookie.Morsel`, including: - - :param maxage: maximum age in seconds. (default: None) - :param expires: a datetime object or UNIX timestamp. (default: None) - :param domain: the domain that is allowed to read the cookie. - (default: current domain) - :param path: limits the cookie to a given path (default: current path) - :param secure: limit the cookie to HTTPS connections (default: off). - :param httponly: prevents client-side javascript to read this cookie - (default: off, requires Python 2.6 or newer). - :param samesite: Control or disable third-party use for this cookie. - Possible values: `lax`, `strict` or `none` (default). - - If neither `expires` nor `maxage` is set (default), the cookie will - expire at the end of the browser session (as soon as the browser - window is closed). - - Signed cookies may store any pickle-able object and are - cryptographically signed to prevent manipulation. Keep in mind that - cookies are limited to 4kb in most browsers. - - Warning: Pickle is a potentially dangerous format. If an attacker - gains access to the secret key, he could forge cookies that execute - code on server side if unpickled. Using pickle is discouraged and - support for it will be removed in later versions of bottle. - - Warning: Signed cookies are not encrypted (the client can still see - the content) and not copy-protected (the client can restore an old - cookie). The main intention is to make pickling and unpickling - save, not to store secret information at client side. - """ - if not self._cookies: - self._cookies = SimpleCookie() - - # Monkey-patch Cookie lib to support 'SameSite' parameter - # https://tools.ietf.org/html/draft-west-first-party-cookies-07#section-4.1 - if py < (3, 8, 0): - Morsel._reserved.setdefault("samesite", "SameSite") - - if secret: - if not isinstance(value, basestring): - depr( - 0, - 13, - "Pickling of arbitrary objects into cookies is deprecated.", - "Only store strings in cookies. JSON strings are fine, too.", - ) - encoded = base64.b64encode(pickle.dumps([name, value], -1)) - sig = base64.b64encode(hmac.new(tob(secret), encoded, digestmod=digestmod).digest()) - value = touni(tob("!") + sig + tob("?") + encoded) - elif not isinstance(value, basestring): - raise TypeError("Secret key required for non-string cookies.") - - # Cookie size plus options must not exceed 4kb. - if len(name) + len(value) > 3800: - raise ValueError("Content does not fit into a cookie.") - - self._cookies[name] = value - - for key, value in options.items(): - if key in ("max_age", "maxage"): # 'maxage' variant added in 0.13 - key = "max-age" - if isinstance(value, timedelta): - value = value.seconds + value.days * 24 * 3600 - if key == "expires": - value = http_date(value) - if key in ("same_site", "samesite"): # 'samesite' variant added in 0.13 - key, value = "samesite", (value or "none").lower() - if value not in ("lax", "strict", "none"): - raise CookieError("Invalid value for SameSite") - if key in ("secure", "httponly") and not value: - continue - self._cookies[name][key] = value - - def delete_cookie(self, key, **kwargs): - """Delete a cookie. Be sure to use the same `domain` and `path` - settings as used to create the cookie.""" - kwargs["max_age"] = -1 - kwargs["expires"] = 0 - self.set_cookie(key, "", **kwargs) - - def __repr__(self): - out = "" - for name, value in self.headerlist: - out += f"{name.title()}: {value.strip()}\n" - return out - - -def _local_property(): - ls = threading.local() - - def fget(_): - try: - return ls.var - except AttributeError: - raise RuntimeError("Request context not initialized.") - - def fset(_, value): - ls.var = value - - def fdel(_): - del ls.var - - return property(fget, fset, fdel, "Thread-local property") - - -class LocalRequest(BaseRequest): - """A thread-local subclass of :class:`BaseRequest` with a different - set of attributes for each thread. There is usually only one global - instance of this class (:data:`request`). If accessed during a - request/response cycle, this instance always refers to the *current* - request (even on a multithreaded server).""" - - bind = BaseRequest.__init__ - environ = _local_property() - - -class LocalResponse(BaseResponse): - """A thread-local subclass of :class:`BaseResponse` with a different - set of attributes for each thread. There is usually only one global - instance of this class (:data:`response`). Its attributes are used - to build the HTTP response at the end of the request/response cycle. - """ - - bind = BaseResponse.__init__ - _status_line = _local_property() - _status_code = _local_property() - _cookies = _local_property() - _headers = _local_property() - body = _local_property() - - -Request = BaseRequest -Response = BaseResponse - - -class HTTPResponse(Response, BottleException): - def __init__(self, body="", status=None, headers=None, **more_headers): - super().__init__(body, status, headers, **more_headers) - - def apply(self, other): - other._status_code = self._status_code - other._status_line = self._status_line - other._headers = self._headers - other._cookies = self._cookies - other.body = self.body - - -class HTTPError(HTTPResponse): - default_status = 500 - - def __init__(self, status=None, body=None, exception=None, traceback=None, **more_headers): - self.exception = exception - self.traceback = traceback - super().__init__(body, status, **more_headers) - - -class PluginError(BottleException): - pass - - -class JSONPlugin: - name = "json" - api = 2 - - def __init__(self, json_dumps=json_dumps): - self.json_dumps = json_dumps - - def setup(self, app): - app.config._define( - "json.enable", - default=True, - validate=bool, - help="Enable or disable automatic dict->json filter.", - ) - app.config._define( - "json.ascii", - default=False, - validate=bool, - help="Use only 7-bit ASCII characters in output.", - ) - app.config._define( - "json.indent", - default=True, - validate=bool, - help="Add whitespace to make json more readable.", - ) - app.config._define( - "json.dump_func", - default=None, - help=( - "If defined, use this function to transform" - " dict into json. The other options no longer" - " apply." - ), - ) - - def apply(self, callback, route): - dumps = self.json_dumps - if not self.json_dumps: - return callback - - @functools.wraps(callback) - def wrapper(*a, **ka): - try: - rv = callback(*a, **ka) - except HTTPResponse as resp: - rv = resp - - if isinstance(rv, dict): - # Attempt to serialize, raises exception on failure - json_response = dumps(rv) - # Set content type only if serialization successful - response.content_type = "application/json" - return json_response - elif isinstance(rv, HTTPResponse) and isinstance(rv.body, dict): - rv.body = dumps(rv.body) - rv.content_type = "application/json" - return rv - - return wrapper - - -class TemplatePlugin: - """This plugin applies the :func:`view` decorator to all routes with a - `template` config parameter. If the parameter is a tuple, the second - element must be a dict with additional options (e.g. `template_engine`) - or default variables for the template.""" - - name = "template" - api = 2 - - def setup(self, app): - app.tpl = self - - def apply(self, callback, route): - conf = route.config.get("template") - if isinstance(conf, (tuple, list)) and len(conf) == 2: - return view(conf[0], **conf[1])(callback) - elif isinstance(conf, str): - return view(conf)(callback) - else: - return callback - - -#: Not a plugin, but part of the plugin API. TODO: Find a better place. -class _ImportRedirect: - def __init__(self, name, impmask): - """Create a virtual package that redirects imports (see PEP 302).""" - self.name = name - self.impmask = impmask - self.module = sys.modules.setdefault(name, new_module(name)) - self.module.__dict__.update( - {"__file__": __file__, "__path__": [], "__all__": [], "__loader__": self} - ) - sys.meta_path.append(self) - - def find_spec(self, fullname, path, target=None): - if "." not in fullname: - return - if fullname.rsplit(".", 1)[0] != self.name: - return - from importlib.util import spec_from_loader - - return spec_from_loader(fullname, self) - - def find_module(self, fullname, path=None): - if "." not in fullname: - return - if fullname.rsplit(".", 1)[0] != self.name: - return - return self - - def load_module(self, fullname): - if fullname in sys.modules: - return sys.modules[fullname] - modname = fullname.rsplit(".", 1)[1] - realname = self.impmask % modname - __import__(realname) - module = sys.modules[fullname] = sys.modules[realname] - setattr(self.module, modname, module) - module.__loader__ = self - return module - - -class MultiDict(DictMixin): - """This dict stores multiple values per key, but behaves exactly like a - normal dict in that it returns only the newest value for any given key. - There are special methods available to access the full list of values. - """ - - def __init__(self, *a, **k): - self.dict = {k: [v] for (k, v) in dict(*a, **k).items()} - - def __len__(self): - return len(self.dict) - - def __iter__(self): - return iter(self.dict) - - def __contains__(self, key): - return key in self.dict - - def __delitem__(self, key): - del self.dict[key] - - def __getitem__(self, key): - return self.dict[key][-1] - - def __setitem__(self, key, value): - self.append(key, value) - - def keys(self): - return self.dict.keys() - - if py3k: - - def values(self): - return (v[-1] for v in self.dict.values()) - - def items(self): - return ((k, v[-1]) for k, v in self.dict.items()) - - def allitems(self): - return ((k, v) for k, vl in self.dict.items() for v in vl) - - iterkeys = keys - itervalues = values - iteritems = items - iterallitems = allitems - - else: - - def values(self): - return [v[-1] for v in self.dict.values()] - - def items(self): - return [(k, v[-1]) for k, v in self.dict.items()] - - def iterkeys(self): - return self.dict.iterkeys() - - def itervalues(self): - return (v[-1] for v in self.dict.itervalues()) - - def iteritems(self): - return ((k, v[-1]) for k, v in self.dict.iteritems()) - - def iterallitems(self): - return ((k, v) for k, vl in self.dict.iteritems() for v in vl) - - def allitems(self): - return [(k, v) for k, vl in self.dict.iteritems() for v in vl] - - def get(self, key, default=None, index=-1, type=None): - """Return the most recent value for a key. - - :param default: The default value to be returned if the key is not - present or the type conversion fails. - :param index: An index for the list of available values. - :param type: If defined, this callable is used to cast the value - into a specific type. Exception are suppressed and result in - the default value to be returned. - """ - try: - val = self.dict[key][index] - return type(val) if type else val - except Exception: - pass - return default - - def append(self, key, value): - """Add a new value to the list of values for this key.""" - self.dict.setdefault(key, []).append(value) - - def replace(self, key, value): - """Replace the list of values with a single value.""" - self.dict[key] = [value] - - def getall(self, key): - """Return a (possibly empty) list of values for a key.""" - return self.dict.get(key) or [] - - #: Aliases for WTForms to mimic other multi-dict APIs (Django) - getone = get - getlist = getall - - -class FormsDict(MultiDict): - """This :class:`MultiDict` subclass is used to store request form data. - Additionally to the normal dict-like item access methods (which return - unmodified data as native strings), this container also supports - attribute-like access to its values. Attributes are automatically de- - or recoded to match :attr:`input_encoding` (default: 'utf8'). Missing - attributes default to an empty string.""" - - #: Encoding used for attribute values. - input_encoding = "utf8" - #: If true (default), unicode strings are first encoded with `latin1` - #: and then decoded to match :attr:`input_encoding`. - recode_unicode = True - - def _fix(self, s, encoding=None): - if isinstance(s, unicode) and self.recode_unicode: # Python 3 WSGI - return s.encode("latin1").decode(encoding or self.input_encoding) - elif isinstance(s, bytes): # Python 2 WSGI - return s.decode(encoding or self.input_encoding) - else: - return s - - def decode(self, encoding=None): - """Returns a copy with all keys and values de- or recoded to match - :attr:`input_encoding`. Some libraries (e.g. WTForms) want a - unicode dictionary.""" - copy = FormsDict() - enc = copy.input_encoding = encoding or self.input_encoding - copy.recode_unicode = False - for key, value in self.allitems(): - copy.append(self._fix(key, enc), self._fix(value, enc)) - return copy - - def getunicode(self, name, default=None, encoding=None): - """Return the value as a unicode string, or the default.""" - try: - return self._fix(self[name], encoding) - except (UnicodeError, KeyError): - return default - - def __getattr__(self, name, default=unicode()): - # Without this guard, pickle generates a cryptic TypeError: - if name.startswith("__") and name.endswith("__"): - return super().__getattr__(name) - return self.getunicode(name, default=default) - - -class HeaderDict(MultiDict): - """A case-insensitive version of :class:`MultiDict` that defaults to - replace the old value instead of appending it.""" - - def __init__(self, *a, **ka): - self.dict = {} - if a or ka: - self.update(*a, **ka) - - def __contains__(self, key): - return _hkey(key) in self.dict - - def __delitem__(self, key): - del self.dict[_hkey(key)] - - def __getitem__(self, key): - return self.dict[_hkey(key)][-1] - - def __setitem__(self, key, value): - self.dict[_hkey(key)] = [_hval(value)] - - def append(self, key, value): - self.dict.setdefault(_hkey(key), []).append(_hval(value)) - - def replace(self, key, value): - self.dict[_hkey(key)] = [_hval(value)] - - def getall(self, key): - return self.dict.get(_hkey(key)) or [] - - def get(self, key, default=None, index=-1): - return MultiDict.get(self, _hkey(key), default, index) - - def filter(self, names): - for name in (_hkey(n) for n in names): - if name in self.dict: - del self.dict[name] - - -class WSGIHeaderDict(DictMixin): - """This dict-like class wraps a WSGI environ dict and provides convenient - access to HTTP_* fields. Keys and values are native strings - (2.x bytes or 3.x unicode) and keys are case-insensitive. If the WSGI - environment contains non-native string values, these are de- or encoded - using a lossless 'latin1' character set. - - The API will remain stable even on changes to the relevant PEPs. - Currently PEP 333, 444 and 3333 are supported. (PEP 444 is the only one - that uses non-native strings.) - """ - - #: List of keys that do not have a ``HTTP_`` prefix. - cgikeys = ("CONTENT_TYPE", "CONTENT_LENGTH") - - def __init__(self, environ): - self.environ = environ - - def _ekey(self, key): - """Translate header field name to CGI/WSGI environ key.""" - key = key.replace("-", "_").upper() - if key in self.cgikeys: - return key - return "HTTP_" + key - - def raw(self, key, default=None): - """Return the header value as is (may be bytes or unicode).""" - return self.environ.get(self._ekey(key), default) - - def __getitem__(self, key): - val = self.environ[self._ekey(key)] - if py3k: - if isinstance(val, unicode): - val = val.encode("latin1").decode("utf8") - else: - val = val.decode("utf8") - return val - - def __setitem__(self, key, value): - raise TypeError("%s is read-only." % self.__class__) - - def __delitem__(self, key): - raise TypeError("%s is read-only." % self.__class__) - - def __iter__(self): - for key in self.environ: - if key[:5] == "HTTP_": - yield _hkey(key[5:]) - elif key in self.cgikeys: - yield _hkey(key) - - def keys(self): - return [x for x in self] - - def __len__(self): - return len(self.keys()) - - def __contains__(self, key): - return self._ekey(key) in self.environ - - -_UNSET = object() - - -class ConfigDict(dict): - """A dict-like configuration storage with additional support for - namespaces, validators, meta-data, overlays and more. - - This dict-like class is heavily optimized for read access. All read-only - methods as well as item access should be as fast as the built-in dict. - """ - - __slots__ = ( - "_meta", - "_change_listener", - "_overlays", - "_virtual_keys", - "_source", - "__weakref__", - ) - - def __init__(self): - self._meta = {} - self._change_listener = [] - #: Weak references of overlays that need to be kept in sync. - self._overlays = [] - #: Config that is the source for this overlay. - self._source = None - #: Keys of values copied from the source (values we do not own) - self._virtual_keys = set() - - def load_module(self, path, squash=True): - """Load values from a Python module. - - Example modue ``config.py``:: - - DEBUG = True - SQLITE = { - "db": ":memory:" - } - - - >>> c = ConfigDict() - >>> c.load_module('config') - {DEBUG: True, 'SQLITE.DB': 'memory'} - >>> c.load_module("config", False) - {'DEBUG': True, 'SQLITE': {'DB': 'memory'}} - - :param squash: If true (default), dictionary values are assumed to - represent namespaces (see :meth:`load_dict`). - """ - config_obj = load(path) - obj = {key: getattr(config_obj, key) for key in dir(config_obj) if key.isupper()} - - if squash: - self.load_dict(obj) - else: - self.update(obj) - return self - - def load_config(self, filename, **options): - """Load values from an ``*.ini`` style config file. - - A configuration file consists of sections, each led by a - ``[section]`` header, followed by key/value entries separated by - either ``=`` or ``:``. Section names and keys are case-insensitive. - Leading and trailing whitespace is removed from keys and values. - Values can be omitted, in which case the key/value delimiter may - also be left out. Values can also span multiple lines, as long as - they are indented deeper than the first line of the value. Commands - are prefixed by ``#`` or ``;`` and may only appear on their own on - an otherwise empty line. - - Both section and key names may contain dots (``.``) as namespace - separators. The actual configuration parameter name is constructed - by joining section name and key name together and converting to - lower case. - - The special sections ``bottle`` and ``ROOT`` refer to the root - namespace and the ``DEFAULT`` section defines default values for all - other sections. - - With Python 3, extended string interpolation is enabled. - - :param filename: The path of a config file, or a list of paths. - :param options: All keyword parameters are passed to the underlying - :class:`python:configparser.ConfigParser` constructor call. - - """ - options.setdefault("allow_no_value", True) - if py3k: - options.setdefault("interpolation", configparser.ExtendedInterpolation()) - conf = configparser.ConfigParser(**options) - conf.read(filename) - for section in conf.sections(): - for key in conf.options(section): - value = conf.get(section, key) - if section not in ("bottle", "ROOT"): - key = section + "." + key - self[key.lower()] = value - return self - - def load_dict(self, source, namespace=""): - """Load values from a dictionary structure. Nesting can be used to - represent namespaces. - - >>> c = ConfigDict() - >>> c.load_dict({'some': {'namespace': {'key': 'value'} } }) - {'some.namespace.key': 'value'} - """ - for key, value in source.items(): - if isinstance(key, basestring): - nskey = (namespace + "." + key).strip(".") - if isinstance(value, dict): - self.load_dict(value, namespace=nskey) - else: - self[nskey] = value - else: - raise TypeError("Key has type %r (not a string)" % type(key)) - return self - - def update(self, *a, **ka): - """If the first parameter is a string, all keys are prefixed with this - namespace. Apart from that it works just as the usual dict.update(). - - >>> c = ConfigDict() - >>> c.update('some.namespace', key='value') - """ - prefix = "" - if a and isinstance(a[0], basestring): - prefix = a[0].strip(".") + "." - a = a[1:] - for key, value in dict(*a, **ka).items(): - self[prefix + key] = value - - def setdefault(self, key, value): - if key not in self: - self[key] = value - return self[key] - - def __setitem__(self, key, value): - if not isinstance(key, basestring): - raise TypeError("Key has type %r (not a string)" % type(key)) - - self._virtual_keys.discard(key) - - value = self.meta_get(key, "filter", lambda x: x)(value) - if key in self and self[key] is value: - return - - self._on_change(key, value) - dict.__setitem__(self, key, value) - - for overlay in self._iter_overlays(): - overlay._set_virtual(key, value) - - def __delitem__(self, key): - if key not in self: - raise KeyError(key) - if key in self._virtual_keys: - raise KeyError("Virtual keys cannot be deleted: %s" % key) - - if self._source and key in self._source: - # Not virtual, but present in source -> Restore virtual value - dict.__delitem__(self, key) - self._set_virtual(key, self._source[key]) - else: # not virtual, not present in source. This is OUR value - self._on_change(key, None) - dict.__delitem__(self, key) - for overlay in self._iter_overlays(): - overlay._delete_virtual(key) - - def _set_virtual(self, key, value): - """Recursively set or update virtual keys. Do nothing if non-virtual - value is present.""" - if key in self and key not in self._virtual_keys: - return # Do nothing for non-virtual keys. - - self._virtual_keys.add(key) - if key in self and self[key] is not value: - self._on_change(key, value) - dict.__setitem__(self, key, value) - for overlay in self._iter_overlays(): - overlay._set_virtual(key, value) - - def _delete_virtual(self, key): - """Recursively delete virtual entry. Do nothing if key is not virtual.""" - if key not in self._virtual_keys: - return # Do nothing for non-virtual keys. - - if key in self: - self._on_change(key, None) - dict.__delitem__(self, key) - self._virtual_keys.discard(key) - for overlay in self._iter_overlays(): - overlay._delete_virtual(key) - - def _on_change(self, key, value): - for cb in self._change_listener: - if cb(self, key, value): - return True - - def _add_change_listener(self, func): - self._change_listener.append(func) - return func - - def meta_get(self, key, metafield, default=None): - """Return the value of a meta field for a key.""" - return self._meta.get(key, {}).get(metafield, default) - - def meta_set(self, key, metafield, value): - """Set the meta field for a key to a new value.""" - self._meta.setdefault(key, {})[metafield] = value - - def meta_list(self, key): - """Return an iterable of meta field names defined for a key.""" - return self._meta.get(key, {}).keys() - - def _define(self, key, default=_UNSET, help=_UNSET, validate=_UNSET): - """(Unstable) Shortcut for plugins to define own config parameters.""" - if default is not _UNSET: - self.setdefault(key, default) - if help is not _UNSET: - self.meta_set(key, "help", help) - if validate is not _UNSET: - self.meta_set(key, "validate", validate) - - def _iter_overlays(self): - for ref in self._overlays: - overlay = ref() - if overlay is not None: - yield overlay - - def _make_overlay(self): - """(Unstable) Create a new overlay that acts like a chained map: Values - missing in the overlay are copied from the source map. Both maps - share the same meta entries. - - Entries that were copied from the source are called 'virtual'. You - can not delete virtual keys, but overwrite them, which turns them - into non-virtual entries. Setting keys on an overlay never affects - its source, but may affect any number of child overlays. - - Other than collections.ChainMap or most other implementations, this - approach does not resolve missing keys on demand, but instead - actively copies all values from the source to the overlay and keeps - track of virtual and non-virtual keys internally. This removes any - lookup-overhead. Read-access is as fast as a build-in dict for both - virtual and non-virtual keys. - - Changes are propagated recursively and depth-first. A failing - on-change handler in an overlay stops the propagation of virtual - values and may result in an partly updated tree. Take extra care - here and make sure that on-change handlers never fail. - - Used by Route.config - """ - # Cleanup dead references - self._overlays[:] = [ref for ref in self._overlays if ref() is not None] - - overlay = ConfigDict() - overlay._meta = self._meta - overlay._source = self - self._overlays.append(weakref.ref(overlay)) - for key in self: - overlay._set_virtual(key, self[key]) - return overlay - - -class AppStack(list): - """A stack-like list. Calling it returns the head of the stack.""" - - def __call__(self): - """Return the current default application.""" - return self.default - - def push(self, value=None): - """Add a new :class:`Bottle` instance to the stack""" - if not isinstance(value, Bottle): - value = Bottle() - self.append(value) - return value - - new_app = push - - @property - def default(self): - try: - return self[-1] - except IndexError: - return self.push() - - -class WSGIFileWrapper: - def __init__(self, fp, buffer_size=1024 * 64): - self.fp, self.buffer_size = fp, buffer_size - for attr in "fileno", "close", "read", "readlines", "tell", "seek": - if hasattr(fp, attr): - setattr(self, attr, getattr(fp, attr)) - - def __iter__(self): - buff, read = self.buffer_size, self.read - part = read(buff) - while part: - yield part - part = read(buff) - - -class _closeiter: - """This only exists to be able to attach a .close method to iterators that - do not support attribute assignment (most of itertools).""" - - def __init__(self, iterator, close=None): - self.iterator = iterator - self.close_callbacks = makelist(close) - - def __iter__(self): - return iter(self.iterator) - - def close(self): - for func in self.close_callbacks: - func() - - -class ResourceManager: - """This class manages a list of search paths and helps to find and open - application-bound resources (files). - - :param base: default value for :meth:`add_path` calls. - :param opener: callable used to open resources. - :param cachemode: controls which lookups are cached. One of 'all', - 'found' or 'none'. - """ - - def __init__(self, base="./", opener=open, cachemode="all"): - self.opener = opener - self.base = base - self.cachemode = cachemode - - #: A list of search paths. See :meth:`add_path` for details. - self.path = [] - #: A cache for resolved paths. ``res.cache.clear()`` clears the cache. - self.cache = {} - - def add_path(self, path, base=None, index=None, create=False): - """Add a new path to the list of search paths. Return False if the - path does not exist. - - :param path: The new search path. Relative paths are turned into - an absolute and normalized form. If the path looks like a file - (not ending in `/`), the filename is stripped off. - :param base: Path used to absolutize relative search paths. - Defaults to :attr:`base` which defaults to ``os.getcwd()``. - :param index: Position within the list of search paths. Defaults - to last index (appends to the list). - - The `base` parameter makes it easy to reference files installed - along with a python module or package:: - - res.add_path('./resources/', __file__) - """ - base = os.path.abspath(os.path.dirname(base or self.base)) - path = os.path.abspath(os.path.join(base, os.path.dirname(path))) - path += os.sep - if path in self.path: - self.path.remove(path) - if create and not os.path.isdir(path): - os.makedirs(path) - if index is None: - self.path.append(path) - else: - self.path.insert(index, path) - self.cache.clear() - return os.path.exists(path) - - def __iter__(self): - """Iterate over all existing files in all registered paths.""" - search = self.path[:] - while search: - path = search.pop() - if not os.path.isdir(path): - continue - for name in os.listdir(path): - full = os.path.join(path, name) - if os.path.isdir(full): - search.append(full) - else: - yield full - - def lookup(self, name): - """Search for a resource and return an absolute file path, or `None`. - - The :attr:`path` list is searched in order. The first match is - returned. Symlinks are followed. The result is cached to speed up - future lookups.""" - if name not in self.cache or DEBUG: - for path in self.path: - fpath = os.path.join(path, name) - if os.path.isfile(fpath): - if self.cachemode in ("all", "found"): - self.cache[name] = fpath - return fpath - if self.cachemode == "all": - self.cache[name] = None - return self.cache[name] - - def open(self, name, mode="r", *args, **kwargs): - """Find a resource and return a file object, or raise IOError.""" - fname = self.lookup(name) - if not fname: - raise OSError("Resource %r not found." % name) - return self.opener(fname, mode=mode, *args, **kwargs) - - -class FileUpload: - def __init__(self, fileobj, name, filename, headers=None): - """Wrapper for file uploads.""" - #: Open file(-like) object (BytesIO buffer or temporary file) - self.file = fileobj - #: Name of the upload form field - self.name = name - #: Raw filename as sent by the client (may contain unsafe characters) - self.raw_filename = filename - #: A :class:`HeaderDict` with additional headers (e.g. content-type) - self.headers = HeaderDict(headers) if headers else HeaderDict() - - content_type = HeaderProperty("Content-Type") - content_length = HeaderProperty("Content-Length", reader=int, default=-1) - - def get_header(self, name, default=None): - """Return the value of a header within the multipart part.""" - return self.headers.get(name, default) - - @cached_property - def filename(self): - """Name of the file on the client file system, but normalized to ensure - file system compatibility. An empty filename is returned as 'empty'. - - Only ASCII letters, digits, dashes, underscores and dots are - allowed in the final filename. Accents are removed, if possible. - Whitespace is replaced by a single dash. Leading or tailing dots - or dashes are removed. The filename is limited to 255 characters. - """ - fname = self.raw_filename - if not isinstance(fname, unicode): - fname = fname.decode("utf8", "ignore") - fname = normalize("NFKD", fname) - fname = fname.encode("ASCII", "ignore").decode("ASCII") - fname = os.path.basename(fname.replace("\\", os.path.sep)) - fname = re.sub(r"[^a-zA-Z0-9-_.\s]", "", fname).strip() - fname = re.sub(r"[-\s]+", "-", fname).strip(".-") - return fname[:255] or "empty" - - def _copy_file(self, fp, chunk_size=2**16): - read, write, offset = self.file.read, fp.write, self.file.tell() - while 1: - buf = read(chunk_size) - if not buf: - break - write(buf) - self.file.seek(offset) - - def save(self, destination, overwrite=False, chunk_size=2**16): - """Save file to disk or copy its content to an open file(-like) object. - If *destination* is a directory, :attr:`filename` is added to the - path. Existing files are not overwritten by default (IOError). - - :param destination: File path, directory or file(-like) object. - :param overwrite: If True, replace existing files. (default: False) - :param chunk_size: Bytes to read at a time. (default: 64kb) - """ - if isinstance(destination, basestring): # Except file-likes here - if os.path.isdir(destination): - destination = os.path.join(destination, self.filename) - if not overwrite and os.path.exists(destination): - raise OSError("File exists.") - with open(destination, "wb") as fp: - self._copy_file(fp, chunk_size) - else: - self._copy_file(destination, chunk_size) - - -############################################################################### -# Application Helper ########################################################### -############################################################################### - - -def abort(code=500, text="Unknown Error."): - """Aborts execution and causes a HTTP error.""" - raise HTTPError(code, text) - - -def redirect(url, code=None): - """Aborts execution and causes a 303 or 302 redirect, depending on - the HTTP protocol version.""" - if not code: - code = 303 if request.get("SERVER_PROTOCOL") == "HTTP/1.1" else 302 - res = response.copy(cls=HTTPResponse) - res.status = code - res.body = "" - res.set_header("Location", urljoin(request.url, url)) - raise res - - -def _rangeiter(fp, offset, limit, bufsize=1024 * 1024): - """Yield chunks from a range in a file.""" - fp.seek(offset) - while limit > 0: - part = fp.read(min(limit, bufsize)) - if not part: - break - limit -= len(part) - yield part - - -def static_file( - filename, root, mimetype=True, download=False, charset="UTF-8", etag=None, headers=None -): - """Open a file in a safe way and return an instance of :exc:`HTTPResponse` - that can be sent back to the client. - - :param filename: Name or path of the file to send, relative to ``root``. - :param root: Root path for file lookups. Should be an absolute directory - path. - :param mimetype: Provide the content-type header (default: guess from - file extension) - :param download: If True, ask the browser to open a `Save as...` dialog - instead of opening the file with the associated program. You can - specify a custom filename as a string. If not specified, the - original filename is used (default: False). - :param charset: The charset for files with a ``text/*`` mime-type. - (default: UTF-8) - :param etag: Provide a pre-computed ETag header. If set to ``False``, - ETag handling is disabled. (default: auto-generate ETag header) - :param headers: Additional headers dict to add to the response. - - While checking user input is always a good idea, this function provides - additional protection against malicious ``filename`` parameters from - breaking out of the ``root`` directory and leaking sensitive information - to an attacker. - - Read-protected files or files outside of the ``root`` directory are - answered with ``403 Access Denied``. Missing files result in a - ``404 Not Found`` response. Conditional requests (``If-Modified-Since``, - ``If-None-Match``) are answered with ``304 Not Modified`` whenever - possible. ``HEAD`` and ``Range`` requests (used by download managers to - check or continue partial downloads) are also handled automatically. - - """ - - root = os.path.join(os.path.abspath(root), "") - filename = os.path.abspath(os.path.join(root, filename.strip("/\\"))) - headers = headers.copy() if headers else {} - - if not filename.startswith(root): - return HTTPError(403, "Access denied.") - if not os.path.exists(filename) or not os.path.isfile(filename): - return HTTPError(404, "File does not exist.") - if not os.access(filename, os.R_OK): - return HTTPError(403, "You do not have permission to access this file.") - - if mimetype is True: - if download and download is not True: - mimetype, encoding = mimetypes.guess_type(download) - else: - mimetype, encoding = mimetypes.guess_type(filename) - if encoding: - headers["Content-Encoding"] = encoding - - if mimetype: - if ( - (mimetype[:5] == "text/" or mimetype == "application/javascript") - and charset - and "charset" not in mimetype - ): - mimetype += "; charset=%s" % charset - headers["Content-Type"] = mimetype - - if download: - download = os.path.basename(filename if download is True else download) - headers["Content-Disposition"] = 'attachment; filename="%s"' % download - - stats = os.stat(filename) - headers["Content-Length"] = clen = stats.st_size - headers["Last-Modified"] = email.utils.formatdate(stats.st_mtime, usegmt=True) - headers["Date"] = email.utils.formatdate(time.time(), usegmt=True) - - getenv = request.environ.get - - if etag is None: - etag = "%d:%d:%d:%d:%s" % (stats.st_dev, stats.st_ino, stats.st_mtime, clen, filename) - etag = hashlib.sha1(tob(etag)).hexdigest() - - if etag: - headers["ETag"] = etag - check = getenv("HTTP_IF_NONE_MATCH") - if check and check == etag: - return HTTPResponse(status=304, **headers) - - ims = getenv("HTTP_IF_MODIFIED_SINCE") - if ims: - ims = parse_date(ims.split(";")[0].strip()) - if ims is not None and ims >= int(stats.st_mtime): - return HTTPResponse(status=304, **headers) - - body = "" if request.method == "HEAD" else open(filename, "rb") - - headers["Accept-Ranges"] = "bytes" - range_header = getenv("HTTP_RANGE") - if range_header: - ranges = list(parse_range_header(range_header, clen)) - if not ranges: - return HTTPError(416, "Requested Range Not Satisfiable") - offset, end = ranges[0] - rlen = end - offset - headers["Content-Range"] = "bytes %d-%d/%d" % (offset, end - 1, clen) - headers["Content-Length"] = str(rlen) - if body: - body = _closeiter(_rangeiter(body, offset, rlen), body.close) - return HTTPResponse(body, status=206, **headers) - return HTTPResponse(body, **headers) - - -def debug(mode=True): - """Change the debug level. - There is only one debug level supported at the moment.""" - global DEBUG - if mode: - warnings.simplefilter("default") - DEBUG = bool(mode) - - -def http_date(value): - if isinstance(value, basestring): - return value - if isinstance(value, datetime): - # aware datetime.datetime is converted to UTC time - # naive datetime.datetime is treated as UTC time - value = value.utctimetuple() - elif isinstance(value, datedate): - # datetime.date is naive, and is treated as UTC time - value = value.timetuple() - if not isinstance(value, (int, float)): - # convert struct_time in UTC to UNIX timestamp - value = calendar.timegm(value) - return email.utils.formatdate(value, usegmt=True) - - -def parse_date(ims): - """Parse rfc1123, rfc850 and asctime timestamps and return UTC epoch.""" - try: - ts = email.utils.parsedate_tz(ims) - return calendar.timegm(ts[:8] + (0,)) - (ts[9] or 0) - except (TypeError, ValueError, IndexError, OverflowError): - return None - - -def parse_auth(header): - """Parse rfc2617 HTTP authentication header string (basic) and return (user,pass) tuple or None""" - try: - method, data = header.split(None, 1) - if method.lower() == "basic": - user, pwd = touni(base64.b64decode(tob(data))).split(":", 1) - return user, pwd - except (KeyError, ValueError): - return None - - -def parse_range_header(header, maxlen=0): - """Yield (start, end) ranges parsed from a HTTP Range header. Skip - unsatisfiable ranges. The end index is non-inclusive.""" - if not header or header[:6] != "bytes=": - return - ranges = [r.split("-", 1) for r in header[6:].split(",") if "-" in r] - for start, end in ranges: - try: - if not start: # bytes=-100 -> last 100 bytes - start, end = max(0, maxlen - int(end)), maxlen - elif not end: # bytes=100- -> all but the first 99 bytes - start, end = int(start), maxlen - else: # bytes=100-200 -> bytes 100-200 (inclusive) - start, end = int(start), min(int(end) + 1, maxlen) - if 0 <= start < end <= maxlen: - yield start, end - except ValueError: - pass - - -#: Header tokenizer used by _parse_http_header() -_hsplit = re.compile('(?:(?:"((?:[^"\\\\]|\\\\.)*)")|([^;,=]+))([;,=]?)').findall - - -def _parse_http_header(h): - """Parses a typical multi-valued and parametrised HTTP header (e.g. Accept headers) and returns a list of values - and parameters. For non-standard or broken input, this implementation may return partial results. - :param h: A header string (e.g. ``text/html,text/plain;q=0.9,*/*;q=0.8``) - :return: List of (value, params) tuples. The second element is a (possibly empty) dict. - """ - values = [] - if '"' not in h: # INFO: Fast path without regexp (~2x faster) - for value in h.split(","): - parts = value.split(";") - values.append((parts[0].strip(), {})) - for attr in parts[1:]: - name, value = attr.split("=", 1) - values[-1][1][name.strip()] = value.strip() - else: - lop, key, attrs = ",", None, {} - for quoted, plain, tok in _hsplit(h): - value = plain.strip() if plain else quoted.replace('\\"', '"') - if lop == ",": - attrs = {} - values.append((value, attrs)) - elif lop == ";": - if tok == "=": - key = value - else: - attrs[value] = "" - elif lop == "=" and key: - attrs[key] = value - key = None - lop = tok - return values - - -def _parse_qsl(qs): - r = [] - for pair in qs.split("&"): - if not pair: - continue - nv = pair.split("=", 1) - if len(nv) != 2: - nv.append("") - key = urlunquote(nv[0].replace("+", " ")) - value = urlunquote(nv[1].replace("+", " ")) - r.append((key, value)) - return r - - -def _lscmp(a, b): - """Compares two strings in a cryptographically safe way: - Runtime is not affected by length of common prefix.""" - return not sum(0 if x == y else 1 for x, y in zip(a, b)) and len(a) == len(b) - - -def cookie_encode(data, key, digestmod=None): - """Encode and sign a pickle-able object. Return a (byte) string""" - depr(0, 13, "cookie_encode() will be removed soon.", "Do not use this API directly.") - digestmod = digestmod or hashlib.sha256 - msg = base64.b64encode(pickle.dumps(data, -1)) - sig = base64.b64encode(hmac.new(tob(key), msg, digestmod=digestmod).digest()) - return tob("!") + sig + tob("?") + msg - - -def cookie_decode(data, key, digestmod=None): - """Verify and decode an encoded string. Return an object or None.""" - depr(0, 13, "cookie_decode() will be removed soon.", "Do not use this API directly.") - data = tob(data) - if cookie_is_encoded(data): - sig, msg = data.split(tob("?"), 1) - digestmod = digestmod or hashlib.sha256 - hashed = hmac.new(tob(key), msg, digestmod=digestmod).digest() - if _lscmp(sig[1:], base64.b64encode(hashed)): - return pickle.loads(base64.b64decode(msg)) - return None - - -def cookie_is_encoded(data): - """Return True if the argument looks like a encoded cookie.""" - depr(0, 13, "cookie_is_encoded() will be removed soon.", "Do not use this API directly.") - return bool(data.startswith(tob("!")) and tob("?") in data) - - -def html_escape(string): - """Escape HTML special characters ``&<>`` and quotes ``'"``.""" - return ( - string.replace("&", "&") - .replace("<", "<") - .replace(">", ">") - .replace('"', """) - .replace("'", "'") - ) - - -def html_quote(string): - """Escape and quote a string to be used as an HTTP attribute.""" - return '"%s"' % html_escape(string).replace("\n", " ").replace("\r", " ").replace( - "\t", " " - ) - - -def yieldroutes(func): - """Return a generator for routes that match the signature (name, args) - of the func parameter. This may yield more than one route if the function - takes optional keyword arguments. The output is best described by example:: - - a() -> '/a' - b(x, y) -> '/b//' - c(x, y=5) -> '/c/' and '/c//' - d(x=5, y=6) -> '/d' and '/d/' and '/d//' - """ - path = "/" + func.__name__.replace("__", "/").lstrip("/") - spec = getargspec(func) - argc = len(spec[0]) - len(spec[3] or []) - path += ("/<%s>" * argc) % tuple(spec[0][:argc]) - yield path - for arg in spec[0][argc:]: - path += "/<%s>" % arg - yield path - - -def path_shift(script_name, path_info, shift=1): - """Shift path fragments from PATH_INFO to SCRIPT_NAME and vice versa. - - :return: The modified paths. - :param script_name: The SCRIPT_NAME path. - :param script_name: The PATH_INFO path. - :param shift: The number of path fragments to shift. May be negative to - change the shift direction. (default: 1) - """ - if shift == 0: - return script_name, path_info - pathlist = path_info.strip("/").split("/") - scriptlist = script_name.strip("/").split("/") - if pathlist and pathlist[0] == "": - pathlist = [] - if scriptlist and scriptlist[0] == "": - scriptlist = [] - if 0 < shift <= len(pathlist): - moved = pathlist[:shift] - scriptlist = scriptlist + moved - pathlist = pathlist[shift:] - elif 0 > shift >= -len(scriptlist): - moved = scriptlist[shift:] - pathlist = moved + pathlist - scriptlist = scriptlist[:shift] - else: - empty = "SCRIPT_NAME" if shift < 0 else "PATH_INFO" - raise AssertionError("Cannot shift. Nothing left from %s" % empty) - new_script_name = "/" + "/".join(scriptlist) - new_path_info = "/" + "/".join(pathlist) - if path_info.endswith("/") and pathlist: - new_path_info += "/" - return new_script_name, new_path_info - - -def auth_basic(check, realm="private", text="Access denied"): - """Callback decorator to require HTTP auth (basic). - TODO: Add route(check_auth=...) parameter.""" - - def decorator(func): - @functools.wraps(func) - def wrapper(*a, **ka): - user, password = request.auth or (None, None) - if user is None or not check(user, password): - err = HTTPError(401, text) - err.add_header("WWW-Authenticate", 'Basic realm="%s"' % realm) - return err - return func(*a, **ka) - - return wrapper - - return decorator - - -def make_default_app_wrapper(name): - """Return a callable that relays calls to the current default app.""" - - @functools.wraps(getattr(Bottle, name)) - def wrapper(*a, **ka): - return getattr(app(), name)(*a, **ka) - - return wrapper - - -route = make_default_app_wrapper("route") -get = make_default_app_wrapper("get") -post = make_default_app_wrapper("post") -put = make_default_app_wrapper("put") -delete = make_default_app_wrapper("delete") -patch = make_default_app_wrapper("patch") -error = make_default_app_wrapper("error") -mount = make_default_app_wrapper("mount") -hook = make_default_app_wrapper("hook") -install = make_default_app_wrapper("install") -uninstall = make_default_app_wrapper("uninstall") -url = make_default_app_wrapper("get_url") - - -class ServerAdapter: - quiet = False - - def __init__(self, host="127.0.0.1", port=8080, **options): - self.options = options - self.host = host - self.port = int(port) - - def run(self, handler): # pragma: no cover - pass - - def __repr__(self): - args = ", ".join(f"{k}={repr(v)}" for k, v in self.options.items()) - return f"{self.__class__.__name__}({args})" - - -class CGIServer(ServerAdapter): - quiet = True - - def run(self, handler): # pragma: no cover - from wsgiref.handlers import CGIHandler - - def fixed_environ(environ, start_response): - environ.setdefault("PATH_INFO", "") - return handler(environ, start_response) - - CGIHandler().run(fixed_environ) - - -class FlupFCGIServer(ServerAdapter): - def run(self, handler): # pragma: no cover - import flup.server.fcgi - - self.options.setdefault("bindAddress", (self.host, self.port)) - flup.server.fcgi.WSGIServer(handler, **self.options).run() - - -class WSGIRefServer(ServerAdapter): - def run(self, app): # pragma: no cover - import socket - from wsgiref.simple_server import WSGIRequestHandler, WSGIServer, make_server - - class FixedHandler(WSGIRequestHandler): - def address_string(self): # Prevent reverse DNS lookups please. - return self.client_address[0] - - def log_request(*args, **kw): - if not self.quiet: - return WSGIRequestHandler.log_request(*args, **kw) - - handler_cls = self.options.get("handler_class", FixedHandler) - server_cls = self.options.get("server_class", WSGIServer) - - if ":" in self.host: # Fix wsgiref for IPv6 addresses. - if getattr(server_cls, "address_family") == socket.AF_INET: - - class server_cls(server_cls): - address_family = socket.AF_INET6 - - self.srv = make_server(self.host, self.port, app, server_cls, handler_cls) - self.port = self.srv.server_port # update port actual port (0 means random) - try: - self.srv.serve_forever() - except KeyboardInterrupt: - self.srv.server_close() # Prevent ResourceWarning: unclosed socket - raise - - -class CherryPyServer(ServerAdapter): - def run(self, handler): # pragma: no cover - depr( - 0, - 13, - "The wsgi server part of cherrypy was split into a new project called 'cheroot'.", - "Use the 'cheroot' server adapter instead of cherrypy.", - ) - from cherrypy import wsgiserver # This will fail for CherryPy >= 9 - - self.options["bind_addr"] = (self.host, self.port) - self.options["wsgi_app"] = handler - - certfile = self.options.get("certfile") - if certfile: - del self.options["certfile"] - keyfile = self.options.get("keyfile") - if keyfile: - del self.options["keyfile"] - - server = wsgiserver.CherryPyWSGIServer(**self.options) - if certfile: - server.ssl_certificate = certfile - if keyfile: - server.ssl_private_key = keyfile - - try: - server.start() - finally: - server.stop() - - -class CherootServer(ServerAdapter): - def run(self, handler): # pragma: no cover - from cheroot import wsgi - from cheroot.ssl import builtin - - self.options["bind_addr"] = (self.host, self.port) - self.options["wsgi_app"] = handler - certfile = self.options.pop("certfile", None) - keyfile = self.options.pop("keyfile", None) - chainfile = self.options.pop("chainfile", None) - server = wsgi.Server(**self.options) - if certfile and keyfile: - server.ssl_adapter = builtin.BuiltinSSLAdapter(certfile, keyfile, chainfile) - try: - server.start() - finally: - server.stop() - - -class WaitressServer(ServerAdapter): - def run(self, handler): - from waitress import serve - - serve(handler, host=self.host, port=self.port, _quiet=self.quiet, **self.options) - - -class PasteServer(ServerAdapter): - def run(self, handler): # pragma: no cover - from paste import httpserver - from paste.translogger import TransLogger - - handler = TransLogger(handler, setup_console_handler=(not self.quiet)) - httpserver.serve(handler, host=self.host, port=str(self.port), **self.options) - - -class MeinheldServer(ServerAdapter): - def run(self, handler): - from meinheld import server - - server.listen((self.host, self.port)) - server.run(handler) - - -class FapwsServer(ServerAdapter): - """Extremely fast webserver using libev. See https://github.com/william-os4y/fapws3""" - - def run(self, handler): # pragma: no cover - depr(0, 13, "fapws3 is not maintained and support will be dropped.") - import fapws._evwsgi as evwsgi - from fapws import base, config - - port = self.port - if float(config.SERVER_IDENT[-2:]) > 0.4: - # fapws3 silently changed its API in 0.5 - port = str(port) - evwsgi.start(self.host, port) - # fapws3 never releases the GIL. Complain upstream. I tried. No luck. - if "BOTTLE_CHILD" in os.environ and not self.quiet: - _stderr("WARNING: Auto-reloading does not work with Fapws3.") - _stderr(" (Fapws3 breaks python thread support)") - evwsgi.set_base_module(base) - - def app(environ, start_response): - environ["wsgi.multiprocess"] = False - return handler(environ, start_response) - - evwsgi.wsgi_cb(("", app)) - evwsgi.run() - - -class TornadoServer(ServerAdapter): - """The super hyped asynchronous server by facebook. Untested.""" - - def run(self, handler): # pragma: no cover - import tornado.httpserver - import tornado.ioloop - import tornado.wsgi - - container = tornado.wsgi.WSGIContainer(handler) - server = tornado.httpserver.HTTPServer(container) - server.listen(port=self.port, address=self.host) - tornado.ioloop.IOLoop.instance().start() - - -class AppEngineServer(ServerAdapter): - """Adapter for Google App Engine.""" - - quiet = True - - def run(self, handler): - depr( - 0, - 13, - "AppEngineServer no longer required", - "Configure your application directly in your app.yaml", - ) - from google.appengine.ext.webapp import util - - # A main() function in the handler script enables 'App Caching'. - # Lets makes sure it is there. This _really_ improves performance. - module = sys.modules.get("__main__") - if module and not hasattr(module, "main"): - module.main = lambda: util.run_wsgi_app(handler) - util.run_wsgi_app(handler) - - -class TwistedServer(ServerAdapter): - """Untested.""" - - def run(self, handler): - from twisted.internet import reactor - from twisted.python.threadpool import ThreadPool - from twisted.web import server, wsgi - - thread_pool = ThreadPool() - thread_pool.start() - reactor.addSystemEventTrigger("after", "shutdown", thread_pool.stop) - factory = server.Site(wsgi.WSGIResource(reactor, thread_pool, handler)) - reactor.listenTCP(self.port, factory, interface=self.host) - if not reactor.running: - reactor.run() - - -class DieselServer(ServerAdapter): - """Untested.""" - - def run(self, handler): - depr(0, 13, "Diesel is not tested or supported and will be removed.") - from diesel.protocols.wsgi import WSGIApplication - - app = WSGIApplication(handler, port=self.port) - app.run() - - -class GeventServer(ServerAdapter): - """Untested. Options: - - * See gevent.wsgi.WSGIServer() documentation for more options. - """ - - def run(self, handler): - from gevent import local, pywsgi - - if not isinstance(threading.local(), local.local): - msg = "Bottle requires gevent.monkey.patch_all() (before import)" - raise RuntimeError(msg) - if self.quiet: - self.options["log"] = None - address = (self.host, self.port) - server = pywsgi.WSGIServer(address, handler, **self.options) - if "BOTTLE_CHILD" in os.environ: - import signal - - signal.signal(signal.SIGINT, lambda s, f: server.stop()) - server.serve_forever() - - -class GunicornServer(ServerAdapter): - """Untested. See http://gunicorn.org/configure.html for options.""" - - def run(self, handler): - from gunicorn.app.base import BaseApplication - - if self.host.startswith("unix:"): - config = {"bind": self.host} - else: - config = {"bind": "%s:%d" % (self.host, self.port)} - - config.update(self.options) - - class GunicornApplication(BaseApplication): - def load_config(self): - for key, value in config.items(): - self.cfg.set(key, value) - - def load(self): - return handler - - GunicornApplication().run() - - -class EventletServer(ServerAdapter): - """Untested. Options: - - * `backlog` adjust the eventlet backlog parameter which is the maximum - number of queued connections. Should be at least 1; the maximum - value is system-dependent. - * `family`: (default is 2) socket family, optional. See socket - documentation for available families. - """ - - def run(self, handler): - from eventlet import listen, patcher, wsgi - - if not patcher.is_monkey_patched(os): - msg = "Bottle requires eventlet.monkey_patch() (before import)" - raise RuntimeError(msg) - socket_args = {} - for arg in ("backlog", "family"): - try: - socket_args[arg] = self.options.pop(arg) - except KeyError: - pass - address = (self.host, self.port) - try: - wsgi.server(listen(address, **socket_args), handler, log_output=(not self.quiet)) - except TypeError: - # Fallback, if we have old version of eventlet - wsgi.server(listen(address), handler) - - -class BjoernServer(ServerAdapter): - """Fast server written in C: https://github.com/jonashaag/bjoern""" - - def run(self, handler): - from bjoern import run - - run(handler, self.host, self.port, reuse_port=True) - - -class AsyncioServerAdapter(ServerAdapter): - """Extend ServerAdapter for adding custom event loop""" - - def get_event_loop(self): - pass - - -class AiohttpServer(AsyncioServerAdapter): - """Asynchronous HTTP client/server framework for asyncio - https://pypi.python.org/pypi/aiohttp/ - https://pypi.org/project/aiohttp-wsgi/ - """ - - def get_event_loop(self): - import asyncio - - return asyncio.new_event_loop() - - def run(self, handler): - import asyncio - - from aiohttp_wsgi.wsgi import serve - - self.loop = self.get_event_loop() - asyncio.set_event_loop(self.loop) - - if "BOTTLE_CHILD" in os.environ: - import signal - - signal.signal(signal.SIGINT, lambda s, f: self.loop.stop()) - - serve(handler, host=self.host, port=self.port) - - -class AiohttpUVLoopServer(AiohttpServer): - """uvloop - https://github.com/MagicStack/uvloop - """ - - def get_event_loop(self): - import uvloop - - return uvloop.new_event_loop() - - -class AutoServer(ServerAdapter): - """Untested.""" - - adapters = [ - WaitressServer, - PasteServer, - TwistedServer, - CherryPyServer, - CherootServer, - WSGIRefServer, - ] - - def run(self, handler): - for sa in self.adapters: - try: - return sa(self.host, self.port, **self.options).run(handler) - except ImportError: - pass - - -server_names = { - "cgi": CGIServer, - "flup": FlupFCGIServer, - "wsgiref": WSGIRefServer, - "waitress": WaitressServer, - "cherrypy": CherryPyServer, - "cheroot": CherootServer, - "paste": PasteServer, - "fapws3": FapwsServer, - "tornado": TornadoServer, - "gae": AppEngineServer, - "twisted": TwistedServer, - "diesel": DieselServer, - "meinheld": MeinheldServer, - "gunicorn": GunicornServer, - "eventlet": EventletServer, - "gevent": GeventServer, - "bjoern": BjoernServer, - "aiohttp": AiohttpServer, - "uvloop": AiohttpUVLoopServer, - "auto": AutoServer, -} - - -def load(target, **namespace): - """Import a module or fetch an object from a module. - - * ``package.module`` returns `module` as a module object. - * ``pack.mod:name`` returns the module variable `name` from `pack.mod`. - * ``pack.mod:func()`` calls `pack.mod.func()` and returns the result. - - The last form accepts not only function calls, but any type of - expression. Keyword arguments passed to this function are available as - local variables. Example: ``import_string('re:compile(x)', x='[a-z]')`` - """ - module, target = target.split(":", 1) if ":" in target else (target, None) - if module not in sys.modules: - __import__(module) - if not target: - return sys.modules[module] - if target.isalnum(): - return getattr(sys.modules[module], target) - package_name = module.split(".")[0] - namespace[package_name] = sys.modules[package_name] - return eval(f"{module}.{target}", namespace) - - -def load_app(target): - """Load a bottle application from a module and make sure that the import - does not affect the current default application, but returns a separate - application object. See :func:`load` for the target parameter.""" - global NORUN - NORUN, nr_old = True, NORUN - tmp = default_app.push() # Create a new "default application" - try: - rv = load(target) # Import the target module - return rv if callable(rv) else tmp - finally: - default_app.remove(tmp) # Remove the temporary added default application - NORUN = nr_old - - -_debug = debug - - -def run( - app=None, - server="wsgiref", - host="127.0.0.1", - port=8080, - interval=1, - reloader=False, - quiet=False, - plugins=None, - debug=None, - config=None, - **kargs, -): - """Start a server instance. This method blocks until the server terminates. - - :param app: WSGI application or target string supported by - :func:`load_app`. (default: :func:`default_app`) - :param server: Server adapter to use. See :data:`server_names` keys - for valid names or pass a :class:`ServerAdapter` subclass. - (default: `wsgiref`) - :param host: Server address to bind to. Pass ``0.0.0.0`` to listens on - all interfaces including the external one. (default: 127.0.0.1) - :param port: Server port to bind to. Values below 1024 require root - privileges. (default: 8080) - :param reloader: Start auto-reloading server? (default: False) - :param interval: Auto-reloader interval in seconds (default: 1) - :param quiet: Suppress output to stdout and stderr? (default: False) - :param options: Options passed to the server adapter. - """ - if NORUN: - return - if reloader and not os.environ.get("BOTTLE_CHILD"): - import subprocess - - fd, lockfile = tempfile.mkstemp(prefix="bottle.", suffix=".lock") - environ = os.environ.copy() - environ["BOTTLE_CHILD"] = "true" - environ["BOTTLE_LOCKFILE"] = lockfile - args = [sys.executable] + sys.argv - # If a package was loaded with `python -m`, then `sys.argv` needs to be - # restored to the original value, or imports might break. See #1336 - if getattr(sys.modules.get("__main__"), "__package__", None): - args[1:1] = ["-m", sys.modules["__main__"].__package__] - - try: - os.close(fd) # We never write to this file - while os.path.exists(lockfile): - p = subprocess.Popen(args, env=environ) - while p.poll() is None: - os.utime(lockfile, None) # Tell child we are still alive - time.sleep(interval) - if p.returncode == 3: # Child wants to be restarted - continue - sys.exit(p.returncode) - except KeyboardInterrupt: - pass - finally: - if os.path.exists(lockfile): - os.unlink(lockfile) - return - - try: - if debug is not None: - _debug(debug) - app = app or default_app() - if isinstance(app, basestring): - app = load_app(app) - if not callable(app): - raise ValueError("Application is not callable: %r" % app) - - for plugin in plugins or []: - if isinstance(plugin, basestring): - plugin = load(plugin) - app.install(plugin) - - if config: - app.config.update(config) - - if server in server_names: - server = server_names.get(server) - if isinstance(server, basestring): - server = load(server) - if isinstance(server, type): - server = server(host=host, port=port, **kargs) - if not isinstance(server, ServerAdapter): - raise ValueError("Unknown or unsupported server: %r" % server) - - server.quiet = server.quiet or quiet - if not server.quiet: - _stderr( - "Bottle (dbt v{}) server starting up (using {})...".format( - __version__, repr(server) - ) - ) - if server.host.startswith("unix:"): - _stderr("Listening on %s" % server.host) - else: - _stderr("Listening on http://%s:%d/" % (server.host, server.port)) - _stderr("Hit Ctrl-C to quit.\n") - - if reloader: - lockfile = os.environ.get("BOTTLE_LOCKFILE") - bgcheck = FileCheckerThread(lockfile, interval) - with bgcheck: - server.run(app) - if bgcheck.status == "reload": - sys.exit(3) - else: - server.run(app) - except KeyboardInterrupt: - pass - except (SystemExit, MemoryError): - raise - except: - if not reloader: - raise - if not getattr(server, "quiet", quiet): - print_exc() - time.sleep(interval) - sys.exit(3) - - -class FileCheckerThread(threading.Thread): - """Interrupt main-thread as soon as a changed module file is detected, - the lockfile gets deleted or gets too old.""" - - def __init__(self, lockfile, interval): - threading.Thread.__init__(self) - self.daemon = True - self.lockfile, self.interval = lockfile, interval - #: Is one of 'reload', 'error' or 'exit' - self.status = None - - def run(self): - exists = os.path.exists - mtime = lambda p: os.stat(p).st_mtime - files = dict() - - for module in list(sys.modules.values()): - path = getattr(module, "__file__", "") or "" - if path[-4:] in (".pyo", ".pyc"): - path = path[:-1] - if path and exists(path): - files[path] = mtime(path) - - while not self.status: - if not exists(self.lockfile) or mtime(self.lockfile) < time.time() - self.interval - 5: - self.status = "error" - thread.interrupt_main() - for path, lmtime in list(files.items()): - if not exists(path) or mtime(path) > lmtime: - self.status = "reload" - thread.interrupt_main() - break - time.sleep(self.interval) - - def __enter__(self): - self.start() - - def __exit__(self, exc_type, *_): - if not self.status: - self.status = "exit" # silent exit - self.join() - return exc_type is not None and issubclass(exc_type, KeyboardInterrupt) - - -class TemplateError(BottleException): - pass - - -class BaseTemplate: - """Base class and minimal API for template adapters""" - - extensions = ["tpl", "html", "thtml", "stpl"] - settings = {} # used in prepare() - defaults = {} # used in render() - - def __init__(self, source=None, name=None, lookup=None, encoding="utf8", **settings): - """Create a new template. - If the source parameter (str or buffer) is missing, the name argument - is used to guess a template filename. Subclasses can assume that - self.source and/or self.filename are set. Both are strings. - The lookup, encoding and settings parameters are stored as instance - variables. - The lookup parameter stores a list containing directory paths. - The encoding parameter should be used to decode byte strings or files. - The settings parameter contains a dict for engine-specific settings. - """ - self.name = name - self.source = source.read() if hasattr(source, "read") else source - self.filename = source.filename if hasattr(source, "filename") else None - self.lookup = [os.path.abspath(x) for x in lookup] if lookup else [] - self.encoding = encoding - self.settings = self.settings.copy() # Copy from class variable - self.settings.update(settings) # Apply - if not self.source and self.name: - self.filename = self.search(self.name, self.lookup) - if not self.filename: - raise TemplateError("Template %s not found." % repr(name)) - if not self.source and not self.filename: - raise TemplateError("No template specified.") - self.prepare(**self.settings) - - @classmethod - def search(cls, name, lookup=None): - """Search name in all directories specified in lookup. - First without, then with common extensions. Return first hit.""" - if not lookup: - raise depr(0, 12, "Empty template lookup path.", "Configure a template lookup path.") - - if os.path.isabs(name): - raise depr( - 0, - 12, - "Use of absolute path for template name.", - "Refer to templates with names or paths relative to the lookup path.", - ) - - for spath in lookup: - spath = os.path.abspath(spath) + os.sep - fname = os.path.abspath(os.path.join(spath, name)) - if not fname.startswith(spath): - continue - if os.path.isfile(fname): - return fname - for ext in cls.extensions: - if os.path.isfile(f"{fname}.{ext}"): - return f"{fname}.{ext}" - - @classmethod - def global_config(cls, key, *args): - """This reads or sets the global settings stored in class.settings.""" - if args: - cls.settings = cls.settings.copy() # Make settings local to class - cls.settings[key] = args[0] - else: - return cls.settings[key] - - def prepare(self, **options): - """Run preparations (parsing, caching, ...). - It should be possible to call this again to refresh a template or to - update settings. - """ - raise NotImplementedError - - def render(self, *args, **kwargs): - """Render the template with the specified local variables and return - a single byte or unicode string. If it is a byte string, the encoding - must match self.encoding. This method must be thread-safe! - Local variables may be provided in dictionaries (args) - or directly, as keywords (kwargs). - """ - raise NotImplementedError - - -class MakoTemplate(BaseTemplate): - def prepare(self, **options): - from mako.lookup import TemplateLookup - from mako.template import Template - - options.update({"input_encoding": self.encoding}) - options.setdefault("format_exceptions", bool(DEBUG)) - lookup = TemplateLookup(directories=self.lookup, **options) - if self.source: - self.tpl = Template(self.source, lookup=lookup, **options) - else: - self.tpl = Template(uri=self.name, filename=self.filename, lookup=lookup, **options) - - def render(self, *args, **kwargs): - for dictarg in args: - kwargs.update(dictarg) - _defaults = self.defaults.copy() - _defaults.update(kwargs) - return self.tpl.render(**_defaults) - - -class CheetahTemplate(BaseTemplate): - def prepare(self, **options): - from Cheetah.Template import Template - - self.context = threading.local() - self.context.vars = {} - options["searchList"] = [self.context.vars] - if self.source: - self.tpl = Template(source=self.source, **options) - else: - self.tpl = Template(file=self.filename, **options) - - def render(self, *args, **kwargs): - for dictarg in args: - kwargs.update(dictarg) - self.context.vars.update(self.defaults) - self.context.vars.update(kwargs) - out = str(self.tpl) - self.context.vars.clear() - return out - - -class Jinja2Template(BaseTemplate): - def prepare(self, filters=None, tests=None, globals={}, **kwargs): - from jinja2 import Environment, FunctionLoader - - self.env = Environment(loader=FunctionLoader(self.loader), **kwargs) - if filters: - self.env.filters.update(filters) - if tests: - self.env.tests.update(tests) - if globals: - self.env.globals.update(globals) - if self.source: - self.tpl = self.env.from_string(self.source) - else: - self.tpl = self.env.get_template(self.name) - - def render(self, *args, **kwargs): - for dictarg in args: - kwargs.update(dictarg) - _defaults = self.defaults.copy() - _defaults.update(kwargs) - return self.tpl.render(**_defaults) - - def loader(self, name): - if name == self.filename: - fname = name - else: - fname = self.search(name, self.lookup) - if not fname: - return - with open(fname, "rb") as f: - return (f.read().decode(self.encoding), fname, lambda: False) - - -class SimpleTemplate(BaseTemplate): - def prepare(self, escape_func=html_escape, noescape=False, syntax=None, **ka): - self.cache = {} - enc = self.encoding - self._str = lambda x: touni(x, enc) - self._escape = lambda x: escape_func(touni(x, enc)) - self.syntax = syntax - if noescape: - self._str, self._escape = self._escape, self._str - - @cached_property - def co(self): - return compile(self.code, self.filename or "", "exec") - - @cached_property - def code(self): - source = self.source - if not source: - with open(self.filename, "rb") as f: - source = f.read() - try: - source, encoding = touni(source), "utf8" - except UnicodeError: - raise depr(0, 11, "Unsupported template encodings.", "Use utf-8 for templates.") - parser = StplParser(source, encoding=encoding, syntax=self.syntax) - code = parser.translate() - self.encoding = parser.encoding - return code - - def _rebase(self, _env, _name=None, **kwargs): - _env["_rebase"] = (_name, kwargs) - - def _include(self, _env, _name=None, **kwargs): - env = _env.copy() - env.update(kwargs) - if _name not in self.cache: - self.cache[_name] = self.__class__(name=_name, lookup=self.lookup, syntax=self.syntax) - return self.cache[_name].execute(env["_stdout"], env) - - def execute(self, _stdout, kwargs): - env = self.defaults.copy() - env.update(kwargs) - env.update( - { - "_stdout": _stdout, - "_printlist": _stdout.extend, - "include": functools.partial(self._include, env), - "rebase": functools.partial(self._rebase, env), - "_rebase": None, - "_str": self._str, - "_escape": self._escape, - "get": env.get, - "setdefault": env.setdefault, - "defined": env.__contains__, - } - ) - exec(self.co, env) - if env.get("_rebase"): - subtpl, rargs = env.pop("_rebase") - rargs["base"] = "".join(_stdout) # copy stdout - del _stdout[:] # clear stdout - return self._include(env, subtpl, **rargs) - return env - - def render(self, *args, **kwargs): - """Render the template using keyword arguments as local variables.""" - env = {} - stdout = [] - for dictarg in args: - env.update(dictarg) - env.update(kwargs) - self.execute(stdout, env) - return "".join(stdout) - - -class StplSyntaxError(TemplateError): - pass - - -class StplParser: - """Parser for stpl templates.""" - - _re_cache = {} #: Cache for compiled re patterns - - # This huge pile of voodoo magic splits python code into 8 different tokens. - # We use the verbose (?x) regex mode to make this more manageable - - _re_tok = r"""( - [urbURB]* - (?: ''(?!') - |""(?!") - |'{6} - |"{6} - |'(?:[^\\']|\\.)+?' - |"(?:[^\\"]|\\.)+?" - |'{3}(?:[^\\]|\\.|\n)+?'{3} - |"{3}(?:[^\\]|\\.|\n)+?"{3} - ) - )""" - - _re_inl = _re_tok.replace(r"|\n", "") # We re-use this string pattern later - - _re_tok += r""" - # 2: Comments (until end of line, but not the newline itself) - |(\#.*) - - # 3: Open and close (4) grouping tokens - |([\[\{\(]) - |([\]\}\)]) - - # 5,6: Keywords that start or continue a python block (only start of line) - |^([\ \t]*(?:if|for|while|with|try|def|class)\b) - |^([\ \t]*(?:elif|else|except|finally)\b) - - # 7: Our special 'end' keyword (but only if it stands alone) - |((?:^|;)[\ \t]*end[\ \t]*(?=(?:%(block_close)s[\ \t]*)?\r?$|;|\#)) - - # 8: A customizable end-of-code-block template token (only end of line) - |(%(block_close)s[\ \t]*(?=\r?$)) - - # 9: And finally, a single newline. The 10th token is 'everything else' - |(\r?\n) - """ - - # Match the start tokens of code areas in a template - _re_split = r"""(?m)^[ \t]*(\\?)((%(line_start)s)|(%(block_start)s))""" - # Match inline statements (may contain python strings) - _re_inl = r"""%%(inline_start)s((?:%s|[^'"\n])*?)%%(inline_end)s""" % _re_inl - - # add the flag in front of the regexp to avoid Deprecation warning (see Issue #949) - # verbose and dot-matches-newline mode - _re_tok = "(?mx)" + _re_tok - _re_inl = "(?mx)" + _re_inl - - default_syntax = "<% %> % {{ }}" - - def __init__(self, source, syntax=None, encoding="utf8"): - self.source, self.encoding = touni(source, encoding), encoding - self.set_syntax(syntax or self.default_syntax) - self.code_buffer, self.text_buffer = [], [] - self.lineno, self.offset = 1, 0 - self.indent, self.indent_mod = 0, 0 - self.paren_depth = 0 - - def get_syntax(self): - """Tokens as a space separated string (default: <% %> % {{ }})""" - return self._syntax - - def set_syntax(self, syntax): - self._syntax = syntax - self._tokens = syntax.split() - if syntax not in self._re_cache: - names = "block_start block_close line_start inline_start inline_end" - etokens = map(re.escape, self._tokens) - pattern_vars = dict(zip(names.split(), etokens)) - patterns = (self._re_split, self._re_tok, self._re_inl) - patterns = [re.compile(p % pattern_vars) for p in patterns] - self._re_cache[syntax] = patterns - self.re_split, self.re_tok, self.re_inl = self._re_cache[syntax] - - syntax = property(get_syntax, set_syntax) - - def translate(self): - if self.offset: - raise RuntimeError("Parser is a one time instance.") - while True: - m = self.re_split.search(self.source, pos=self.offset) - if m: - text = self.source[self.offset : m.start()] - self.text_buffer.append(text) - self.offset = m.end() - if m.group(1): # Escape syntax - line, sep, _ = self.source[self.offset :].partition("\n") - self.text_buffer.append( - self.source[m.start() : m.start(1)] + m.group(2) + line + sep - ) - self.offset += len(line + sep) - continue - self.flush_text() - self.offset += self.read_code( - self.source[self.offset :], multiline=bool(m.group(4)) - ) - else: - break - self.text_buffer.append(self.source[self.offset :]) - self.flush_text() - return "".join(self.code_buffer) - - def read_code(self, pysource, multiline): - code_line, comment = "", "" - offset = 0 - while True: - m = self.re_tok.search(pysource, pos=offset) - if not m: - code_line += pysource[offset:] - offset = len(pysource) - self.write_code(code_line.strip(), comment) - break - code_line += pysource[offset : m.start()] - offset = m.end() - _str, _com, _po, _pc, _blk1, _blk2, _end, _cend, _nl = m.groups() - if self.paren_depth > 0 and (_blk1 or _blk2): # a if b else c - code_line += _blk1 or _blk2 - continue - if _str: # Python string - code_line += _str - elif _com: # Python comment (up to EOL) - comment = _com - if multiline and _com.strip().endswith(self._tokens[1]): - multiline = False # Allow end-of-block in comments - elif _po: # open parenthesis - self.paren_depth += 1 - code_line += _po - elif _pc: # close parenthesis - if self.paren_depth > 0: - # we could check for matching parentheses here, but it's - # easier to leave that to python - just check counts - self.paren_depth -= 1 - code_line += _pc - elif _blk1: # Start-block keyword (if/for/while/def/try/...) - code_line = _blk1 - self.indent += 1 - self.indent_mod -= 1 - elif _blk2: # Continue-block keyword (else/elif/except/...) - code_line = _blk2 - self.indent_mod -= 1 - elif _cend: # The end-code-block template token (usually '%>') - if multiline: - multiline = False - else: - code_line += _cend - elif _end: - self.indent -= 1 - self.indent_mod += 1 - else: # \n - self.write_code(code_line.strip(), comment) - self.lineno += 1 - code_line, comment, self.indent_mod = "", "", 0 - if not multiline: - break - - return offset - - def flush_text(self): - text = "".join(self.text_buffer) - del self.text_buffer[:] - if not text: - return - parts, pos, nl = [], 0, "\\\n" + " " * self.indent - for m in self.re_inl.finditer(text): - prefix, pos = text[pos : m.start()], m.end() - if prefix: - parts.append(nl.join(map(repr, prefix.splitlines(True)))) - if prefix.endswith("\n"): - parts[-1] += nl - parts.append(self.process_inline(m.group(1).strip())) - if pos < len(text): - prefix = text[pos:] - lines = prefix.splitlines(True) - if lines[-1].endswith("\\\\\n"): - lines[-1] = lines[-1][:-3] - elif lines[-1].endswith("\\\\\r\n"): - lines[-1] = lines[-1][:-4] - parts.append(nl.join(map(repr, lines))) - code = "_printlist((%s,))" % ", ".join(parts) - self.lineno += code.count("\n") + 1 - self.write_code(code) - - @staticmethod - def process_inline(chunk): - if chunk[0] == "!": - return "_str(%s)" % chunk[1:] - return "_escape(%s)" % chunk - - def write_code(self, line, comment=""): - code = " " * (self.indent + self.indent_mod) - code += line.lstrip() + comment + "\n" - self.code_buffer.append(code) - - -def template(*args, **kwargs): - """ - Get a rendered template as a string iterator. - You can use a name, a filename or a template string as first parameter. - Template rendering arguments can be passed as dictionaries - or directly (as keyword arguments). - """ - tpl = args[0] if args else None - for dictarg in args[1:]: - kwargs.update(dictarg) - adapter = kwargs.pop("template_adapter", SimpleTemplate) - lookup = kwargs.pop("template_lookup", TEMPLATE_PATH) - tplid = (id(lookup), tpl) - if tplid not in TEMPLATES or DEBUG: - settings = kwargs.pop("template_settings", {}) - if isinstance(tpl, adapter): - TEMPLATES[tplid] = tpl - if settings: - TEMPLATES[tplid].prepare(**settings) - elif "\n" in tpl or "{" in tpl or "%" in tpl or "$" in tpl: - TEMPLATES[tplid] = adapter(source=tpl, lookup=lookup, **settings) - else: - TEMPLATES[tplid] = adapter(name=tpl, lookup=lookup, **settings) - if not TEMPLATES[tplid]: - abort(500, "Template (%s) not found" % tpl) - return TEMPLATES[tplid].render(kwargs) - - -mako_template = functools.partial(template, template_adapter=MakoTemplate) -cheetah_template = functools.partial(template, template_adapter=CheetahTemplate) -jinja2_template = functools.partial(template, template_adapter=Jinja2Template) - - -def view(tpl_name, **defaults): - """Decorator: renders a template for a handler. - The handler can control its behavior like that: - - - return a dict of template vars to fill out the template - - return something other than a dict and the view decorator will not - process the template, but return the handler result as is. - This includes returning a HTTPResponse(dict) to get, - for instance, JSON with autojson or other castfilters. - """ - - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kwargs): - result = func(*args, **kwargs) - if isinstance(result, (dict, DictMixin)): - tplvars = defaults.copy() - tplvars.update(result) - return template(tpl_name, **tplvars) - elif result is None: - return template(tpl_name, **defaults) - return result - - return wrapper - - return decorator - - -mako_view = functools.partial(view, template_adapter=MakoTemplate) -cheetah_view = functools.partial(view, template_adapter=CheetahTemplate) -jinja2_view = functools.partial(view, template_adapter=Jinja2Template) - -TEMPLATE_PATH = ["./", "./views/"] -TEMPLATES = {} -DEBUG = False -NORUN = False # If set, run() does nothing. Used by load_app() - -#: A dict to map HTTP status codes (e.g. 404) to phrases (e.g. 'Not Found') -HTTP_CODES = httplib.responses.copy() -HTTP_CODES[418] = "I'm a teapot" # RFC 2324 -HTTP_CODES[428] = "Precondition Required" -HTTP_CODES[429] = "Too Many Requests" -HTTP_CODES[431] = "Request Header Fields Too Large" -HTTP_CODES[451] = "Unavailable For Legal Reasons" # RFC 7725 -HTTP_CODES[511] = "Network Authentication Required" -_HTTP_STATUS_LINES = {k: "%d %s" % (k, v) for (k, v) in HTTP_CODES.items()} - -#: The default template used for error pages. Override with @error() -ERROR_PAGE_TEMPLATE = ( - """ -%%try: - %%from %s import DEBUG, request - - - - Error: {{e.status}} - - - -

Error: {{e.status}}

-

Sorry, the requested URL {{repr(request.url)}} - caused an error:

-
{{e.body}}
- %%if DEBUG and e.exception: -

Exception:

- %%try: - %%exc = repr(e.exception) - %%except: - %%exc = '' %% type(e.exception).__name__ - %%end -
{{exc}}
- %%end - %%if DEBUG and e.traceback: -

Traceback:

-
{{e.traceback}}
- %%end - - -%%except ImportError: - ImportError: Could not generate the error page. Please add bottle to - the import path. -%%end -""" - % __name__ -) - -#: A thread-safe instance of :class:`LocalRequest`. If accessed from within a -#: request callback, this instance always refers to the *current* request -#: (even on a multi-threaded server). -request = LocalRequest() - -#: A thread-safe instance of :class:`LocalResponse`. It is used to change the -#: HTTP response for the *current* request. -response = LocalResponse() - -#: A thread-safe namespace. Not used by Bottle. -local = threading.local() - -# Initialize app stack (create first empty Bottle app now deferred until needed) -# BC: 0.6.4 and needed for run() -apps = app = default_app = AppStack() - -#: A virtual package that redirects import statements. -#: Example: ``import bottle.ext.sqlite`` actually imports `bottle_sqlite`. -ext = _ImportRedirect( - "bottle.ext" if __name__ == "__main__" else __name__ + ".ext", "bottle_%s" -).module - - -def _main(argv): # pragma: no coverage - args, parser = _cli_parse(argv) - - def _cli_error(cli_msg): - parser.print_help() - _stderr("\nError: %s\n" % cli_msg) - sys.exit(1) - - if args.version: - print("Bottle %s" % __version__) - sys.exit(0) - if not args.app: - _cli_error("No application entry point specified.") - - sys.path.insert(0, ".") - sys.modules.setdefault("bottle", sys.modules["__main__"]) - - host, port = (args.bind or "localhost"), 8080 - if ":" in host and host.rfind("]") < host.rfind(":"): - host, port = host.rsplit(":", 1) - host = host.strip("[]") - - config = ConfigDict() - - for cfile in args.conf or []: - try: - if cfile.endswith(".json"): - with open(cfile, "rb") as fp: - config.load_dict(json_loads(fp.read())) - else: - config.load_config(cfile) - except configparser.Error as parse_error: - _cli_error(parse_error) - except OSError: - _cli_error("Unable to read config file %r" % cfile) - except (UnicodeError, TypeError, ValueError) as error: - _cli_error(f"Unable to parse config file {cfile!r}: {error}") - - for cval in args.param or []: - if "=" in cval: - config.update((cval.split("=", 1),)) - else: - config[cval] = True - - run( - args.app, - host=host, - port=int(port), - server=args.server, - reloader=args.reload, - plugins=args.plugin, - debug=args.debug, - config=config, - ) - - -# endregion - -# region: dbt-core-interface server - -SERVER_MUTEX = threading.Lock() - - -@dataclass -class ServerRunResult: - """The result of running a query.""" - - column_names: List[str] - rows: List[List[Any]] - raw_code: str - executed_code: str - - -@dataclass -class ServerCompileResult: - """The result of compiling a project.""" - - result: str - - -@dataclass -class ServerResetResult: - """The result of resetting the Server database.""" - - result: str - - -@dataclass -class ServerRegisterResult: - """The result of registering a project.""" - - added: str - projects: List[str] - - -@dataclass -class ServerUnregisterResult: - """The result of unregistering a project.""" - - removed: str - projects: List[str] - - -class ServerErrorCode(Enum): - """The error codes that can be returned by the Server API.""" - - FailedToReachServer = -1 - CompileSqlFailure = 1 - ExecuteSqlFailure = 2 - ProjectParseFailure = 3 - ProjectNotRegistered = 4 - ProjectHeaderNotSupplied = 5 - - -@dataclass -class ServerError: - """An error that can be serialized to JSON.""" - - code: ServerErrorCode - message: str - data: Dict[str, Any] - - -@dataclass -class ServerErrorContainer: - """A container for an ServerError that can be serialized to JSON.""" - - error: ServerError - - -def server_serializer(o): - """Encode JSON. Handles server-specific types.""" - if isinstance(o, decimal.Decimal): - return float(o) - if isinstance(o, ServerErrorCode): - return o.value - return str(o) - - -def remove_comments(string) -> str: # noqa: C901 - """Remove comments from a string.""" - pattern = r"(\".*?\"|\'.*?\')|(/\*.*?\*/|//[^\r\n]*$)" - # first group captures quoted strings (double or single) - # second group captures comments (//single-line or /* multi-line */) - regex = re.compile(pattern, re.MULTILINE | re.DOTALL) - - def _replacer(match): - # if the 2nd group (capturing comments) is not None, - # it means we have captured a non-quoted (real) comment string. - if match.group(2) is not None: - return "" # so we will return empty to remove the comment - else: # otherwise, we will return the 1st group - return match.group(1) # captured quoted-string - - multiline_comments_removed = regex.sub(_replacer, string) + "\n" - output = "" - for line in multiline_comments_removed.splitlines(keepends=True): - if line.strip().startswith("--"): - continue - s_quote_c = 0 - d_quote_c = 0 - cmt_dash = 0 - split_ix = -1 - for i, c in enumerate(line): - if cmt_dash >= 2: - # found 2 sequential dashes, split here - split_ix = i - cmt_dash - break - if c == '"': - # inc quote count - d_quote_c += 1 - elif c == "'": - # inc quote count - s_quote_c += 1 - elif c == "-" and d_quote_c % 2 == 0 and s_quote_c % 2 == 0: - # dash and not in a quote, inc dash count - cmt_dash += 1 - continue - # reset dash count each iteration - cmt_dash = 0 - if split_ix > 0: - output += line[:split_ix] + "\n" - else: - output += line - return "".join(output) - - -class DbtInterfaceServerPlugin: - """Used to inject the dbt-core-interface runner into the request context.""" - - name = "dbt-interface-server" - api = 2 - - def __init__(self, runner: Optional[DbtProject] = None): - """Initialize the plugin with the runner to inject into the request context.""" - self.runners = DbtProjectContainer() - if runner: - self.runners.add_parsed_project(runner) - - def apply(self, callback, route): - """Apply the plugin to the route callback.""" - - def wrapper(*args, **kwargs): - start = time.time() - body = callback(*args, **kwargs, runners=self.runners) - end = time.time() - response.headers["X-dbt-Exec-Time"] = str(end - start) - return body - - return wrapper - - -@route("/run", method="POST") -def run_sql(runners: DbtProjectContainer) -> Union[ServerRunResult, ServerErrorContainer, str]: - """Run SQL against a dbt project.""" - # Project Support - project_runner = ( - runners.get_project(request.get_header("X-dbt-Project")) or runners.get_default_project() - ) - if not project_runner: - response.status = 400 - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectNotRegistered, - message=( - "Project is not registered. Make a POST request to the /register endpoint" - " first to register a runner" - ), - data={"registered_projects": runners.registered_projects()}, - ) - ) - ) - - # Query Construction - query = remove_comments(request.body.read().decode("utf-8")) - limit = request.query.get("limit", 200) - query_with_limit = ( - # we need to support `TOP` too - f"select * from ({query}) as __server_query limit {limit}" - ) - - try: - result = project_runner.execute_code(query_with_limit) - except Exception as execution_err: - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ExecuteSqlFailure, - message=str(execution_err), - data=execution_err.__dict__, - ) - ) - ) - - # Re-extract compiled query and return data structure - compiled_query = re.search( - r"select \* from \(([\w\W]+)\) as __server_query", result.compiled_code - ).groups()[0] - return asdict( - ServerRunResult( - rows=[list(row) for row in result.table.rows], - column_names=result.table.column_names, - executed_code=compiled_query.strip(), - raw_code=query, - ) - ) - - -@route("/compile", method="POST") -def compile_sql( - runners: DbtProjectContainer, -) -> Union[ServerCompileResult, ServerErrorContainer, str]: - """Compiles a SQL query.""" - # Project Support - project_runner = ( - runners.get_project(request.get_header("X-dbt-Project")) or runners.get_default_project() - ) - if not project_runner: - response.status = 400 - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectNotRegistered, - message=( - "Project is not registered. Make a POST request to the /register endpoint" - " first to register a runner" - ), - data={"registered_projects": runners.registered_projects()}, - ) - ) - ) - - # Query Compilation - query: str = request.body.read().decode("utf-8").strip() - if has_jinja(query): - try: - compiled_query = project_runner.compile_code(query).compiled_code - except Exception as compile_err: - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.CompileSqlFailure, - message=str(compile_err), - data=compile_err.__dict__, - ) - ) - ) - else: - compiled_query = query - - return asdict(ServerCompileResult(result=compiled_query)) - - -@route(["/parse", "/reset"]) -def reset(runners: DbtProjectContainer) -> Union[ServerResetResult, ServerErrorContainer, str]: - """Reset the runner and clear the cache.""" - # Project Support - project_runner = ( - runners.get_project(request.get_header("X-dbt-Project")) or runners.get_default_project() - ) - if not project_runner: - response.status = 400 - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectNotRegistered, - message=( - "Project is not registered. Make a POST request to the /register endpoint" - " first to register a runner" - ), - data={"registered_projects": runners.registered_projects()}, - ) - ) - ) - - # Determines if we should clear caches and reset config before re-seeding runner - reset = str(request.query.get("reset", "false")).lower() == "true" - - # Get targets - old_target = getattr(project_runner.base_config, "target", project_runner.config.target_name) - new_target = request.query.get("target", old_target) - - if not reset and old_target == new_target: - # Async (target same) - if SERVER_MUTEX.acquire(blocking=False): - LOGGER.debug("Mutex locked") - parse_job = threading.Thread( - target=_reset, args=(project_runner, reset, old_target, new_target) - ) - parse_job.start() - return asdict(ServerResetResult(result="Initializing project parsing")) - else: - LOGGER.debug("Mutex is locked, reparse in progress") - return asdict(ServerResetResult(result="Currently reparsing project")) - else: - # Sync (target changed or reset is true) - if SERVER_MUTEX.acquire(blocking=old_target != new_target): - LOGGER.debug("Mutex locked") - return asdict(_reset(project_runner, reset, old_target, new_target)) - else: - LOGGER.debug("Mutex is locked, reparse in progress") - return asdict(ServerResetResult(result="Currently reparsing project")) - - -def _reset( - runner: DbtProject, reset: bool, old_target: str, new_target: str -) -> Union[ServerResetResult, ServerErrorContainer]: - """Reset the project runner. - - Can be called asynchronously or synchronously. - """ - target_did_change = old_target != new_target - try: - runner.base_config.target = new_target - LOGGER.debug("Starting reparse") - runner.safe_parse_project(reinit=reset or target_did_change) - except Exception as reparse_err: - LOGGER.debug("Reparse error") - runner.base_config.target = old_target - rv = ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectParseFailure, - message=str(reparse_err), - data=reparse_err.__dict__, - ) - ) - else: - LOGGER.debug("Reparse success") - rv = ServerResetResult( - result=( - f"Profile target changed from {old_target} to {new_target}!" - if target_did_change - else f"Reparsed project with profile {old_target}!" - ) - ) - finally: - LOGGER.debug("Unlocking mutex") - SERVER_MUTEX.release() - return rv - - -@route("/register", method="POST") -def register(runners: DbtProjectContainer) -> Union[ServerResetResult, ServerErrorContainer, str]: - """Register a new project runner.""" - # Project Support - project = request.get_header("X-dbt-Project") - if not project: - response.status = 400 - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectHeaderNotSupplied, - message=( - "Project header `X-dbt-Project` was not supplied but is required for this" - " endpoint" - ), - data=dict(request.headers), - ) - ) - ) - if project in runners: - # Idempotent - return asdict(ServerRegisterResult(added=project, projects=runners.registered_projects())) - - # Inputs - project_dir = request.json["project_dir"] - profiles_dir = request.json["profiles_dir"] - target = request.json.get("target") - - try: - new_runner = DbtProject( - project_dir=project_dir, - profiles_dir=profiles_dir, - target=target, - ) - except Exception as init_err: - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectParseFailure, - message=str(init_err), - data=init_err.__dict__, - ) - ) - ) - - runners[project] = new_runner - runners.add_parsed_project - return asdict(ServerRegisterResult(added=project, projects=runners.registered_projects())) - - -@route("/unregister", method="POST") -def unregister(runners: DbtProjectContainer) -> Union[ServerResetResult, ServerErrorContainer, str]: - """Unregister a project runner from the server.""" - # Project Support - project = request.get_header("X-dbt-Project") - if not project: - response.status = 400 - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectHeaderNotSupplied, - message=( - "Project header `X-dbt-Project` was not supplied but is required for this" - " endpoint" - ), - data=dict(request.headers), - ) - ) - ) - if project not in runners: - response.status = 400 - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectNotRegistered, - message=( - "Project is not registered. Make a POST request to the /register endpoint" - " first to register a runner" - ), - data={"registered_projects": runners.registered_projects()}, - ) - ) - ) - runners.drop_project(project) - return asdict(ServerUnregisterResult(removed=project, projects=runners.registered_projects())) - - -@route("/v1/info", methods="GET") -def trino_info(runners: DbtProjectContainer): - """Trino info endpoint.""" - return { - "coordinator": {}, - "workers": [], - "memory": {}, - "jvm": {}, - "system": {}, - } - - -# TODO: These are here while we map agate types to trino types. -# public final class ClientStandardTypes -# { -# public static final String BIGINT = "bigint"; -# public static final String INTEGER = "integer"; -# public static final String SMALLINT = "smallint"; -# public static final String TINYINT = "tinyint"; -# public static final String BOOLEAN = "boolean"; -# public static final String DATE = "date"; -# public static final String DECIMAL = "decimal"; -# public static final String REAL = "real"; -# public static final String DOUBLE = "double"; -# public static final String HYPER_LOG_LOG = "HyperLogLog"; -# public static final String QDIGEST = "qdigest"; -# public static final String P4_HYPER_LOG_LOG = "P4HyperLogLog"; -# public static final String INTERVAL_DAY_TO_SECOND = "interval day to second"; -# public static final String INTERVAL_YEAR_TO_MONTH = "interval year to month"; -# public static final String TIMESTAMP = "timestamp"; -# public static final String TIMESTAMP_WITH_TIME_ZONE = "timestamp with time zone"; -# public static final String TIME = "time"; -# public static final String TIME_WITH_TIME_ZONE = "time with time zone"; -# public static final String VARBINARY = "varbinary"; -# public static final String VARCHAR = "varchar"; -# public static final String CHAR = "char"; -# public static final String ROW = "row"; -# public static final String ARRAY = "array"; -# public static final String MAP = "map"; -# public static final String JSON = "json"; -# public static final String IPADDRESS = "ipaddress"; -# public static final String UUID = "uuid"; -# public static final String GEOMETRY = "Geometry"; -# public static final String SPHERICAL_GEOGRAPHY = "SphericalGeography"; -# public static final String BING_TILE = "BingTile"; -# private ClientStandardTypes() {} -# } - -# TODO: This is just a note to acknowledge that we cannot use the default trino catalog -# yet some integrations may expect this type of query to succeed. -# select schema_name AS label, -# schema_name AS schema, -# 'connection.schema' as type, -# 'group-by-ref-type' as iconId, -# '' as database -# from .information_schema.schemata - - -@route("/v1/statement", method="POST") -def trino_statement(runners: DbtProjectContainer): - """Trino statement endpoint. - - This endpoint is used to execute queries and return the results. - It is very minimal right now. The only purpose is to proxy SELECT queries - to dbt from a JDBC and return the results to the JDBC. - """ - from agate import data_types - - # User Support - _user = request.headers.get("X-Presto-User", "default") - # Project Support - project_runner = ( - runners.get_project(request.get_header("X-dbt-Project")) or runners.get_default_project() - ) - if not project_runner: - return { - "errorName": "ProjectNotRegistered", - "errorType": "USER_ERROR", - "errorLocation": {"lineNumber": 1, "columnNumber": 1}, - "error": "Project is not registered. Make a POST request to the /register endpoint", - } - query = request.body.read().decode("utf-8") - res = project_runner.execute_code(query) - columns = [] - for column in res.table.columns: - if isinstance(column.data_type, data_types.Text): - columns += [ - { - "name": column.name, - "type": "varchar", - "typeSignature": { - "rawType": "varchar", - "arguments": [{"kind": "LONG_LITERAL", "value": 255}], - }, - } - ] - elif isinstance(column.data_type, data_types.Number): - columns += [ - { - "name": column.name, - "type": "bigint", - "typeSignature": {"rawType": "bigint", "arguments": []}, - } - ] - elif isinstance(column.data_type, data_types.Boolean): - columns += [ - { - "name": column.name, - "type": "boolean", - "typeSignature": {"rawType": "boolean", "arguments": []}, - } - ] - elif isinstance(column.data_type, data_types.Date): - columns += [ - { - "name": column.name, - "type": "date", - "typeSignature": {"rawType": "date", "arguments": []}, - } - ] - elif isinstance(column.data_type, data_types.DateTime): - columns += [ - { - "name": column.name, - "type": "timestamp", - "typeSignature": {"rawType": "timestamp", "arguments": []}, - } - ] - elif isinstance(column.data_type, data_types.TimeDelta): - columns += [ - { - "name": column.name, - "type": "interval day to second", - "typeSignature": { - "rawType": "interval day to second", - "arguments": [], - }, - } - ] - else: - columns += [ - { - "name": column.name, - "type": "varchar", - "typeSignature": { - "rawType": "varchar", - "arguments": [{"kind": "LONG_LITERAL", "value": 255}], - }, - } - ] - return { - "id": "someId", - # TODO: this should not be static - "infoUri": "http://localhost:8581/v1/info", - "columns": columns, - "data": [list(row) for row in res.table.rows], - "stats": { - "state": "FINISHED", - "nodes": 1, - }, - } - - -@route(["/health", "/api/health"], methods="GET") -def health_check(runners: DbtProjectContainer) -> dict: - """Health check endpoint.""" - # Project Support - project_runner = ( - runners.get_project(request.get_header("X-dbt-Project")) or runners.get_default_project() - ) - if not project_runner: - response.status = 400 - return asdict( - ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectNotRegistered, - message=( - "Project is not registered. Make a POST request to the /register endpoint" - " first to register a runner" - ), - data={"registered_projects": runners.registered_projects()}, - ) - ) - ) - return { - "result": { - "status": "ready", - "project_name": project_runner.config.project_name, - "target_name": project_runner.config.target_name, - "profile_name": project_runner.config.project_name, - "logs": project_runner.config.log_path, - "timestamp": str(datetime.utcnow()), - "error": None, - }, - "id": str(uuid.uuid4()), - "dbt-interface-server": __name__, - } - - -ServerPlugin = DbtInterfaceServerPlugin() -install(ServerPlugin) -install(JSONPlugin(json_dumps=lambda body: json.dumps(body, default=server_serializer))) - - -def run_server(runner: Optional[DbtProject] = None, host="localhost", port=8581): - """Run the dbt core interface server. - - See supported servers below. By default, the server will run with the - `WSGIRefServer` which is a pure Python server. If you want to use a different server, - you will need to install the dependencies for that server. - - (CGIServer, FlupFCGIServer, WSGIRefServer, WaitressServer, - CherryPyServer, CherootServer, PasteServer, FapwsServer, - TornadoServer, AppEngineServer, TwistedServer, DieselServer, - MeinheldServer, GunicornServer, EventletServer, GeventServer, - BjoernServer, AiohttpServer, AiohttpUVLoopServer, AutoServer) - """ - if runner: - ServerPlugin.runners.add_parsed_project(runner) - run(host=host, port=port) - - -# endregion - -# region: dbt-core-interface data diffs - -try: - # TODO: we can drop this and use subprocesses to run git commands - from git import Repo -except ImportError: - pass -else: - from pathlib import Path - - def build_diff_queries(model: str, runner: DbtProject) -> Tuple[str, str]: - """Leverage git to build two temporary tables for diffing the results of a query throughout a change.""" - # Resolve git node - node = runner.get_ref_node(model) - dbt_path = Path(node.root_path) - repo = Repo(dbt_path, search_parent_directories=True) - t = next(Path(repo.working_dir).rglob(node.original_file_path)).relative_to( - repo.working_dir - ) - sha = repo.head.object.hexsha - target = repo.head.object.tree[str(t)] - - # Create original node - git_node_name = "z_" + sha[-7:] - original_node = runner.get_server_node( - target.data_stream.read().decode("utf-8"), git_node_name - ) - - # Alias changed node - changed_node = node - - # Compile models - original_node = runner.compile_node(original_node) - changed_node = runner.compile_node(changed_node) - - return original_node.compiled_sql, changed_node.compiled_sql - - def build_diff_tables(model: str, runner: DbtProject) -> Tuple["BaseRelation", "BaseRelation"]: - """Leverage git to build two temporary tables for diffing the results of a query throughout a change.""" - # Resolve git node - node = runner.get_ref_node(model) - dbt_path = Path(node.root_path) - repo = Repo(dbt_path, search_parent_directories=True) - t = next(Path(repo.working_dir).rglob(node.original_file_path)).relative_to( - repo.working_dir - ) - sha = repo.head.object.hexsha - target = repo.head.object.tree[str(t)] - - # Create original node - git_node_name = "z_" + sha[-7:] - original_node = runner.get_server_node( - target.data_stream.read().decode("utf-8"), git_node_name - ) - - # Alias changed node - changed_node = node - - # Compile models - original_node = runner.compile_node(original_node).node - changed_node = runner.compile_node(changed_node).node - - # Lookup and resolve original ref based on git sha - git_node_parts = original_node.database, "dbt_diff", git_node_name - ref_a, did_exist = runner.get_or_create_relation(*git_node_parts) - if not did_exist: - LOGGER.info("Creating new relation for %s", ref_a) - with runner.adapter.connection_named("dbt-osmosis"): - runner.execute_macro( - "create_schema", - kwargs={"relation": ref_a}, - ) - runner.execute_macro( - "create_table_as", - kwargs={ - "sql": original_node.compiled_sql, - "relation": ref_a, - "temporary": True, - }, - run_compiled_sql=True, - ) - - # Resolve modified fake ref based on hash of it compiled SQL - temp_node_name = ( - "z_" - + hashlib.md5( - changed_node.compiled_sql.encode("utf-8"), - usedforsecurity=False, - ).hexdigest()[-7:] - ) - git_node_parts = original_node.database, "dbt_diff", temp_node_name - ref_b, did_exist = runner.get_or_create_relation(*git_node_parts) - if not did_exist: - ref_b = runner.adapter.Relation.create(*git_node_parts) - LOGGER.info("Creating new relation for %s", ref_b) - with runner.adapter.connection_named("dbt-osmosis"): - runner.execute_macro( - "create_schema", - kwargs={"relation": ref_b}, - ) - runner.execute_macro( - "create_table_as", - kwargs={ - "sql": original_node.compiled_sql, - "relation": ref_b, - "temporary": True, - }, - run_compiled_sql=True, - ) - - return ref_a, ref_b - - def diff_tables( - ref_a: "BaseRelation", - ref_b: "BaseRelation", - pk: str, - runner: DbtProject, - aggregate: bool = True, - ) -> "Table": - """Given two relations, compare the results and return a table of the differences.""" - LOGGER.info("Running diff") - _, table = runner.adapter_execute( - runner.execute_macro( - ( - "_dbt_osmosis_compare_relations_agg" - if aggregate - else "_dbt_osmosis_compare_relations" - ), - kwargs={ - "a_relation": ref_a, - "b_relation": ref_b, - "primary_key": pk, - }, - ), - auto_begin=True, - fetch=True, - ) - return table - - def diff_queries( - sql_a: str, sql_b: str, pk: str, runner: DbtProject, aggregate: bool = True - ) -> "Table": - """Given two queries, compare the results and return a table of the differences.""" - LOGGER.info("Running diff") - _, table = runner.adapter_execute( - runner.execute_macro( - "_dbt_osmosis_compare_queries_agg" if aggregate else "_dbt_osmosis_compare_queries", - kwargs={ - "a_query": sql_a, - "b_query": sql_b, - "primary_key": pk, - }, - ), - auto_begin=True, - fetch=True, - ) - return table - - def diff_and_print_to_console( - model: str, - pk: str, - runner: DbtProject, - make_temp_tables: bool = False, - agg: bool = True, - output: str = "table", - ) -> None: - """Compare two tables and print the results to the console.""" - import agate - - if make_temp_tables: - table = diff_tables(*build_diff_tables(model, runner), pk, runner, agg) - else: - table = diff_queries(*build_diff_queries(model, runner), pk, runner, agg) - print("") - output = output.lower() - if output == "table": - table.print_table() - elif output in ("chart", "bar"): - if not agg: - LOGGER.warn( - "Cannot render output format chart with --no-agg option, defaulting to table" - ) - table.print_table() - else: - _table = table.compute( - [ - ( - "in_original, in_changed", - agate.Formula(agate.Text(), lambda r: "%(in_a)s, %(in_b)s" % r), - ) - ] - ) - _table.print_bars( - label_column_name="in_original, in_changed", value_column_name="count" - ) - elif output == "csv": - table.to_csv("dbt-osmosis-diff.csv") - else: - LOGGER.warn("No such output format %s, defaulting to table", output) - table.print_table() - - -# endregion - -if __name__ == "__main__": - import argparse - - parser = argparse.ArgumentParser( - description="Run the dbt interface server. Defaults to the WSGIRefServer" - ) - parser.add_argument( - "--host", - default="localhost", - help="The host to run the server on. Defaults to localhost", - ) - parser.add_argument( - "--port", - default=8581, - help="The port to run the server on. Defaults to 8581", - ) - parser.add_argument( - "--project", - default=None, - help="The path to the dbt project to run. Defaults to None", - ) - args = parser.parse_args() - if args.project: - run_server(DbtProject.from_path(args.project), host=args.host, port=args.port) - else: - run_server(host=args.host, port=args.port) From 5ebba7f18206442a7644c922f18302630ef5fe7f Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 12:34:43 -0700 Subject: [PATCH 11/46] wip: continue working on functional rewrite --- src/dbt_osmosis/core/osmosis.py | 169 +++++++++++++++++--------------- 1 file changed, 88 insertions(+), 81 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 66af45de..b59e6b97 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -38,7 +38,10 @@ SeedNode, SourceDefinition, ) -from dbt.contracts.results import CatalogArtifact, CatalogKey, CatalogTable, ColumnMetadata +from dbt.contracts.results import CatalogArtifact, ColumnMetadata +from dbt.contracts.results import ( + CatalogKey as TableRef, +) from dbt.mp_context import get_mp_context from dbt.node_types import NodeType from dbt.parser.manifest import ManifestLoader, process_node @@ -339,6 +342,7 @@ class YamlRefactorContext: ) _mutation_count: int = field(default=0, init=False) + _catalog: CatalogArtifact | None = field(default=None, init=False) def register_mutations(self, count: int) -> None: """Increment the mutation count by a specified amount.""" @@ -367,6 +371,18 @@ def skip_patterns(self) -> list[str]: defs = self.project.config.vars.to_dict().get("dbt-osmosis", {}).copy() return defs.pop(SKIP_PATTERNS, []) + def read_catalog(self) -> CatalogArtifact | None: + """Read the catalog file if it exists.""" + if self._catalog: + return self._catalog + if not self.settings.catalog_file: + return None + fp = Path(self.settings.catalog_file) + if not fp.exists(): + return None + self._catalog = CatalogArtifact.from_dict(json.loads(fp.read_text())) + return self._catalog + def __post_init__(self) -> None: if EMPTY_STRING not in self.placeholders: self.placeholders = (EMPTY_STRING, *self.placeholders) @@ -501,78 +517,59 @@ def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: return col.dtype -def get_catalog_key_for_node(node: ResultNode) -> CatalogKey: - """Make an appropriate catalog key for a dbt node.""" - if node.resource_type == NodeType.Source: - return CatalogKey(node.database, node.schema, node.identifier or node.name) - return CatalogKey(node.database, node.schema, node.alias or node.name) +def get_table_ref(node: ResultNode | BaseRelation) -> TableRef: + """Make an appropriate table ref for a dbt node or relation.""" + if isinstance(node, BaseRelation): + assert node.schema, "Schema must be set for a BaseRelation to generate a TableRef" + assert node.identifier, "Identifier must be set for a BaseRelation to generate a TableRef" + return TableRef(node.database, node.schema, node.identifier) + elif node.resource_type == NodeType.Source: + return TableRef(node.database, node.schema, node.identifier or node.name) + else: + return TableRef(node.database, node.schema, node.name) -def get_columns(context: YamlRefactorContext, key: CatalogKey) -> dict[str, ColumnMetadata]: +def get_columns(context: YamlRefactorContext, ref: TableRef) -> dict[str, ColumnMetadata]: """Equivalent to get_columns_meta in old code but directly referencing a key, not a node.""" normalized_cols = OrderedDict() - skip_patterns = context.skip_patterns - catalog = None - if context.settings.catalog_file: - # TODO: no reason to re-read this file on every call - path = Path(context.settings.catalog_file) - if path.is_file(): - catalog = CatalogArtifact.from_dict(json.loads(path.read_text())) - - if catalog: - # TODO: no reason to dict unpack every call here... - catalog_candidates = {**catalog.nodes, **catalog.sources} - catalog_entry = _find_first(catalog_candidates.values(), lambda c: c.key() == key) + offset = 0 + + def process_column(col: BaseColumn | ColumnMetadata): + nonlocal offset + if any(re.match(b, col.name) for b in context.skip_patterns): + return + normalized = normalize_column_name(col.name, context.project.config.credentials.type) + if not isinstance(col, ColumnMetadata): + dtype = _maybe_use_precise_dtype(col, context.settings) + col = ColumnMetadata( + name=normalized, type=dtype, index=offset, comment=getattr(col, "comment", None) + ) + normalized_cols[normalized] = col + offset += 1 + if hasattr(col, "flatten"): + for struct_field in t.cast(Iterable[BaseColumn], getattr(col, "flatten")()): + process_column(struct_field) + + if catalog := context.read_catalog(): + catalog_entry = _find_first( + chain(catalog.nodes.values(), catalog.sources.values()), lambda c: c.key() == ref + ) if catalog_entry: for column in catalog_entry.columns.values(): - if any(re.match(p, column.name) for p in skip_patterns): - continue - normalized = normalize_column_name( - column.name, context.project.config.credentials.type - ) - normalized_cols[normalized] = ColumnMetadata( - name=normalized, type=column.type, index=column.index, comment=column.comment - ) + process_column(column) return normalized_cols - relation: BaseRelation | None = context.project.adapter.get_relation( - key.database, - key.schema, - key.name, - ) - if not relation: + relation: BaseRelation | None = context.project.adapter.get_relation(*ref) + if relation is None: return normalized_cols try: - # TODO: the following should be a recursive function to handle nested columns, probably - for index, column in enumerate( - t.cast(Iterable[BaseColumn], context.project.adapter.get_columns_in_relation(relation)) + for column in t.cast( + Iterable[BaseColumn], context.project.adapter.get_columns_in_relation(relation) ): - if any(re.match(b, column.name) for b in skip_patterns): - continue - normalized = normalize_column_name(column.name, context.project.config.credentials.type) - dtype = _maybe_use_precise_dtype(column, context.settings) - normalized_cols[normalized] = ColumnMetadata( - name=normalized, type=dtype, index=index, comment=getattr(column, "comment", None) - ) - if hasattr(column, "flatten"): - for _, subcolumn in enumerate( - t.cast(Iterable[BaseColumn], getattr(column, "flatten")()) - ): - if any(re.match(b, subcolumn.name) for b in skip_patterns): - continue - normalized = normalize_column_name( - subcolumn.name, context.project.config.credentials.type - ) - dtype = _maybe_use_precise_dtype(subcolumn, context.settings) - normalized_cols[normalized] = ColumnMetadata( - name=normalized, - type=dtype, - index=index, - comment=getattr(subcolumn, "comment", None), - ) + process_column(column) except Exception as ex: - logger.warning(f"Could not introspect columns for {key}: {ex}") + logger.warning(f"Could not introspect columns for {ref}: {ex}") return normalized_cols @@ -609,19 +606,20 @@ def create_missing_source_yamls(context: YamlRefactorContext) -> None: ) def _describe(rel: BaseRelation) -> dict[str, t.Any]: - columns = [] - for c in t.cast( - Iterable[BaseColumn], context.project.adapter.get_columns_in_relation(rel) - ): - if any(re.match(b, c.name) for b in context.skip_patterns): - continue - # NOTE: we should be consistent about recursively flattening structs - normalized_column = normalize_column_name( - c.name, context.project.config.credentials.type - ) - dt = c.dtype.lower() if context.settings.output_to_lower else c.dtype - columns.append({"name": normalized_column, "description": "", "data_type": dt}) - return {"name": rel.identifier, "description": "", "columns": columns} + return { + "name": rel.identifier, + "description": "", + "columns": [ + { + "name": name, + "description": meta.comment or "", + "data_type": meta.type.lower() + if context.settings.output_to_lower + else meta.type, + } + for name, meta in get_columns(context, get_table_ref(rel)).items() + ], + } tables = [ schema @@ -819,7 +817,8 @@ def _job(uid: str, loc: SchemaFileLocation) -> None: def pretty_print_plan(plan: RestructureDeltaPlan) -> None: """Pretty print the restructure plan for the dbt project.""" for op in plan.operations: - logger.info(f"Processing {op.content}") + str_content = str(op.content)[:80] + "..." + logger.info(f"Processing {str_content}") if not op.superseded_paths: logger.info(f"CREATE or MERGE => {op.file_path}") else: @@ -854,12 +853,25 @@ def _remove_sources(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> existing_doc["sources"] = keep_sources -def apply_restructure_plan(context: YamlRefactorContext, plan: RestructureDeltaPlan) -> None: +def apply_restructure_plan( + context: YamlRefactorContext, plan: RestructureDeltaPlan, *, confirm: bool = False +) -> None: """Apply the restructure plan for the dbt project.""" if not plan.operations: logger.info("No changes needed.") return + if confirm: + pretty_print_plan(plan) + while confirm: + response = input("Apply the restructure plan? [y/N]: ") + if response.lower() in ("y", "yes"): + break + elif response.lower() in ("n", "no", ""): + logger.info("Skipping restructure plan.") + return + logger.info("Please respond with 'y' or 'n'.") + for op in plan.operations: output_doc: dict[str, t.Any] = {"version": 2} if op.file_path.exists(): @@ -1218,9 +1230,4 @@ def run_example_compilation_flow() -> None: project = create_dbt_project_context(c) yaml_context = YamlRefactorContext(project) plan = draft_restructure_delta_plan(yaml_context) - # print("Plan =>", plan) - pretty_print_plan(plan) - apply_restructure_plan(yaml_context, plan) - exit(0) - plan = draft_project_structure_update_plan(yaml_context) - _ = commit_project_restructure_to_disk(yaml_context, plan) + apply_restructure_plan(yaml_context, plan, confirm=True) From e9c040a013066ee8f9b64a58497a9ee9f3cdb8b7 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 16:19:39 -0700 Subject: [PATCH 12/46] wip: continue working on functional rewrite --- src/dbt_osmosis/core/osmosis.py | 723 +++++++++++++++----------------- 1 file changed, 329 insertions(+), 394 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index b59e6b97..5ff5486e 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -65,6 +65,9 @@ """This key is used to skip certain column name patterns in dbt-osmosis""" +# Basic DBT Setup + + def discover_project_dir() -> str: """Return the directory containing a dbt_project.yml if found, else the current dir.""" cwd = Path.cwd() @@ -128,6 +131,101 @@ def config_to_namespace(cfg: DbtConfiguration) -> argparse.Namespace: ) +@dataclass +class DbtProjectContext: + """A data object that includes references to: + + - The loaded dbt config + - The manifest + - The sql/macro parsers + + With mutexes for thread safety. The adapter is lazily instantiated and has a TTL which allows + for re-use across multiple operations in long-running processes. (is the idea) + """ + + args: argparse.Namespace + config: RuntimeConfig + manifest: Manifest + sql_parser: SqlBlockParser + macro_parser: SqlMacroParser + adapter_ttl: float = 3600.0 + + _adapter_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) + _manifest_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) + _adapter: BaseAdapter | None = None + _adapter_created_at: float = 0.0 + + @property + def is_adapter_expired(self) -> bool: + """Check if the adapter has expired based on the adapter TTL.""" + return time.time() - self._adapter_created_at > self.adapter_ttl + + @property + def adapter(self) -> BaseAdapter: + """Get the adapter instance, creating a new one if the current one has expired.""" + with self._adapter_mutex: + if not self._adapter or self.is_adapter_expired: + self._adapter = instantiate_adapter(self.config) + self._adapter.set_macro_resolver(self.manifest) + self._adapter_created_at = time.time() + return self._adapter + + @property + def manifest_mutex(self) -> threading.Lock: + """Return the manifest mutex for thread safety.""" + return self._manifest_mutex + + +def instantiate_adapter(runtime_config: RuntimeConfig) -> BaseAdapter: + """Instantiate a dbt adapter based on the runtime configuration.""" + register_adapter(runtime_config, get_mp_context()) + adapter = get_adapter(runtime_config) + adapter.set_macro_context_generator(t.cast(t.Any, generate_runtime_macro_context)) + adapter.connections.set_connection_name("dbt-osmosis") + return t.cast(BaseAdapter, t.cast(t.Any, adapter)) + + +def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: + """Build a DbtProjectContext from a DbtConfiguration.""" + args = config_to_namespace(config) + dbt_flags.set_from_args(args, args) + runtime_cfg = RuntimeConfig.from_args(args) + + adapter = instantiate_adapter(runtime_cfg) + setattr(runtime_cfg, "adapter", adapter) + loader = ManifestLoader( + runtime_cfg, + runtime_cfg.load_dependencies(), + ) + manifest = loader.load() + manifest.build_flat_graph() + + adapter.set_macro_resolver(manifest) + + sql_parser = SqlBlockParser(runtime_cfg, manifest, runtime_cfg) + macro_parser = SqlMacroParser(runtime_cfg, manifest) + + return DbtProjectContext( + args=args, + config=runtime_cfg, + manifest=manifest, + sql_parser=sql_parser, + macro_parser=macro_parser, + ) + + +def reload_manifest(context: DbtProjectContext) -> None: + """Reload the dbt project manifest. Useful for picking up mutations.""" + loader = ManifestLoader(context.config, context.config.load_dependencies()) + manifest = loader.load() + manifest.build_flat_graph() + context.adapter.set_macro_resolver(manifest) + context.manifest = manifest + + +# YAML + File Data + + def create_yaml_instance( indent_mapping: int = 2, indent_sequence: int = 4, @@ -223,98 +321,6 @@ class YamlRefactorSettings: output_to_lower: bool = False -@dataclass -class DbtProjectContext: - """A data object that includes references to: - - - The loaded dbt config - - The manifest - - The sql/macro parsers - - With mutexes for thread safety. The adapter is lazily instantiated and has a TTL which allows - for re-use across multiple operations in long-running processes. (is the idea) - """ - - args: argparse.Namespace - config: RuntimeConfig - manifest: Manifest - sql_parser: SqlBlockParser - macro_parser: SqlMacroParser - adapter_ttl: float = 3600.0 - - _adapter_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) - _manifest_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) - _adapter: BaseAdapter | None = None - _adapter_created_at: float = 0.0 - - @property - def is_adapter_expired(self) -> bool: - """Check if the adapter has expired based on the adapter TTL.""" - return time.time() - self._adapter_created_at > self.adapter_ttl - - @property - def adapter(self) -> BaseAdapter: - """Get the adapter instance, creating a new one if the current one has expired.""" - with self._adapter_mutex: - if not self._adapter or self.is_adapter_expired: - self._adapter = instantiate_adapter(self.config) - self._adapter.set_macro_resolver(self.manifest) - self._adapter_created_at = time.time() - return self._adapter - - @property - def manifest_mutex(self) -> threading.Lock: - """Return the manifest mutex for thread safety.""" - return self._manifest_mutex - - -def instantiate_adapter(runtime_config: RuntimeConfig) -> BaseAdapter: - """Instantiate a dbt adapter based on the runtime configuration.""" - register_adapter(runtime_config, get_mp_context()) - adapter = get_adapter(runtime_config) - adapter.set_macro_context_generator(t.cast(t.Any, generate_runtime_macro_context)) - adapter.connections.set_connection_name("dbt-osmosis") - return t.cast(BaseAdapter, t.cast(t.Any, adapter)) - - -def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: - """Build a DbtProjectContext from a DbtConfiguration.""" - args = config_to_namespace(config) - dbt_flags.set_from_args(args, args) - runtime_cfg = RuntimeConfig.from_args(args) - - adapter = instantiate_adapter(runtime_cfg) - setattr(runtime_cfg, "adapter", adapter) - loader = ManifestLoader( - runtime_cfg, - runtime_cfg.load_dependencies(), - ) - manifest = loader.load() - manifest.build_flat_graph() - - adapter.set_macro_resolver(manifest) - - sql_parser = SqlBlockParser(runtime_cfg, manifest, runtime_cfg) - macro_parser = SqlMacroParser(runtime_cfg, manifest) - - return DbtProjectContext( - args=args, - config=runtime_cfg, - manifest=manifest, - sql_parser=sql_parser, - macro_parser=macro_parser, - ) - - -def reload_manifest(context: DbtProjectContext) -> None: - """Reload the dbt project manifest. Useful for picking up mutations.""" - loader = ManifestLoader(context.config, context.config.load_dependencies()) - manifest = loader.load() - manifest.build_flat_graph() - context.adapter.set_macro_resolver(manifest) - context.manifest = manifest - - @dataclass class YamlRefactorContext: """A data object that includes references to: @@ -398,6 +404,9 @@ def load_catalog(settings: YamlRefactorSettings) -> CatalogArtifact | None: return CatalogArtifact.from_dict(json.loads(fp.read_text())) +# Basic compile & execute + + def _has_jinja(code: str) -> bool: """Check if a code string contains jinja tokens.""" return any(token in code for token in ("{{", "}}", "{%", "%}", "{#", "#}")) @@ -439,6 +448,9 @@ def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> AdapterRespons return resp +# Node filtering + + def _is_fqn_match(node: ResultNode, fqn_str: str) -> bool: """Filter models based on the provided fully qualified name matching on partial segments.""" if not fqn_str: @@ -499,6 +511,200 @@ def f(node: ResultNode) -> bool: yield uid, dbt_node +# Inheritance Logic + + +@t.overload +def _find_first(coll: Iterable[T], predicate: t.Callable[[T], bool], default: T) -> T: ... + + +@t.overload +def _find_first( + coll: Iterable[T], predicate: t.Callable[[T], bool], default: None = ... +) -> T | None: ... + + +def _find_first( + coll: Iterable[T], predicate: t.Callable[[T], bool], default: T | None = None +) -> T | None: + """Find the first item in a container that satisfies a predicate.""" + for item in coll: + if predicate(item): + return item + return default + + +def _build_node_ancestor_tree( + manifest: Manifest, + node: ResultNode, + tree: dict[str, list[str]] | None = None, + visited: set[str] | None = None, + depth: int = 1, +) -> dict[str, list[str]]: + """Build a flat graph of a node and it's ancestors.""" + + if tree is None or visited is None: + visited = set(node.unique_id) + tree = {"generation_0": [node.unique_id]} + depth = 1 + + if not hasattr(node, "depends_on"): + return tree + + for dep in getattr(node.depends_on, "nodes", []): + if dep not in visited: + visited.add(dep) + member = manifest.nodes.get(dep, manifest.sources.get(dep)) + if member: + tree.setdefault(f"generation_{depth}", []).append(dep) + _ = _build_node_ancestor_tree(manifest, member, tree, visited, depth + 1) + + for generation in tree.values(): + generation.sort() # For deterministic ordering + + return tree + + +def _get_member_yaml(context: YamlRefactorContext, member: ResultNode) -> dict[str, t.Any] | None: + """Get the parsed YAML for a dbt model or source node.""" + project_dir = Path(context.project.config.project_root) + + if isinstance(member, SourceDefinition): + if not member.original_file_path: + return None + path = project_dir.joinpath(member.original_file_path) + sources = t.cast(list[dict[str, t.Any]], _read_yaml(context, path).get("sources", [])) + source = _find_first(sources, lambda s: s["name"] == member.source_name, {}) + tables = source.get("tables", []) + return _find_first(tables, lambda tbl: tbl["name"] == member.name) + + elif isinstance(member, (ModelNode, SeedNode)): + if not member.patch_path: + return None + path = project_dir.joinpath(member.patch_path.split("://")[-1]) + section = f"{member.resource_type}s" + models = t.cast(list[dict[str, t.Any]], _read_yaml(context, path).get(section, [])) + return _find_first(models, lambda model: model["name"] == member.name) + + return None + + +def _build_column_knowledge_grap( + context: YamlRefactorContext, node: ResultNode +) -> dict[str, dict[str, t.Any]]: + """Generate a column knowledge graph for a dbt model or source node.""" + tree = _build_node_ancestor_tree(context.project.manifest, node) + _ = tree.pop("generation_0") + + column_knowledge_graph: dict[str, dict[str, t.Any]] = {} + for generation in reversed(sorted(tree.keys())): + ancestors = tree[generation] + for ancestor_uid in ancestors: + ancestor = context.project.manifest.nodes.get( + ancestor_uid, context.project.manifest.sources.get(ancestor_uid) + ) + if not ancestor: + continue + + for name, metadata in ancestor.columns.items(): + graph_node = column_knowledge_graph.setdefault(name, {}) + if context.settings.add_progenitor_to_meta: + graph_node.setdefault("meta", {}).setdefault( + "osmosis_progenitor", ancestor.name + ) + + graph_edge = metadata.to_dict() + + if context.settings.use_unrendered_descriptions: + raw_yaml = _get_member_yaml(context, ancestor) or {} + raw_columns = t.cast(list[dict[str, t.Any]], raw_yaml.get("columns", [])) + raw_column_metadata = _find_first(raw_columns, lambda c: c["name"] == name, {}) + if undrendered_description := raw_column_metadata.get("description"): + graph_edge["description"] = undrendered_description + + current_tags = graph_node.get("tags", []) + if incoming_tags := (set(graph_edge.pop("tags", [])) | set(current_tags)): + graph_edge["tags"] = list(incoming_tags) + + current_meta = graph_node.get("meta", {}) + if incoming_meta := {**current_meta, **graph_edge.pop("meta", {})}: + graph_edge["meta"] = incoming_meta + + for inheritable in context.settings.add_inheritance_for_specified_keys: + current_val = graph_node.get(inheritable) + if incoming_val := graph_edge.pop(inheritable, current_val): + graph_edge[inheritable] = incoming_val + + if graph_edge.get("description", EMPTY_STRING) in context.placeholders: + del graph_edge["description"] + if graph_edge.get("tags") == []: + del graph_edge["tags"] + if graph_edge.get("meta") == {}: + del graph_edge["meta"] + for k in list(graph_edge.keys()): + if graph_edge[k] is None: + graph_edge.pop(k) + + graph_node.update(graph_edge) + + return {name: meta.to_dict() for name, meta in node.columns.items()} + + +def inherit_upstream_column_knowledge( + context: YamlRefactorContext, node: ResultNode +) -> dict[str, dict[str, t.Any]]: + """Inherit column level knowledge from the ancestors of a dbt model or source node producing a column data structure usable in dbt yaml files. + + This mutates the manifest node in place and returns the column data structure for use in a dbt yaml file. + """ + inheritable = ["description"] + if not context.settings.skip_add_tags: + inheritable.append("tags") + if not context.settings.skip_merge_meta: + inheritable.append("meta") + for extra in context.settings.add_inheritance_for_specified_keys: + if extra not in inheritable: + inheritable.append(extra) + column_knowledge_graph = _build_column_knowledge_grap(context, node) + for name, node_column in node.columns.items(): + # NOTE: This is our graph "lookup", and our best [only] opportunity to apply user defined fuzzing + # so we should make the composable and robust + kwargs = column_knowledge_graph.get(name) + if kwargs is None: + continue + node.columns[name] = node_column.replace( + **{k: v for k, v in kwargs.items() if v is not None and k in inheritable} + ) + + return {name: meta.to_dict() for name, meta in node.columns.items()} + + +def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None = None) -> None: + """Add missing columns to a dbt node and it's corresponding yaml section. Changes are implicitly buffered until commit_yamls is called.""" + if context.settings.skip_add_columns: + return + if node is None: + for node in context.project.manifest.nodes.values(): + inject_missing_columns(context, node) + return + yaml_section = _get_member_yaml(context, node) or {} + current_columns = { + normalize_column_name(c["name"], context.project.config.credentials.type) + for c in yaml_section.get("columns", []) + } + incoming_columns = get_columns(context, get_table_ref(node)) + for incoming_name, incoming_meta in incoming_columns.items(): + if incoming_name not in node.columns and incoming_name not in current_columns: + logger.info( + f"Detected and reconciling missing column {incoming_name} in node {node.unique_id}" + ) + gen_col = {"name": incoming_name, "description": incoming_meta.comment} + if dtype := incoming_meta.type: + gen_col["data_type"] = dtype.lower() if context.settings.output_to_lower else dtype + node.columns[incoming_name] = ColumnInfo.from_dict(gen_col) + yaml_section.setdefault("columns", []).append(gen_col) + + def normalize_column_name(column: str, credentials_type: str) -> str: """Apply case normalization to a column name based on the credentials type.""" if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): @@ -529,8 +735,14 @@ def get_table_ref(node: ResultNode | BaseRelation) -> TableRef: return TableRef(node.database, node.schema, node.name) +_COLUMN_LIST_CACHE = {} + + def get_columns(context: YamlRefactorContext, ref: TableRef) -> dict[str, ColumnMetadata]: """Equivalent to get_columns_meta in old code but directly referencing a key, not a node.""" + if ref in _COLUMN_LIST_CACHE: + return _COLUMN_LIST_CACHE[ref] + normalized_cols = OrderedDict() offset = 0 @@ -571,6 +783,7 @@ def process_column(col: BaseColumn | ColumnMetadata): except Exception as ex: logger.warning(f"Could not introspect columns for {ref}: {ex}") + _COLUMN_LIST_CACHE[ref] = normalized_cols return normalized_cols @@ -707,12 +920,18 @@ def build_yaml_file_mapping( return out_map +# TODO: detect if something is dirty to minimize disk writes on commits +_YAML_BUFFER_CACHE: dict[Path, t.Any] = {} + + def _read_yaml(context: YamlRefactorContext, path: Path) -> dict[str, t.Any]: """Read a yaml file from disk.""" - if not path.is_file(): - return {} - with context.yaml_handler_lock: - return t.cast(dict[str, t.Any], context.yaml_handler.load(path)) + if path not in _YAML_BUFFER_CACHE: + if not path.is_file(): + return {} + with context.yaml_handler_lock: + _YAML_BUFFER_CACHE[path] = t.cast(dict[str, t.Any], context.yaml_handler.load(path)) + return _YAML_BUFFER_CACHE[path] def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any]) -> None: @@ -720,9 +939,21 @@ def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any] with context.yaml_handler_lock: path.parent.mkdir(parents=True, exist_ok=True) context.yaml_handler.dump(data, path) + if path in _YAML_BUFFER_CACHE: + del _YAML_BUFFER_CACHE[path] context.register_mutations(1) +def commit_yamls(context: YamlRefactorContext) -> None: + """Commit all files in the yaml buffer cache to disk.""" + with context.yaml_handler_lock: + for path in list(_YAML_BUFFER_CACHE.keys()): + with path.open("w") as f: + context.yaml_handler.dump(_YAML_BUFFER_CACHE[path], f) + del _YAML_BUFFER_CACHE[path] + context.register_mutations(1) + + def _generate_minimal_model_yaml(node: ModelNode) -> dict[str, t.Any]: """Generate a minimal model yaml for a dbt model node.""" return {"name": node.name, "columns": []} @@ -911,304 +1142,6 @@ def apply_restructure_plan( logger.info(f"Migrated doc from {path} -> {op.file_path}") -def propagate_documentation_downstream( - context: YamlRefactorContext, force_inheritance: bool = False -) -> None: - folder_map = build_yaml_file_mapping(context) - futures = [] - with context.project.adapter.connection_named("dbt-osmosis"): - for unique_id, node in filter_models(context): - futures.append( - context.pool.submit( - _run_model_doc_sync, - context, - unique_id, - node, - folder_map, - force_inheritance, - output_to_lower, - ) - ) - wait(futures) - - -# TODO: more work to do below the fold here - - -_ColumnLevelKnowledge = dict[str, t.Any] -_KnowledgeBase = dict[str, _ColumnLevelKnowledge] - - -def _build_node_ancestor_tree( - manifest: Manifest, - node: ResultNode, - family_tree: dict[str, list[str]] | None = None, - members_found: list[str] | None = None, - depth: int = 0, -) -> dict[str, list[str]]: - """Recursively build dictionary of parents in generational order using a simple DFS algorithm""" - # Set initial values - if family_tree is None: - family_tree = {} - if members_found is None: - members_found = [] - - # If the node has no dependencies, return the family tree as it is - if not hasattr(node, "depends_on"): - return family_tree - - # Iterate over the parents of the node mutating family_tree - for parent in getattr(node.depends_on, "nodes", []): - member = manifest.nodes.get(parent, manifest.sources.get(parent)) - if member and parent not in members_found: - family_tree.setdefault(f"generation_{depth}", []).append(parent) - _ = _build_node_ancestor_tree(manifest, member, family_tree, members_found, depth + 1) - members_found.append(parent) - - return family_tree - - -def _find_first(coll: Iterable[T], predicate: t.Callable[[T], bool]) -> T | None: - """Find the first item in a container that satisfies a predicate.""" - for item in coll: - if predicate(item): - return item - - -def get_member_yaml(context: YamlRefactorContext, member: ResultNode) -> dict[str, t.Any] | None: - """Get the parsed YAML for a dbt model or source node.""" - project_dir = Path(context.project.config.project_root) - yaml_handler = context.yaml_handler - - if isinstance(member, SourceDefinition): - if not member.original_file_path: - return None - path = project_dir.joinpath(member.original_file_path) - if not path.exists(): - return None - with path.open("r") as f: - parsed_yaml = yaml_handler.load(f) - data: t.Any = parsed_yaml.get("sources", []) - src = _find_first(data, lambda s: s["name"] == member.source_name) - if not src: - return None - tables = src.get("tables", []) - return _find_first(tables, lambda tbl: tbl["name"] == member.name) - - elif isinstance(member, (ModelNode, SeedNode)): - if not member.patch_path: - return None - patch_file = project_dir.joinpath(member.patch_path.split("://")[-1]) - if not patch_file.is_file(): - return None - with patch_file.open("r") as f: - parsed_yaml = yaml_handler.load(f) - section_key = f"{member.resource_type}s" - data = parsed_yaml.get(section_key, []) - return _find_first(data, lambda model: model["name"] == member.name) - - return None - - -def inherit_column_level_knowledge( - context: YamlRefactorContext, family_tree: dict[str, list[str]] -) -> _KnowledgeBase: - """Generate a knowledge base by applying inheritance logic based on the family tree graph.""" - knowledge: _KnowledgeBase = {} - placeholders = context.placeholders - manifest = context.project.manifest - - # If the user wants to use unrendered descriptions - use_unrendered = context.settings.use_unrendered_descriptions - - # We traverse from the last generation to the earliest - # so that the "nearest" ancestor overwrites the older ones. - for gen_name in reversed(family_tree.keys()): - members_in_generation = family_tree[gen_name] - for ancestor_id in members_in_generation: - member = manifest.nodes.get(ancestor_id, manifest.sources.get(ancestor_id)) - if not member: - continue - - member_yaml: dict[str, t.Any] | None = None - if use_unrendered: - member_yaml = get_member_yaml(context, member) - - # For each column in the ancestor - for col_name, col_info in member.columns.items(): - # If we haven't seen this column name yet, seed it with minimal data - _ = knowledge.setdefault( - col_name, - {"progenitor": ancestor_id, "generation": gen_name}, - ) - merged_info = col_info.to_dict() - - # If the description is in placeholders, discard it - if merged_info.get("description", "") in placeholders: - merged_info["description"] = "" - - # If user wants unrendered, read from YAML file if present - if member_yaml and "columns" in member_yaml: - col_in_yaml = _find_first( - member_yaml["columns"], lambda c: c["name"] == merged_info["name"] - ) - if col_in_yaml and "description" in col_in_yaml: - merged_info["description"] = col_in_yaml["description"] - - # Merge tags - existing_tags = knowledge[col_name].get("tags", []) - new_tags = set(merged_info.pop("tags", [])) | set(existing_tags) - if new_tags: - merged_info["tags"] = list(new_tags) - - # Merge meta - existing_meta = knowledge[col_name].get("meta", {}) - combined_meta = {**existing_meta, **merged_info.pop("meta", {})} - if combined_meta: - merged_info["meta"] = combined_meta - - # Now unify - knowledge[col_name].update(merged_info) - - return knowledge - - -def get_node_columns_with_inherited_knowledge( - context: YamlRefactorContext, node: ResultNode -) -> _KnowledgeBase: - """Build a knowledgebase for the node by climbing the ancestor tree and merging column doc info from nearest to farthest ancestors.""" - family_tree = _build_node_ancestor_tree(context.project.manifest, node) - return inherit_column_level_knowledge(context, family_tree) - - -def get_prior_knowledge(knowledge: _KnowledgeBase, column: str) -> _ColumnLevelKnowledge: - """If the user has changed column name's case or prefix, attempt to find the best match among possible variants (lowercase, pascalCase, etc.) - - We sort so that any source/seed is considered first, then models, - and within each group we sort descending by generation. - """ - camelcase: str = re.sub(r"_(.)", lambda m: m.group(1).upper(), column) - pascalcase: str = camelcase[0].upper() + camelcase[1:] if camelcase else camelcase - variants = (column, column.lower(), camelcase, pascalcase) - - def is_source_or_seed(k: _ColumnLevelKnowledge) -> bool: - p = k.get("progenitor", "") - return p.startswith("source") or p.startswith("seed") - - matches: list[_ColumnLevelKnowledge] = [] - for var in variants: - found = knowledge.get(var) - if found is not None: - matches.append(found) - - def _sort_k(k: _ColumnLevelKnowledge) -> tuple[bool, str]: - return (not is_source_or_seed(k), k.get("generation", "")) - - sorted_matches = sorted(matches, key=_sort_k, reverse=True) - return sorted_matches[0] if sorted_matches else {} - - -def merge_knowledge_with_original_knowledge( - prior_knowledge: _ColumnLevelKnowledge, - original_knowledge: _ColumnLevelKnowledge, - add_progenitor_to_meta: bool, - progenitor: str, -) -> _ColumnLevelKnowledge: - """Merge two column level knowledge dictionaries.""" - merged = dict(original_knowledge) - - # Unify tags - if "tags" in prior_knowledge: - prior_tags = set(prior_knowledge["tags"]) - merged_tags = set(merged.get("tags", [])) - merged["tags"] = list(prior_tags | merged_tags) - - # Unify meta - if "meta" in prior_knowledge: - new_meta = {**merged.get("meta", {}), **prior_knowledge["meta"]} - merged["meta"] = new_meta - - # If the user wants the source or seed name in meta, apply it - if add_progenitor_to_meta and progenitor: - merged.setdefault("meta", {}) - merged["meta"]["osmosis_progenitor"] = progenitor - - # If meta says "osmosis_keep_description" => keep the original description - if merged.get("meta", {}).get("osmosis_keep_description"): - # Do nothing - pass - else: - # Otherwise if prior knowledge has a non-empty description, override - if prior_knowledge.get("description"): - merged["description"] = prior_knowledge["description"] - - # Remove empty tags or meta - if merged.get("tags") == []: - merged.pop("tags", None) - if merged.get("meta") == {}: - merged.pop("meta", None) - - return merged - - -def update_undocumented_columns_with_prior_knowledge( - undocumented_columns: Iterable[str], - node: ManifestNode, - yaml_file_model_section: dict[str, t.Any], - knowledge: _KnowledgeBase, - skip_add_tags: bool, - skip_merge_meta: bool, - add_progenitor_to_meta: bool, - add_inheritance_for_specified_keys: Iterable[str] = (), -) -> int: - """For columns that are undocumented, we find prior knowledge in the knowledge dict, merge it with the existing column's knowledge, then assign it to both node and YAML.""" - # Which keys are we allowed to adopt from prior knowledge - inheritables = ["description"] - if not skip_add_tags: - inheritables.append("tags") - if not skip_merge_meta: - inheritables.append("meta") - for k in add_inheritance_for_specified_keys: - if k not in inheritables: - inheritables.append(k) - - changes = 0 - for column in undocumented_columns: - if column not in node.columns: - node.columns[column] = ColumnInfo.from_dict({"name": column}) - original_dict = node.columns[column].to_dict() - - prior = get_prior_knowledge(knowledge, column) - progenitor = t.cast(str, prior.pop("progenitor", "")) - - # Only keep keys we want to inherit - filtered_prior = {kk: vv for kk, vv in prior.items() if kk in inheritables} - - new_knowledge = merge_knowledge_with_original_knowledge( - filtered_prior, - original_dict, - add_progenitor_to_meta, - progenitor, - ) - if new_knowledge == original_dict: - continue - - node.columns[column] = ColumnInfo.from_dict(new_knowledge) - for col_def in yaml_file_model_section.get("columns", []): - if col_def.get("name") == column: - # Only update the keys we are inheriting - for k2 in filtered_prior: - col_def[k2] = new_knowledge.get(k2, col_def.get(k2)) - logger.info( - "[osmosis] Inherited knowledge for column: '%s' from progenitor '%s' in node '%s'", - column, - progenitor, - node.unique_id, - ) - changes += 1 - return changes - - # NOTE: usage example of the more FP style module below @@ -1231,3 +1164,5 @@ def run_example_compilation_flow() -> None: yaml_context = YamlRefactorContext(project) plan = draft_restructure_delta_plan(yaml_context) apply_restructure_plan(yaml_context, plan, confirm=True) + inject_missing_columns(yaml_context) + commit_yamls(yaml_context) From 528be4653affa2ea2ea7fd70991a7607a34cb196 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 16:50:12 -0700 Subject: [PATCH 13/46] wip: continue working on functional rewrite --- src/dbt_osmosis/core/osmosis.py | 34 +++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 5ff5486e..28c0f3f1 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -687,7 +687,9 @@ def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None for node in context.project.manifest.nodes.values(): inject_missing_columns(context, node) return - yaml_section = _get_member_yaml(context, node) or {} + yaml_section = _get_member_yaml(context, node) + if yaml_section is None: + return current_columns = { normalize_column_name(c["name"], context.project.config.credentials.type) for c in yaml_section.get("columns", []) @@ -698,13 +700,40 @@ def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None logger.info( f"Detected and reconciling missing column {incoming_name} in node {node.unique_id}" ) - gen_col = {"name": incoming_name, "description": incoming_meta.comment} + gen_col = {"name": incoming_name, "description": incoming_meta.comment or ""} if dtype := incoming_meta.type: gen_col["data_type"] = dtype.lower() if context.settings.output_to_lower else dtype node.columns[incoming_name] = ColumnInfo.from_dict(gen_col) yaml_section.setdefault("columns", []).append(gen_col) +def remove_columns_not_in_database( + context: YamlRefactorContext, node: ResultNode | None = None +) -> None: + """Remove columns from a dbt node and it's corresponding yaml section that are not present in the database. Changes are implicitly buffered until commit_yamls is called.""" + if context.settings.skip_add_columns: + return + if node is None: + for node in context.project.manifest.nodes.values(): + remove_columns_not_in_database(context, node) + return + yaml_section = _get_member_yaml(context, node) + if yaml_section is None: + return + current_columns = { + normalize_column_name(c["name"], context.project.config.credentials.type) + for c in yaml_section.get("columns", []) + } + incoming_columns = get_columns(context, get_table_ref(node)) + extra_columns = current_columns - set(incoming_columns.keys()) + for extra_column in extra_columns: + logger.info(f"Detected and removing extra column {extra_column} in node {node.unique_id}") + _ = node.columns.pop(extra_column, None) + yaml_section["columns"] = [ + c for c in yaml_section.get("columns", []) if c["name"] != extra_column + ] + + def normalize_column_name(column: str, credentials_type: str) -> str: """Apply case normalization to a column name based on the credentials type.""" if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): @@ -1165,4 +1194,5 @@ def run_example_compilation_flow() -> None: plan = draft_restructure_delta_plan(yaml_context) apply_restructure_plan(yaml_context, plan, confirm=True) inject_missing_columns(yaml_context) + remove_columns_not_in_database(yaml_context) commit_yamls(yaml_context) From 9b6b0a574127e11b5b19ef40e07eddadfa7cc07a Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 20:27:56 -0700 Subject: [PATCH 14/46] chore: reorganize some stuff in osmosis py --- src/dbt_osmosis/core/osmosis.py | 460 ++++++++++++++++++-------------- 1 file changed, 258 insertions(+), 202 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 28c0f3f1..172018cc 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -31,7 +31,6 @@ from dbt.contracts.graph.manifest import Manifest from dbt.contracts.graph.nodes import ( ColumnInfo, - ManifestNode, ManifestSQLNode, ModelNode, ResultNode, @@ -66,6 +65,7 @@ # Basic DBT Setup +# =============== def discover_project_dir() -> str: @@ -224,6 +224,7 @@ def reload_manifest(context: DbtProjectContext) -> None: # YAML + File Data +# ================ def create_yaml_instance( @@ -405,6 +406,7 @@ def load_catalog(settings: YamlRefactorSettings) -> CatalogArtifact | None: # Basic compile & execute +# ======================= def _has_jinja(code: str) -> bool: @@ -449,6 +451,7 @@ def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> AdapterRespons # Node filtering +# ============== def _is_fqn_match(node: ResultNode, fqn_str: str) -> bool: @@ -511,7 +514,8 @@ def f(node: ResultNode) -> bool: yield uid, dbt_node -# Inheritance Logic +# Introspection +# ============= @t.overload @@ -534,206 +538,6 @@ def _find_first( return default -def _build_node_ancestor_tree( - manifest: Manifest, - node: ResultNode, - tree: dict[str, list[str]] | None = None, - visited: set[str] | None = None, - depth: int = 1, -) -> dict[str, list[str]]: - """Build a flat graph of a node and it's ancestors.""" - - if tree is None or visited is None: - visited = set(node.unique_id) - tree = {"generation_0": [node.unique_id]} - depth = 1 - - if not hasattr(node, "depends_on"): - return tree - - for dep in getattr(node.depends_on, "nodes", []): - if dep not in visited: - visited.add(dep) - member = manifest.nodes.get(dep, manifest.sources.get(dep)) - if member: - tree.setdefault(f"generation_{depth}", []).append(dep) - _ = _build_node_ancestor_tree(manifest, member, tree, visited, depth + 1) - - for generation in tree.values(): - generation.sort() # For deterministic ordering - - return tree - - -def _get_member_yaml(context: YamlRefactorContext, member: ResultNode) -> dict[str, t.Any] | None: - """Get the parsed YAML for a dbt model or source node.""" - project_dir = Path(context.project.config.project_root) - - if isinstance(member, SourceDefinition): - if not member.original_file_path: - return None - path = project_dir.joinpath(member.original_file_path) - sources = t.cast(list[dict[str, t.Any]], _read_yaml(context, path).get("sources", [])) - source = _find_first(sources, lambda s: s["name"] == member.source_name, {}) - tables = source.get("tables", []) - return _find_first(tables, lambda tbl: tbl["name"] == member.name) - - elif isinstance(member, (ModelNode, SeedNode)): - if not member.patch_path: - return None - path = project_dir.joinpath(member.patch_path.split("://")[-1]) - section = f"{member.resource_type}s" - models = t.cast(list[dict[str, t.Any]], _read_yaml(context, path).get(section, [])) - return _find_first(models, lambda model: model["name"] == member.name) - - return None - - -def _build_column_knowledge_grap( - context: YamlRefactorContext, node: ResultNode -) -> dict[str, dict[str, t.Any]]: - """Generate a column knowledge graph for a dbt model or source node.""" - tree = _build_node_ancestor_tree(context.project.manifest, node) - _ = tree.pop("generation_0") - - column_knowledge_graph: dict[str, dict[str, t.Any]] = {} - for generation in reversed(sorted(tree.keys())): - ancestors = tree[generation] - for ancestor_uid in ancestors: - ancestor = context.project.manifest.nodes.get( - ancestor_uid, context.project.manifest.sources.get(ancestor_uid) - ) - if not ancestor: - continue - - for name, metadata in ancestor.columns.items(): - graph_node = column_knowledge_graph.setdefault(name, {}) - if context.settings.add_progenitor_to_meta: - graph_node.setdefault("meta", {}).setdefault( - "osmosis_progenitor", ancestor.name - ) - - graph_edge = metadata.to_dict() - - if context.settings.use_unrendered_descriptions: - raw_yaml = _get_member_yaml(context, ancestor) or {} - raw_columns = t.cast(list[dict[str, t.Any]], raw_yaml.get("columns", [])) - raw_column_metadata = _find_first(raw_columns, lambda c: c["name"] == name, {}) - if undrendered_description := raw_column_metadata.get("description"): - graph_edge["description"] = undrendered_description - - current_tags = graph_node.get("tags", []) - if incoming_tags := (set(graph_edge.pop("tags", [])) | set(current_tags)): - graph_edge["tags"] = list(incoming_tags) - - current_meta = graph_node.get("meta", {}) - if incoming_meta := {**current_meta, **graph_edge.pop("meta", {})}: - graph_edge["meta"] = incoming_meta - - for inheritable in context.settings.add_inheritance_for_specified_keys: - current_val = graph_node.get(inheritable) - if incoming_val := graph_edge.pop(inheritable, current_val): - graph_edge[inheritable] = incoming_val - - if graph_edge.get("description", EMPTY_STRING) in context.placeholders: - del graph_edge["description"] - if graph_edge.get("tags") == []: - del graph_edge["tags"] - if graph_edge.get("meta") == {}: - del graph_edge["meta"] - for k in list(graph_edge.keys()): - if graph_edge[k] is None: - graph_edge.pop(k) - - graph_node.update(graph_edge) - - return {name: meta.to_dict() for name, meta in node.columns.items()} - - -def inherit_upstream_column_knowledge( - context: YamlRefactorContext, node: ResultNode -) -> dict[str, dict[str, t.Any]]: - """Inherit column level knowledge from the ancestors of a dbt model or source node producing a column data structure usable in dbt yaml files. - - This mutates the manifest node in place and returns the column data structure for use in a dbt yaml file. - """ - inheritable = ["description"] - if not context.settings.skip_add_tags: - inheritable.append("tags") - if not context.settings.skip_merge_meta: - inheritable.append("meta") - for extra in context.settings.add_inheritance_for_specified_keys: - if extra not in inheritable: - inheritable.append(extra) - column_knowledge_graph = _build_column_knowledge_grap(context, node) - for name, node_column in node.columns.items(): - # NOTE: This is our graph "lookup", and our best [only] opportunity to apply user defined fuzzing - # so we should make the composable and robust - kwargs = column_knowledge_graph.get(name) - if kwargs is None: - continue - node.columns[name] = node_column.replace( - **{k: v for k, v in kwargs.items() if v is not None and k in inheritable} - ) - - return {name: meta.to_dict() for name, meta in node.columns.items()} - - -def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None = None) -> None: - """Add missing columns to a dbt node and it's corresponding yaml section. Changes are implicitly buffered until commit_yamls is called.""" - if context.settings.skip_add_columns: - return - if node is None: - for node in context.project.manifest.nodes.values(): - inject_missing_columns(context, node) - return - yaml_section = _get_member_yaml(context, node) - if yaml_section is None: - return - current_columns = { - normalize_column_name(c["name"], context.project.config.credentials.type) - for c in yaml_section.get("columns", []) - } - incoming_columns = get_columns(context, get_table_ref(node)) - for incoming_name, incoming_meta in incoming_columns.items(): - if incoming_name not in node.columns and incoming_name not in current_columns: - logger.info( - f"Detected and reconciling missing column {incoming_name} in node {node.unique_id}" - ) - gen_col = {"name": incoming_name, "description": incoming_meta.comment or ""} - if dtype := incoming_meta.type: - gen_col["data_type"] = dtype.lower() if context.settings.output_to_lower else dtype - node.columns[incoming_name] = ColumnInfo.from_dict(gen_col) - yaml_section.setdefault("columns", []).append(gen_col) - - -def remove_columns_not_in_database( - context: YamlRefactorContext, node: ResultNode | None = None -) -> None: - """Remove columns from a dbt node and it's corresponding yaml section that are not present in the database. Changes are implicitly buffered until commit_yamls is called.""" - if context.settings.skip_add_columns: - return - if node is None: - for node in context.project.manifest.nodes.values(): - remove_columns_not_in_database(context, node) - return - yaml_section = _get_member_yaml(context, node) - if yaml_section is None: - return - current_columns = { - normalize_column_name(c["name"], context.project.config.credentials.type) - for c in yaml_section.get("columns", []) - } - incoming_columns = get_columns(context, get_table_ref(node)) - extra_columns = current_columns - set(incoming_columns.keys()) - for extra_column in extra_columns: - logger.info(f"Detected and removing extra column {extra_column} in node {node.unique_id}") - _ = node.columns.pop(extra_column, None) - yaml_section["columns"] = [ - c for c in yaml_section.get("columns", []) if c["name"] != extra_column - ] - - def normalize_column_name(column: str, credentials_type: str) -> str: """Apply case normalization to a column name based on the credentials type.""" if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): @@ -765,6 +569,7 @@ def get_table_ref(node: ResultNode | BaseRelation) -> TableRef: _COLUMN_LIST_CACHE = {} +"""Cache for column lists to avoid redundant introspection.""" def get_columns(context: YamlRefactorContext, ref: TableRef) -> dict[str, ColumnMetadata]: @@ -816,6 +621,10 @@ def process_column(col: BaseColumn | ColumnMetadata): return normalized_cols +# Restructuring Logic +# =================== + + def create_missing_source_yamls(context: YamlRefactorContext) -> None: """Create source files for sources defined in the dbt_project.yml dbt-osmosis var which don't exist as nodes. @@ -951,6 +760,7 @@ def build_yaml_file_mapping( # TODO: detect if something is dirty to minimize disk writes on commits _YAML_BUFFER_CACHE: dict[Path, t.Any] = {} +"""Cache for yaml file buffers to avoid redundant disk reads/writes and simplify edits.""" def _read_yaml(context: YamlRefactorContext, path: Path) -> dict[str, t.Any]: @@ -1171,6 +981,251 @@ def apply_restructure_plan( logger.info(f"Migrated doc from {path} -> {op.file_path}") +# Inheritance Logic + + +def _build_node_ancestor_tree( + manifest: Manifest, + node: ResultNode, + tree: dict[str, list[str]] | None = None, + visited: set[str] | None = None, + depth: int = 1, +) -> dict[str, list[str]]: + """Build a flat graph of a node and it's ancestors.""" + + if tree is None or visited is None: + visited = set(node.unique_id) + tree = {"generation_0": [node.unique_id]} + depth = 1 + + if not hasattr(node, "depends_on"): + return tree + + for dep in getattr(node.depends_on, "nodes", []): + if dep not in visited: + visited.add(dep) + member = manifest.nodes.get(dep, manifest.sources.get(dep)) + if member: + tree.setdefault(f"generation_{depth}", []).append(dep) + _ = _build_node_ancestor_tree(manifest, member, tree, visited, depth + 1) + + for generation in tree.values(): + generation.sort() # For deterministic ordering + + return tree + + +def _get_member_yaml(context: YamlRefactorContext, member: ResultNode) -> dict[str, t.Any] | None: + """Get the parsed YAML for a dbt model or source node.""" + project_dir = Path(context.project.config.project_root) + + if isinstance(member, SourceDefinition): + if not member.original_file_path: + return None + path = project_dir.joinpath(member.original_file_path) + sources = t.cast(list[dict[str, t.Any]], _read_yaml(context, path).get("sources", [])) + source = _find_first(sources, lambda s: s["name"] == member.source_name, {}) + tables = source.get("tables", []) + return _find_first(tables, lambda tbl: tbl["name"] == member.name) + + elif isinstance(member, (ModelNode, SeedNode)): + if not member.patch_path: + return None + path = project_dir.joinpath(member.patch_path.split("://")[-1]) + section = f"{member.resource_type}s" + models = t.cast(list[dict[str, t.Any]], _read_yaml(context, path).get(section, [])) + return _find_first(models, lambda model: model["name"] == member.name) + + return None + + +def _build_column_knowledge_grap( + context: YamlRefactorContext, node: ResultNode +) -> dict[str, dict[str, t.Any]]: + """Generate a column knowledge graph for a dbt model or source node.""" + tree = _build_node_ancestor_tree(context.project.manifest, node) + _ = tree.pop("generation_0") + + column_knowledge_graph: dict[str, dict[str, t.Any]] = {} + for generation in reversed(sorted(tree.keys())): + ancestors = tree[generation] + for ancestor_uid in ancestors: + ancestor = context.project.manifest.nodes.get( + ancestor_uid, context.project.manifest.sources.get(ancestor_uid) + ) + if not ancestor: + continue + + for name, metadata in ancestor.columns.items(): + graph_node = column_knowledge_graph.setdefault(name, {}) + if context.settings.add_progenitor_to_meta: + graph_node.setdefault("meta", {}).setdefault( + "osmosis_progenitor", ancestor.name + ) + + graph_edge = metadata.to_dict() + + if context.settings.use_unrendered_descriptions: + raw_yaml = _get_member_yaml(context, ancestor) or {} + raw_columns = t.cast(list[dict[str, t.Any]], raw_yaml.get("columns", [])) + raw_column_metadata = _find_first(raw_columns, lambda c: c["name"] == name, {}) + if undrendered_description := raw_column_metadata.get("description"): + graph_edge["description"] = undrendered_description + + current_tags = graph_node.get("tags", []) + if incoming_tags := (set(graph_edge.pop("tags", [])) | set(current_tags)): + graph_edge["tags"] = list(incoming_tags) + + current_meta = graph_node.get("meta", {}) + if incoming_meta := {**current_meta, **graph_edge.pop("meta", {})}: + graph_edge["meta"] = incoming_meta + + for inheritable in context.settings.add_inheritance_for_specified_keys: + current_val = graph_node.get(inheritable) + if incoming_val := graph_edge.pop(inheritable, current_val): + graph_edge[inheritable] = incoming_val + + if graph_edge.get("description", EMPTY_STRING) in context.placeholders: + del graph_edge["description"] + if graph_edge.get("tags") == []: + del graph_edge["tags"] + if graph_edge.get("meta") == {}: + del graph_edge["meta"] + for k in list(graph_edge.keys()): + if graph_edge[k] is None: + graph_edge.pop(k) + + graph_node.update(graph_edge) + + return {name: meta.to_dict() for name, meta in node.columns.items()} + + +def inherit_upstream_column_knowledge( + context: YamlRefactorContext, node: ResultNode +) -> dict[str, dict[str, t.Any]]: + """Inherit column level knowledge from the ancestors of a dbt model or source node producing a column data structure usable in dbt yaml files. + + This mutates the manifest node in place and returns the column data structure for use in a dbt yaml file. + """ + inheritable = ["description"] + if not context.settings.skip_add_tags: + inheritable.append("tags") + if not context.settings.skip_merge_meta: + inheritable.append("meta") + for extra in context.settings.add_inheritance_for_specified_keys: + if extra not in inheritable: + inheritable.append(extra) + column_knowledge_graph = _build_column_knowledge_grap(context, node) + for name, node_column in node.columns.items(): + # NOTE: This is our graph "lookup", and our best [only] opportunity to apply user defined fuzzing + # so we should make the composable and robust + kwargs = column_knowledge_graph.get(name) + if kwargs is None: + continue + node.columns[name] = node_column.replace( + **{k: v for k, v in kwargs.items() if v is not None and k in inheritable} + ) + + return {name: meta.to_dict() for name, meta in node.columns.items()} + + +def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None = None) -> None: + """Add missing columns to a dbt node and it's corresponding yaml section. Changes are implicitly buffered until commit_yamls is called.""" + if context.settings.skip_add_columns: + return + if node is None: + for node in context.project.manifest.nodes.values(): + inject_missing_columns(context, node) + return + yaml_section = _get_member_yaml(context, node) + if yaml_section is None: + return + current_columns = { + normalize_column_name(c["name"], context.project.config.credentials.type) + for c in yaml_section.get("columns", []) + } + incoming_columns = get_columns(context, get_table_ref(node)) + for incoming_name, incoming_meta in incoming_columns.items(): + if incoming_name not in node.columns and incoming_name not in current_columns: + logger.info( + f"Detected and reconciling missing column {incoming_name} in node {node.unique_id}" + ) + gen_col = {"name": incoming_name, "description": incoming_meta.comment or ""} + if dtype := incoming_meta.type: + gen_col["data_type"] = dtype.lower() if context.settings.output_to_lower else dtype + node.columns[incoming_name] = ColumnInfo.from_dict(gen_col) + yaml_section.setdefault("columns", []).append(gen_col) + + +def remove_columns_not_in_database( + context: YamlRefactorContext, node: ResultNode | None = None +) -> None: + """Remove columns from a dbt node and it's corresponding yaml section that are not present in the database. Changes are implicitly buffered until commit_yamls is called.""" + if context.settings.skip_add_columns: + return + if node is None: + for node in context.project.manifest.nodes.values(): + remove_columns_not_in_database(context, node) + return + yaml_section = _get_member_yaml(context, node) + if yaml_section is None: + return + current_columns = { + normalize_column_name(c["name"], context.project.config.credentials.type) + for c in yaml_section.get("columns", []) + } + incoming_columns = get_columns(context, get_table_ref(node)) + extra_columns = current_columns - set(incoming_columns.keys()) + for extra_column in extra_columns: + logger.info(f"Detected and removing extra column {extra_column} in node {node.unique_id}") + _ = node.columns.pop(extra_column, None) + yaml_section["columns"] = [ + c for c in yaml_section.get("columns", []) if c["name"] != extra_column + ] + + +def sort_columns_as_in_database( + context: YamlRefactorContext, node: ResultNode | None = None +) -> None: + """Sort columns in a dbt node and it's corresponding yaml section as they appear in the database. Changes are implicitly buffered until commit_yamls is called.""" + if node is None: + for node in context.project.manifest.nodes.values(): + sort_columns_as_in_database(context, node) + return + yaml_section = _get_member_yaml(context, node) + if yaml_section is None: + return + incoming_columns = get_columns(context, get_table_ref(node)) + + def _position(column: dict[str, t.Any]): + db_info = incoming_columns.get(column["name"]) + if db_info is None: + return 99999 + return db_info.index + + t.cast(list[dict[str, t.Any]], yaml_section["columns"]).sort(key=_position) + node.columns = { + k: v for k, v in sorted(node.columns.items(), key=lambda i: _position(i[1].to_dict())) + } + context.register_mutations(1) + + +def sort_columns_alphabetically( + context: YamlRefactorContext, node: ResultNode | None = None +) -> None: + """Sort columns in a dbt node and it's corresponding yaml section alphabetically. Changes are implicitly buffered until commit_yamls is called.""" + if node is None: + for node in context.project.manifest.nodes.values(): + sort_columns_alphabetically(context, node) + return + yaml_section = _get_member_yaml(context, node) + if yaml_section is None: + return + t.cast(list[dict[str, t.Any]], yaml_section["columns"]).sort(key=lambda c: c["name"]) + node.columns = {k: v for k, v in sorted(node.columns.items(), key=lambda i: i[0])} + context.register_mutations(1) + + # NOTE: usage example of the more FP style module below @@ -1195,4 +1250,5 @@ def run_example_compilation_flow() -> None: apply_restructure_plan(yaml_context, plan, confirm=True) inject_missing_columns(yaml_context) remove_columns_not_in_database(yaml_context) + sort_columns_as_in_database(yaml_context) commit_yamls(yaml_context) From 74b73612caf79406c4953d21154e28df96ce836f Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 20:32:24 -0700 Subject: [PATCH 15/46] chore: comment out noisy test failures while we are wip --- src/dbt_osmosis/core/osmosis.py | 7 +- tests/test_column_level_knowledge.py | 77 +- .../test_column_level_knowledge_propagator.py | 1475 +++++++++-------- tests/test_yaml_manager.py | 233 +-- 4 files changed, 898 insertions(+), 894 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 172018cc..3dfe047a 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -764,7 +764,7 @@ def build_yaml_file_mapping( def _read_yaml(context: YamlRefactorContext, path: Path) -> dict[str, t.Any]: - """Read a yaml file from disk.""" + """Read a yaml file from disk. Adds an entry to the buffer cache so all operations on a path are consistent.""" if path not in _YAML_BUFFER_CACHE: if not path.is_file(): return {} @@ -774,7 +774,7 @@ def _read_yaml(context: YamlRefactorContext, path: Path) -> dict[str, t.Any]: def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any]) -> None: - """Write a yaml file to disk and register a mutation with the context.""" + """Write a yaml file to disk and register a mutation with the context. Clears the path from the buffer cache.""" with context.yaml_handler_lock: path.parent.mkdir(parents=True, exist_ok=True) context.yaml_handler.dump(data, path) @@ -784,7 +784,7 @@ def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any] def commit_yamls(context: YamlRefactorContext) -> None: - """Commit all files in the yaml buffer cache to disk.""" + """Commit all files in the yaml buffer cache to disk. Clears the buffer cache and registers mutations.""" with context.yaml_handler_lock: for path in list(_YAML_BUFFER_CACHE.keys()): with path.open("w") as f: @@ -982,6 +982,7 @@ def apply_restructure_plan( # Inheritance Logic +# ================= def _build_node_ancestor_tree( diff --git a/tests/test_column_level_knowledge.py b/tests/test_column_level_knowledge.py index 286e0bd9..c6b5fd54 100644 --- a/tests/test_column_level_knowledge.py +++ b/tests/test_column_level_knowledge.py @@ -1,38 +1,39 @@ -from dbt_osmosis.core.column_level_knowledge import get_prior_knowledge - - -class TestDbtYamlManager: - def test_get_prior_knowledge(test): - knowledge = { - "myColumn": { - "progenitor": "source.my_model.source.Order", - "generation": "generation_0", - "name": "my_column", - }, - "my_column": { - "progenitor": "model.my_model.mart.Order", - "generation": "generation_0", - "name": "my_column", - }, - } - assert ( - get_prior_knowledge(knowledge, "my_column")["progenitor"] - == "source.my_model.source.Order" - ) - - def test_get_prior_knowledge_with_camel_case(test): - knowledge = { - "myColumn": { - "progenitor": "model.my_model.dwh.Order", - "generation": "generation_1", - "name": "myColumn", - }, - "my_column": { - "progenitor": "model.my_model.mart.Order", - "generation": "generation_0", - "name": "my_column", - }, - } - assert ( - get_prior_knowledge(knowledge, "my_column")["progenitor"] == "model.my_model.dwh.Order" - ) +# TODO: refactor this test +# from dbt_osmosis.core.column_level_knowledge import get_prior_knowledge +# +# +# class TestDbtYamlManager: +# def test_get_prior_knowledge(test): +# knowledge = { +# "myColumn": { +# "progenitor": "source.my_model.source.Order", +# "generation": "generation_0", +# "name": "my_column", +# }, +# "my_column": { +# "progenitor": "model.my_model.mart.Order", +# "generation": "generation_0", +# "name": "my_column", +# }, +# } +# assert ( +# get_prior_knowledge(knowledge, "my_column")["progenitor"] +# == "source.my_model.source.Order" +# ) +# +# def test_get_prior_knowledge_with_camel_case(test): +# knowledge = { +# "myColumn": { +# "progenitor": "model.my_model.dwh.Order", +# "generation": "generation_1", +# "name": "myColumn", +# }, +# "my_column": { +# "progenitor": "model.my_model.mart.Order", +# "generation": "generation_0", +# "name": "my_column", +# }, +# } +# assert ( +# get_prior_knowledge(knowledge, "my_column")["progenitor"] == "model.my_model.dwh.Order" +# ) diff --git a/tests/test_column_level_knowledge_propagator.py b/tests/test_column_level_knowledge_propagator.py index 1e4fb9c7..53ca5fb5 100644 --- a/tests/test_column_level_knowledge_propagator.py +++ b/tests/test_column_level_knowledge_propagator.py @@ -1,737 +1,738 @@ -import json -from pathlib import Path - -import dbt.version -import pytest -from dbt.contracts.graph.manifest import Manifest -from packaging.version import Version - -from dbt_osmosis.core.column_level_knowledge_propagator import ( - ColumnLevelKnowledgePropagator, - _build_node_ancestor_tree, - _inherit_column_level_knowledge, -) - -dbt_version = Version(dbt.version.get_installed_version().to_version_string(skip_matcher=True)) - - -def load_manifest() -> Manifest: - manifest_path = Path(__file__).parent.parent / "demo_duckdb/target/manifest.json" - with manifest_path.open("r") as f: - manifest_text = f.read() - manifest_dict = json.loads(manifest_text) - return Manifest.from_dict(manifest_dict) - - -def test_build_node_ancestor_tree(): - manifest = load_manifest() - target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] - expect = { - "generation_0": [ - "model.jaffle_shop_duckdb.stg_customers", - "model.jaffle_shop_duckdb.stg_orders", - "model.jaffle_shop_duckdb.stg_payments", - ], - "generation_1": [ - "seed.jaffle_shop_duckdb.raw_customers", - "seed.jaffle_shop_duckdb.raw_orders", - "seed.jaffle_shop_duckdb.raw_payments", - ], - } - assert _build_node_ancestor_tree(manifest, target_node) == expect - - -def test_inherit_column_level_knowledge(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "customer_id" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["seed.jaffle_shop_duckdb.raw_orders"].columns[ - "status" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - - expect = { - "customer_id": { - "progenitor": "model.jaffle_shop_duckdb.stg_customers", - "generation": "generation_0", - "name": "customer_id", - "description": "THIS COLUMN IS UPDATED FOR TESTING", - "data_type": "INTEGER", - "constraints": [], - "quote": None, - }, - "first_name": { - "progenitor": "model.jaffle_shop_duckdb.stg_customers", - "generation": "generation_0", - "name": "first_name", - "data_type": "VARCHAR", - "constraints": [], - "quote": None, - }, - "last_name": { - "progenitor": "model.jaffle_shop_duckdb.stg_customers", - "generation": "generation_0", - "name": "last_name", - "data_type": "VARCHAR", - "constraints": [], - "quote": None, - }, - "rank": { - "progenitor": "model.jaffle_shop_duckdb.stg_customers", - "generation": "generation_0", - "name": "rank", - "data_type": "VARCHAR", - "constraints": [], - "quote": None, - }, - "order_id": { - "progenitor": "model.jaffle_shop_duckdb.stg_orders", - "generation": "generation_0", - "name": "order_id", - "data_type": "INTEGER", - "constraints": [], - "quote": None, - }, - "order_date": { - "progenitor": "model.jaffle_shop_duckdb.stg_orders", - "generation": "generation_0", - "name": "order_date", - "data_type": "DATE", - "constraints": [], - "quote": None, - }, - "status": { - "progenitor": "seed.jaffle_shop_duckdb.raw_orders", - "generation": "generation_1", - "name": "status", - "description": "THIS COLUMN IS UPDATED FOR TESTING", - "data_type": "VARCHAR", - "constraints": [], - "quote": None, - }, - "payment_id": { - "progenitor": "model.jaffle_shop_duckdb.stg_payments", - "generation": "generation_0", - "name": "payment_id", - "data_type": "INTEGER", - "constraints": [], - "quote": None, - }, - "payment_method": { - "progenitor": "model.jaffle_shop_duckdb.stg_payments", - "generation": "generation_0", - "name": "payment_method", - "data_type": "VARCHAR", - "constraints": [], - "quote": None, - }, - "amount": { - "progenitor": "model.jaffle_shop_duckdb.stg_payments", - "generation": "generation_0", - "name": "amount", - "data_type": "DOUBLE", - "constraints": [], - "quote": None, - }, - } - if dbt_version >= Version("1.9.0"): - for key in expect.keys(): - expect[key]["granularity"] = None - - target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] - family_tree = _build_node_ancestor_tree(manifest, target_node) - placeholders = [""] - assert _inherit_column_level_knowledge(manifest, family_tree, placeholders) == expect - - -def test_update_undocumented_columns_with_prior_knowledge(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "customer_id" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { - "my_key": "my_value" - } - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ - "my_tag1", - "my_tag2", - ] - - target_node_name = "model.jaffle_shop_duckdb.customers" - manifest.nodes[target_node_name].columns["customer_id"].tags = set( - [ - "my_tag3", - "my_tag4", - ] - ) - manifest.nodes[target_node_name].columns["customer_id"].meta = { - "my_key": "my_old_value", - "my_new_key": "my_new_value", - } - target_node = manifest.nodes[target_node_name] - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - manifest, target_node, placeholders=[""] - ) - yaml_file_model_section = { - "columns": [ - { - "name": "customer_id", - } - ] - } - undocumented_columns = target_node.columns.keys() - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, - target_node, - yaml_file_model_section, - knowledge, - skip_add_tags=False, - skip_merge_meta=False, - add_progenitor_to_meta=False, - ) - - assert yaml_file_model_section["columns"][0]["name"] == "customer_id" - assert ( - yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" - ) - assert yaml_file_model_section["columns"][0]["meta"] == { - "my_key": "my_value", - "my_new_key": "my_new_value", - } - assert set(yaml_file_model_section["columns"][0]["tags"]) == set( - ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] - ) - - assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" - assert target_node.columns["customer_id"].meta == { - "my_key": "my_value", - "my_new_key": "my_new_value", - } - assert set(target_node.columns["customer_id"].tags) == set( - ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] - ) - - -def test_update_undocumented_columns_with_prior_knowledge_skip_add_tags(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "customer_id" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { - "my_key": "my_value" - } - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ - "my_tag1", - "my_tag2", - ] - - target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - manifest, target_node, placeholders=[""] - ) - yaml_file_model_section = { - "columns": [ - { - "name": "customer_id", - } - ] - } - undocumented_columns = target_node.columns.keys() - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, - target_node, - yaml_file_model_section, - knowledge, - skip_add_tags=True, - skip_merge_meta=False, - add_progenitor_to_meta=False, - ) - - assert yaml_file_model_section["columns"][0]["name"] == "customer_id" - assert ( - yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" - ) - assert yaml_file_model_section["columns"][0]["meta"] == {"my_key": "my_value"} - assert "tags" not in yaml_file_model_section["columns"][0] - - assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" - assert target_node.columns["customer_id"].meta == {"my_key": "my_value"} - assert set(target_node.columns["customer_id"].tags) == set([]) - - -def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "customer_id" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { - "my_key": "my_value" - } - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ - "my_tag1", - "my_tag2", - ] - - target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - manifest, target_node, placeholders=[""] - ) - yaml_file_model_section = { - "columns": [ - { - "name": "customer_id", - } - ] - } - undocumented_columns = target_node.columns.keys() - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, - target_node, - yaml_file_model_section, - knowledge, - skip_add_tags=False, - skip_merge_meta=True, - add_progenitor_to_meta=False, - ) - - assert yaml_file_model_section["columns"][0]["name"] == "customer_id" - assert ( - yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" - ) - assert "meta" not in yaml_file_model_section["columns"][0] - assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag1", "my_tag2"]) - - assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" - assert target_node.columns["customer_id"].meta == {} - assert set(target_node.columns["customer_id"].tags) == set(["my_tag1", "my_tag2"]) - - -def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "customer_id" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { - "my_key": "my_value" - } - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ - "my_tag1", - "my_tag2", - ] - - target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - manifest, target_node, placeholders=[""] - ) - yaml_file_model_section = { - "columns": [ - { - "name": "customer_id", - } - ] - } - undocumented_columns = target_node.columns.keys() - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, - target_node, - yaml_file_model_section, - knowledge, - skip_add_tags=False, - skip_merge_meta=False, - add_progenitor_to_meta=True, - ) - - assert yaml_file_model_section["columns"][0]["name"] == "customer_id" - assert ( - yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" - ) - assert yaml_file_model_section["columns"][0]["meta"] == { - "my_key": "my_value", - "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", - } - assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag1", "my_tag2"]) - - assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" - assert target_node.columns["customer_id"].meta == { - "my_key": "my_value", - "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", - } - assert set(target_node.columns["customer_id"].tags) == set(["my_tag1", "my_tag2"]) - - -def test_update_undocumented_columns_with_prior_knowledge_with_osmosis_keep_description(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "customer_id" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { - "my_key": "my_value", - } - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ - "my_tag1", - "my_tag2", - ] - - column_description_not_updated = ( - "This column will not be updated as it has the 'osmosis_keep_description' attribute" - ) - target_node_name = "model.jaffle_shop_duckdb.customers" - - manifest.nodes[target_node_name].columns[ - "customer_id" - ].description = column_description_not_updated - manifest.nodes[target_node_name].columns["customer_id"].tags = set( - [ - "my_tag3", - "my_tag4", - ] - ) - manifest.nodes[target_node_name].columns["customer_id"].meta = { - "my_key": "my_value", - "osmosis_keep_description": True, - } - - target_node = manifest.nodes[target_node_name] - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - manifest, target_node, placeholders=[""] - ) - yaml_file_model_section = { - "columns": [ - { - "name": "customer_id", - } - ] - } - undocumented_columns = target_node.columns.keys() - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, - target_node, - yaml_file_model_section, - knowledge, - skip_add_tags=True, - skip_merge_meta=True, - add_progenitor_to_meta=False, - ) - - assert yaml_file_model_section["columns"][0]["name"] == "customer_id" - assert yaml_file_model_section["columns"][0]["description"] == column_description_not_updated - assert yaml_file_model_section["columns"][0]["meta"] == { - "my_key": "my_value", - "osmosis_keep_description": True, - } - assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag3", "my_tag4"]) - - assert target_node.columns["customer_id"].description == column_description_not_updated - assert target_node.columns["customer_id"].meta == { - "my_key": "my_value", - "osmosis_keep_description": True, - } - assert set(target_node.columns["customer_id"].tags) == set(["my_tag3", "my_tag4"]) - - -def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta_and_osmosis_keep_description(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "customer_id" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { - "my_key": "my_value", - } - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ - "my_tag1", - "my_tag2", - ] - - column_description_not_updated = ( - "This column will not be updated as it has the 'osmosis_keep_description' attribute" - ) - target_node_name = "model.jaffle_shop_duckdb.customers" - - manifest.nodes[target_node_name].columns[ - "customer_id" - ].description = column_description_not_updated - manifest.nodes[target_node_name].columns["customer_id"].meta = { - "my_key": "my_value", - "osmosis_keep_description": True, - } - - target_node = manifest.nodes[target_node_name] - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - manifest, target_node, placeholders=[""] - ) - yaml_file_model_section = { - "columns": [ - { - "name": "customer_id", - } - ] - } - undocumented_columns = target_node.columns.keys() - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, - target_node, - yaml_file_model_section, - knowledge, - skip_add_tags=False, - skip_merge_meta=False, - add_progenitor_to_meta=True, - ) - - assert yaml_file_model_section["columns"][0]["name"] == "customer_id" - assert yaml_file_model_section["columns"][0]["description"] == column_description_not_updated - assert yaml_file_model_section["columns"][0]["meta"] == { - "my_key": "my_value", - "osmosis_keep_description": True, - "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", - } - assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag1", "my_tag2"]) - - assert target_node.columns["customer_id"].description == column_description_not_updated - assert target_node.columns["customer_id"].meta == { - "my_key": "my_value", - "osmosis_keep_description": True, - "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", - } - assert set(target_node.columns["customer_id"].tags) == set(["my_tag1", "my_tag2"]) - - -def test_update_undocumented_columns_with_prior_knowledge_with_add_inheritance_for_specified_keys(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "customer_id" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { - "my_key": "my_value" - } - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ - "my_tag1", - "my_tag2", - ] - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"]._extra = { - "policy_tags": ["my_policy_tag1"], - } - - target_node_name = "model.jaffle_shop_duckdb.customers" - manifest.nodes[target_node_name].columns["customer_id"].tags = set( - [ - "my_tag3", - "my_tag4", - ] - ) - manifest.nodes[target_node_name].columns["customer_id"].meta = { - "my_key": "my_old_value", - "my_new_key": "my_new_value", - } - target_node = manifest.nodes[target_node_name] - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - manifest, target_node, placeholders=[""] - ) - yaml_file_model_section = { - "columns": [ - { - "name": "customer_id", - } - ] - } - undocumented_columns = target_node.columns.keys() - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, - target_node, - yaml_file_model_section, - knowledge, - skip_add_tags=False, - skip_merge_meta=False, - add_progenitor_to_meta=False, - add_inheritance_for_specified_keys=["policy_tags"], - ) - - assert yaml_file_model_section["columns"][0]["name"] == "customer_id" - assert ( - yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" - ) - assert yaml_file_model_section["columns"][0]["meta"] == { - "my_key": "my_value", - "my_new_key": "my_new_value", - } - assert set(yaml_file_model_section["columns"][0]["tags"]) == set( - ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] - ) - assert set(yaml_file_model_section["columns"][0]["policy_tags"]) == set(["my_policy_tag1"]) - - assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" - assert target_node.columns["customer_id"].meta == { - "my_key": "my_value", - "my_new_key": "my_new_value", - } - assert set(target_node.columns["customer_id"].tags) == set( - ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] - ) - assert set(target_node.columns["customer_id"]._extra["policy_tags"]) == set(["my_policy_tag1"]) - - -def test_update_undocumented_columns_with_osmosis_prefix_meta_with_prior_knowledge(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "rank" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["rank"].meta = { - "my_key": "my_value", - } - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["rank"].tags = [ - "my_tag1", - "my_tag2", - ] - - target_node_name = "model.jaffle_shop_duckdb.customers" - manifest.nodes[target_node_name].columns["customer_rank"].tags = set( - [ - "my_tag3", - "my_tag4", - ] - ) - manifest.nodes[target_node_name].columns["customer_rank"].meta = { - "my_key": "my_old_value", - "my_new_key": "my_new_value", - "osmosis_prefix": "customer_", - } - target_node = manifest.nodes[target_node_name] - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - manifest, target_node, placeholders=[""] - ) - yaml_file_model_section = { - "columns": [ - { - "name": "customer_rank", - } - ] - } - undocumented_columns = target_node.columns.keys() - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, - target_node, - yaml_file_model_section, - knowledge, - skip_add_tags=False, - skip_merge_meta=False, - add_progenitor_to_meta=False, - ) - - assert yaml_file_model_section["columns"][0]["name"] == "customer_rank" - assert ( - yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" - ) - assert yaml_file_model_section["columns"][0]["meta"] == { - "my_key": "my_value", - "my_new_key": "my_new_value", - "osmosis_prefix": "customer_", - } - assert set(yaml_file_model_section["columns"][0]["tags"]) == set( - ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] - ) - - assert target_node.columns["customer_rank"].description == "THIS COLUMN IS UPDATED FOR TESTING" - assert target_node.columns["customer_rank"].meta == { - "my_key": "my_value", - "my_new_key": "my_new_value", - "osmosis_prefix": "customer_", - } - assert set(target_node.columns["customer_rank"].tags) == set( - ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] - ) - - -def test_update_undocumented_columns_with_osmosis_prefix_meta_with_prior_knowledge_with_osmosis_keep_description(): - manifest = load_manifest() - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ - "rank" - ].description = "THIS COLUMN IS UPDATED FOR TESTING" - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["rank"].meta = { - "my_key": "my_value", - } - manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["rank"].tags = [ - "my_tag1", - "my_tag2", - ] - - column_description_not_updated = ( - "This column will not be updated as it has the 'osmosis_keep_description' attribute" - ) - target_node_name = "model.jaffle_shop_duckdb.customers" - - manifest.nodes[target_node_name].columns[ - "customer_rank" - ].description = column_description_not_updated - manifest.nodes[target_node_name].columns["customer_rank"].tags = set( - [ - "my_tag3", - "my_tag4", - ] - ) - manifest.nodes[target_node_name].columns["customer_rank"].meta = { - "my_key": "my_value", - "osmosis_prefix": "customer_", - "osmosis_keep_description": True, - } - - target_node = manifest.nodes[target_node_name] - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - manifest, target_node, placeholders=[""] - ) - yaml_file_model_section = { - "columns": [ - { - "name": "customer_rank", - } - ] - } - undocumented_columns = target_node.columns.keys() - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_columns, - target_node, - yaml_file_model_section, - knowledge, - skip_add_tags=True, - skip_merge_meta=True, - add_progenitor_to_meta=False, - ) - - assert yaml_file_model_section["columns"][0]["name"] == "customer_rank" - assert yaml_file_model_section["columns"][0]["description"] == column_description_not_updated - assert yaml_file_model_section["columns"][0]["meta"] == { - "my_key": "my_value", - "osmosis_keep_description": True, - "osmosis_prefix": "customer_", - } - assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag3", "my_tag4"]) - - assert target_node.columns["customer_rank"].description == column_description_not_updated - assert target_node.columns["customer_rank"].meta == { - "my_key": "my_value", - "osmosis_keep_description": True, - "osmosis_prefix": "customer_", - } - assert set(target_node.columns["customer_rank"].tags) == set(["my_tag3", "my_tag4"]) - - -@pytest.mark.parametrize("use_unrendered_descriptions", [True, False]) -def test_use_unrendered_descriptions(use_unrendered_descriptions): - manifest = load_manifest() - # changing directory, assuming that I need to carry profile_dir through as this doesn't work outside of the dbt project - project_dir = Path(__file__).parent.parent / "demo_duckdb" - target_node = manifest.nodes["model.jaffle_shop_duckdb.orders"] - placeholders = [""] - family_tree = _build_node_ancestor_tree(manifest, target_node) - knowledge = _inherit_column_level_knowledge( - manifest, - family_tree, - placeholders, - project_dir, - use_unrendered_descriptions=use_unrendered_descriptions, - ) - if use_unrendered_descriptions: - expected = '{{ doc("orders_status") }}' - else: - expected = "Orders can be one of the following statuses:" - assert knowledge["status"]["description"].startswith( - expected - ) # starts with so I don't have to worry about linux/windows line endings +# TODO: refactor this test +# import json +# from pathlib import Path +# +# import dbt.version +# import pytest +# from dbt.contracts.graph.manifest import Manifest +# from packaging.version import Version +# +# from dbt_osmosis.core.column_level_knowledge_propagator import ( +# ColumnLevelKnowledgePropagator, +# _build_node_ancestor_tree, +# _inherit_column_level_knowledge, +# ) +# +# dbt_version = Version(dbt.version.get_installed_version().to_version_string(skip_matcher=True)) +# +# +# def load_manifest() -> Manifest: +# manifest_path = Path(__file__).parent.parent / "demo_duckdb/target/manifest.json" +# with manifest_path.open("r") as f: +# manifest_text = f.read() +# manifest_dict = json.loads(manifest_text) +# return Manifest.from_dict(manifest_dict) +# +# +# def test_build_node_ancestor_tree(): +# manifest = load_manifest() +# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] +# expect = { +# "generation_0": [ +# "model.jaffle_shop_duckdb.stg_customers", +# "model.jaffle_shop_duckdb.stg_orders", +# "model.jaffle_shop_duckdb.stg_payments", +# ], +# "generation_1": [ +# "seed.jaffle_shop_duckdb.raw_customers", +# "seed.jaffle_shop_duckdb.raw_orders", +# "seed.jaffle_shop_duckdb.raw_payments", +# ], +# } +# assert _build_node_ancestor_tree(manifest, target_node) == expect +# +# +# def test_inherit_column_level_knowledge(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "customer_id" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["seed.jaffle_shop_duckdb.raw_orders"].columns[ +# "status" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# +# expect = { +# "customer_id": { +# "progenitor": "model.jaffle_shop_duckdb.stg_customers", +# "generation": "generation_0", +# "name": "customer_id", +# "description": "THIS COLUMN IS UPDATED FOR TESTING", +# "data_type": "INTEGER", +# "constraints": [], +# "quote": None, +# }, +# "first_name": { +# "progenitor": "model.jaffle_shop_duckdb.stg_customers", +# "generation": "generation_0", +# "name": "first_name", +# "data_type": "VARCHAR", +# "constraints": [], +# "quote": None, +# }, +# "last_name": { +# "progenitor": "model.jaffle_shop_duckdb.stg_customers", +# "generation": "generation_0", +# "name": "last_name", +# "data_type": "VARCHAR", +# "constraints": [], +# "quote": None, +# }, +# "rank": { +# "progenitor": "model.jaffle_shop_duckdb.stg_customers", +# "generation": "generation_0", +# "name": "rank", +# "data_type": "VARCHAR", +# "constraints": [], +# "quote": None, +# }, +# "order_id": { +# "progenitor": "model.jaffle_shop_duckdb.stg_orders", +# "generation": "generation_0", +# "name": "order_id", +# "data_type": "INTEGER", +# "constraints": [], +# "quote": None, +# }, +# "order_date": { +# "progenitor": "model.jaffle_shop_duckdb.stg_orders", +# "generation": "generation_0", +# "name": "order_date", +# "data_type": "DATE", +# "constraints": [], +# "quote": None, +# }, +# "status": { +# "progenitor": "seed.jaffle_shop_duckdb.raw_orders", +# "generation": "generation_1", +# "name": "status", +# "description": "THIS COLUMN IS UPDATED FOR TESTING", +# "data_type": "VARCHAR", +# "constraints": [], +# "quote": None, +# }, +# "payment_id": { +# "progenitor": "model.jaffle_shop_duckdb.stg_payments", +# "generation": "generation_0", +# "name": "payment_id", +# "data_type": "INTEGER", +# "constraints": [], +# "quote": None, +# }, +# "payment_method": { +# "progenitor": "model.jaffle_shop_duckdb.stg_payments", +# "generation": "generation_0", +# "name": "payment_method", +# "data_type": "VARCHAR", +# "constraints": [], +# "quote": None, +# }, +# "amount": { +# "progenitor": "model.jaffle_shop_duckdb.stg_payments", +# "generation": "generation_0", +# "name": "amount", +# "data_type": "DOUBLE", +# "constraints": [], +# "quote": None, +# }, +# } +# if dbt_version >= Version("1.9.0"): +# for key in expect.keys(): +# expect[key]["granularity"] = None +# +# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] +# family_tree = _build_node_ancestor_tree(manifest, target_node) +# placeholders = [""] +# assert _inherit_column_level_knowledge(manifest, family_tree, placeholders) == expect +# +# +# def test_update_undocumented_columns_with_prior_knowledge(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "customer_id" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { +# "my_key": "my_value" +# } +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ +# "my_tag1", +# "my_tag2", +# ] +# +# target_node_name = "model.jaffle_shop_duckdb.customers" +# manifest.nodes[target_node_name].columns["customer_id"].tags = set( +# [ +# "my_tag3", +# "my_tag4", +# ] +# ) +# manifest.nodes[target_node_name].columns["customer_id"].meta = { +# "my_key": "my_old_value", +# "my_new_key": "my_new_value", +# } +# target_node = manifest.nodes[target_node_name] +# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( +# manifest, target_node, placeholders=[""] +# ) +# yaml_file_model_section = { +# "columns": [ +# { +# "name": "customer_id", +# } +# ] +# } +# undocumented_columns = target_node.columns.keys() +# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( +# undocumented_columns, +# target_node, +# yaml_file_model_section, +# knowledge, +# skip_add_tags=False, +# skip_merge_meta=False, +# add_progenitor_to_meta=False, +# ) +# +# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" +# assert ( +# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" +# ) +# assert yaml_file_model_section["columns"][0]["meta"] == { +# "my_key": "my_value", +# "my_new_key": "my_new_value", +# } +# assert set(yaml_file_model_section["columns"][0]["tags"]) == set( +# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] +# ) +# +# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" +# assert target_node.columns["customer_id"].meta == { +# "my_key": "my_value", +# "my_new_key": "my_new_value", +# } +# assert set(target_node.columns["customer_id"].tags) == set( +# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] +# ) +# +# +# def test_update_undocumented_columns_with_prior_knowledge_skip_add_tags(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "customer_id" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { +# "my_key": "my_value" +# } +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ +# "my_tag1", +# "my_tag2", +# ] +# +# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] +# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( +# manifest, target_node, placeholders=[""] +# ) +# yaml_file_model_section = { +# "columns": [ +# { +# "name": "customer_id", +# } +# ] +# } +# undocumented_columns = target_node.columns.keys() +# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( +# undocumented_columns, +# target_node, +# yaml_file_model_section, +# knowledge, +# skip_add_tags=True, +# skip_merge_meta=False, +# add_progenitor_to_meta=False, +# ) +# +# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" +# assert ( +# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" +# ) +# assert yaml_file_model_section["columns"][0]["meta"] == {"my_key": "my_value"} +# assert "tags" not in yaml_file_model_section["columns"][0] +# +# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" +# assert target_node.columns["customer_id"].meta == {"my_key": "my_value"} +# assert set(target_node.columns["customer_id"].tags) == set([]) +# +# +# def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "customer_id" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { +# "my_key": "my_value" +# } +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ +# "my_tag1", +# "my_tag2", +# ] +# +# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] +# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( +# manifest, target_node, placeholders=[""] +# ) +# yaml_file_model_section = { +# "columns": [ +# { +# "name": "customer_id", +# } +# ] +# } +# undocumented_columns = target_node.columns.keys() +# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( +# undocumented_columns, +# target_node, +# yaml_file_model_section, +# knowledge, +# skip_add_tags=False, +# skip_merge_meta=True, +# add_progenitor_to_meta=False, +# ) +# +# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" +# assert ( +# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" +# ) +# assert "meta" not in yaml_file_model_section["columns"][0] +# assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag1", "my_tag2"]) +# +# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" +# assert target_node.columns["customer_id"].meta == {} +# assert set(target_node.columns["customer_id"].tags) == set(["my_tag1", "my_tag2"]) +# +# +# def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "customer_id" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { +# "my_key": "my_value" +# } +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ +# "my_tag1", +# "my_tag2", +# ] +# +# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] +# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( +# manifest, target_node, placeholders=[""] +# ) +# yaml_file_model_section = { +# "columns": [ +# { +# "name": "customer_id", +# } +# ] +# } +# undocumented_columns = target_node.columns.keys() +# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( +# undocumented_columns, +# target_node, +# yaml_file_model_section, +# knowledge, +# skip_add_tags=False, +# skip_merge_meta=False, +# add_progenitor_to_meta=True, +# ) +# +# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" +# assert ( +# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" +# ) +# assert yaml_file_model_section["columns"][0]["meta"] == { +# "my_key": "my_value", +# "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", +# } +# assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag1", "my_tag2"]) +# +# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" +# assert target_node.columns["customer_id"].meta == { +# "my_key": "my_value", +# "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", +# } +# assert set(target_node.columns["customer_id"].tags) == set(["my_tag1", "my_tag2"]) +# +# +# def test_update_undocumented_columns_with_prior_knowledge_with_osmosis_keep_description(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "customer_id" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { +# "my_key": "my_value", +# } +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ +# "my_tag1", +# "my_tag2", +# ] +# +# column_description_not_updated = ( +# "This column will not be updated as it has the 'osmosis_keep_description' attribute" +# ) +# target_node_name = "model.jaffle_shop_duckdb.customers" +# +# manifest.nodes[target_node_name].columns[ +# "customer_id" +# ].description = column_description_not_updated +# manifest.nodes[target_node_name].columns["customer_id"].tags = set( +# [ +# "my_tag3", +# "my_tag4", +# ] +# ) +# manifest.nodes[target_node_name].columns["customer_id"].meta = { +# "my_key": "my_value", +# "osmosis_keep_description": True, +# } +# +# target_node = manifest.nodes[target_node_name] +# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( +# manifest, target_node, placeholders=[""] +# ) +# yaml_file_model_section = { +# "columns": [ +# { +# "name": "customer_id", +# } +# ] +# } +# undocumented_columns = target_node.columns.keys() +# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( +# undocumented_columns, +# target_node, +# yaml_file_model_section, +# knowledge, +# skip_add_tags=True, +# skip_merge_meta=True, +# add_progenitor_to_meta=False, +# ) +# +# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" +# assert yaml_file_model_section["columns"][0]["description"] == column_description_not_updated +# assert yaml_file_model_section["columns"][0]["meta"] == { +# "my_key": "my_value", +# "osmosis_keep_description": True, +# } +# assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag3", "my_tag4"]) +# +# assert target_node.columns["customer_id"].description == column_description_not_updated +# assert target_node.columns["customer_id"].meta == { +# "my_key": "my_value", +# "osmosis_keep_description": True, +# } +# assert set(target_node.columns["customer_id"].tags) == set(["my_tag3", "my_tag4"]) +# +# +# def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta_and_osmosis_keep_description(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "customer_id" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { +# "my_key": "my_value", +# } +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ +# "my_tag1", +# "my_tag2", +# ] +# +# column_description_not_updated = ( +# "This column will not be updated as it has the 'osmosis_keep_description' attribute" +# ) +# target_node_name = "model.jaffle_shop_duckdb.customers" +# +# manifest.nodes[target_node_name].columns[ +# "customer_id" +# ].description = column_description_not_updated +# manifest.nodes[target_node_name].columns["customer_id"].meta = { +# "my_key": "my_value", +# "osmosis_keep_description": True, +# } +# +# target_node = manifest.nodes[target_node_name] +# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( +# manifest, target_node, placeholders=[""] +# ) +# yaml_file_model_section = { +# "columns": [ +# { +# "name": "customer_id", +# } +# ] +# } +# undocumented_columns = target_node.columns.keys() +# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( +# undocumented_columns, +# target_node, +# yaml_file_model_section, +# knowledge, +# skip_add_tags=False, +# skip_merge_meta=False, +# add_progenitor_to_meta=True, +# ) +# +# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" +# assert yaml_file_model_section["columns"][0]["description"] == column_description_not_updated +# assert yaml_file_model_section["columns"][0]["meta"] == { +# "my_key": "my_value", +# "osmosis_keep_description": True, +# "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", +# } +# assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag1", "my_tag2"]) +# +# assert target_node.columns["customer_id"].description == column_description_not_updated +# assert target_node.columns["customer_id"].meta == { +# "my_key": "my_value", +# "osmosis_keep_description": True, +# "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", +# } +# assert set(target_node.columns["customer_id"].tags) == set(["my_tag1", "my_tag2"]) +# +# +# def test_update_undocumented_columns_with_prior_knowledge_with_add_inheritance_for_specified_keys(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "customer_id" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { +# "my_key": "my_value" +# } +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ +# "my_tag1", +# "my_tag2", +# ] +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"]._extra = { +# "policy_tags": ["my_policy_tag1"], +# } +# +# target_node_name = "model.jaffle_shop_duckdb.customers" +# manifest.nodes[target_node_name].columns["customer_id"].tags = set( +# [ +# "my_tag3", +# "my_tag4", +# ] +# ) +# manifest.nodes[target_node_name].columns["customer_id"].meta = { +# "my_key": "my_old_value", +# "my_new_key": "my_new_value", +# } +# target_node = manifest.nodes[target_node_name] +# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( +# manifest, target_node, placeholders=[""] +# ) +# yaml_file_model_section = { +# "columns": [ +# { +# "name": "customer_id", +# } +# ] +# } +# undocumented_columns = target_node.columns.keys() +# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( +# undocumented_columns, +# target_node, +# yaml_file_model_section, +# knowledge, +# skip_add_tags=False, +# skip_merge_meta=False, +# add_progenitor_to_meta=False, +# add_inheritance_for_specified_keys=["policy_tags"], +# ) +# +# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" +# assert ( +# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" +# ) +# assert yaml_file_model_section["columns"][0]["meta"] == { +# "my_key": "my_value", +# "my_new_key": "my_new_value", +# } +# assert set(yaml_file_model_section["columns"][0]["tags"]) == set( +# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] +# ) +# assert set(yaml_file_model_section["columns"][0]["policy_tags"]) == set(["my_policy_tag1"]) +# +# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" +# assert target_node.columns["customer_id"].meta == { +# "my_key": "my_value", +# "my_new_key": "my_new_value", +# } +# assert set(target_node.columns["customer_id"].tags) == set( +# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] +# ) +# assert set(target_node.columns["customer_id"]._extra["policy_tags"]) == set(["my_policy_tag1"]) +# +# +# def test_update_undocumented_columns_with_osmosis_prefix_meta_with_prior_knowledge(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "rank" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["rank"].meta = { +# "my_key": "my_value", +# } +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["rank"].tags = [ +# "my_tag1", +# "my_tag2", +# ] +# +# target_node_name = "model.jaffle_shop_duckdb.customers" +# manifest.nodes[target_node_name].columns["customer_rank"].tags = set( +# [ +# "my_tag3", +# "my_tag4", +# ] +# ) +# manifest.nodes[target_node_name].columns["customer_rank"].meta = { +# "my_key": "my_old_value", +# "my_new_key": "my_new_value", +# "osmosis_prefix": "customer_", +# } +# target_node = manifest.nodes[target_node_name] +# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( +# manifest, target_node, placeholders=[""] +# ) +# yaml_file_model_section = { +# "columns": [ +# { +# "name": "customer_rank", +# } +# ] +# } +# undocumented_columns = target_node.columns.keys() +# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( +# undocumented_columns, +# target_node, +# yaml_file_model_section, +# knowledge, +# skip_add_tags=False, +# skip_merge_meta=False, +# add_progenitor_to_meta=False, +# ) +# +# assert yaml_file_model_section["columns"][0]["name"] == "customer_rank" +# assert ( +# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" +# ) +# assert yaml_file_model_section["columns"][0]["meta"] == { +# "my_key": "my_value", +# "my_new_key": "my_new_value", +# "osmosis_prefix": "customer_", +# } +# assert set(yaml_file_model_section["columns"][0]["tags"]) == set( +# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] +# ) +# +# assert target_node.columns["customer_rank"].description == "THIS COLUMN IS UPDATED FOR TESTING" +# assert target_node.columns["customer_rank"].meta == { +# "my_key": "my_value", +# "my_new_key": "my_new_value", +# "osmosis_prefix": "customer_", +# } +# assert set(target_node.columns["customer_rank"].tags) == set( +# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] +# ) +# +# +# def test_update_undocumented_columns_with_osmosis_prefix_meta_with_prior_knowledge_with_osmosis_keep_description(): +# manifest = load_manifest() +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ +# "rank" +# ].description = "THIS COLUMN IS UPDATED FOR TESTING" +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["rank"].meta = { +# "my_key": "my_value", +# } +# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["rank"].tags = [ +# "my_tag1", +# "my_tag2", +# ] +# +# column_description_not_updated = ( +# "This column will not be updated as it has the 'osmosis_keep_description' attribute" +# ) +# target_node_name = "model.jaffle_shop_duckdb.customers" +# +# manifest.nodes[target_node_name].columns[ +# "customer_rank" +# ].description = column_description_not_updated +# manifest.nodes[target_node_name].columns["customer_rank"].tags = set( +# [ +# "my_tag3", +# "my_tag4", +# ] +# ) +# manifest.nodes[target_node_name].columns["customer_rank"].meta = { +# "my_key": "my_value", +# "osmosis_prefix": "customer_", +# "osmosis_keep_description": True, +# } +# +# target_node = manifest.nodes[target_node_name] +# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( +# manifest, target_node, placeholders=[""] +# ) +# yaml_file_model_section = { +# "columns": [ +# { +# "name": "customer_rank", +# } +# ] +# } +# undocumented_columns = target_node.columns.keys() +# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( +# undocumented_columns, +# target_node, +# yaml_file_model_section, +# knowledge, +# skip_add_tags=True, +# skip_merge_meta=True, +# add_progenitor_to_meta=False, +# ) +# +# assert yaml_file_model_section["columns"][0]["name"] == "customer_rank" +# assert yaml_file_model_section["columns"][0]["description"] == column_description_not_updated +# assert yaml_file_model_section["columns"][0]["meta"] == { +# "my_key": "my_value", +# "osmosis_keep_description": True, +# "osmosis_prefix": "customer_", +# } +# assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag3", "my_tag4"]) +# +# assert target_node.columns["customer_rank"].description == column_description_not_updated +# assert target_node.columns["customer_rank"].meta == { +# "my_key": "my_value", +# "osmosis_keep_description": True, +# "osmosis_prefix": "customer_", +# } +# assert set(target_node.columns["customer_rank"].tags) == set(["my_tag3", "my_tag4"]) +# +# +# @pytest.mark.parametrize("use_unrendered_descriptions", [True, False]) +# def test_use_unrendered_descriptions(use_unrendered_descriptions): +# manifest = load_manifest() +# # changing directory, assuming that I need to carry profile_dir through as this doesn't work outside of the dbt project +# project_dir = Path(__file__).parent.parent / "demo_duckdb" +# target_node = manifest.nodes["model.jaffle_shop_duckdb.orders"] +# placeholders = [""] +# family_tree = _build_node_ancestor_tree(manifest, target_node) +# knowledge = _inherit_column_level_knowledge( +# manifest, +# family_tree, +# placeholders, +# project_dir, +# use_unrendered_descriptions=use_unrendered_descriptions, +# ) +# if use_unrendered_descriptions: +# expected = '{{ doc("orders_status") }}' +# else: +# expected = "Orders can be one of the following statuses:" +# assert knowledge["status"]["description"].startswith( +# expected +# ) # starts with so I don't have to worry about linux/windows line endings diff --git a/tests/test_yaml_manager.py b/tests/test_yaml_manager.py index b7b74369..024dfd49 100644 --- a/tests/test_yaml_manager.py +++ b/tests/test_yaml_manager.py @@ -1,116 +1,117 @@ -from pathlib import Path - -import pytest -from dbt.contracts.results import CatalogKey - -from dbt_osmosis.core.osmosis import DbtYamlManager - - -@pytest.fixture(scope="module") -def yaml_manager() -> DbtYamlManager: - return DbtYamlManager(project_dir="demo_duckdb", profiles_dir="demo_duckdb", dry_run=True) - - -def test_initialize_adapter(yaml_manager: DbtYamlManager): - yaml_manager.initialize_adapter() - - -def test_list(yaml_manager: DbtYamlManager): - yaml_manager.list() - - -def test_test(yaml_manager: DbtYamlManager): - yaml_manager.test() - - -def test_run(yaml_manager: DbtYamlManager): - yaml_manager.run() - - -def test_build(yaml_manager: DbtYamlManager): - yaml_manager.build() - - -def test_parse_project(yaml_manager: DbtYamlManager): - yaml_manager.parse_project() - - -def test_safe_parse_project(yaml_manager: DbtYamlManager): - yaml_manager.safe_parse_project() - - -def test_bootstrap_sources(yaml_manager: DbtYamlManager): - yaml_manager.bootstrap_sources() - - -def test_draft_project_structure_update_plan(yaml_manager: DbtYamlManager): - yaml_manager.draft_project_structure_update_plan() - - -def test_commit_project_restructure_to_disk(yaml_manager: DbtYamlManager): - yaml_manager.commit_project_restructure_to_disk() - - -def test_propagate_documentation_downstream(yaml_manager: DbtYamlManager): - yaml_manager.propagate_documentation_downstream() - - -def _customer_column_types(yaml_manager: DbtYamlManager) -> dict[str, str]: - node = next(n for n in yaml_manager.manifest.nodes.values() if n.name == "customers") - assert node - - catalog_key = yaml_manager.get_catalog_key(node) - columns = yaml_manager.get_columns_meta(catalog_key) - assert columns - - column_types = dict({name: meta.type for name, meta in columns.items()}) - assert column_types - return column_types - - -def test_get_columns_meta(yaml_manager: DbtYamlManager): - assert _customer_column_types(yaml_manager) == { - # in DuckDB decimals always have presision and scale - "customer_average_value": "DECIMAL(18,3)", - "customer_id": "INTEGER", - "customer_lifetime_value": "DOUBLE", - "first_name": "VARCHAR", - "first_order": "DATE", - "last_name": "VARCHAR", - "most_recent_order": "DATE", - "number_of_orders": "BIGINT", - } - - -def test_get_columns_meta_char_length(): - yaml_manager = DbtYamlManager( - project_dir="demo_duckdb", profiles_dir="demo_duckdb", char_length=True, dry_run=True - ) - assert _customer_column_types(yaml_manager) == { - # in DuckDB decimals always have presision and scale - "customer_average_value": "DECIMAL(18,3)", - "customer_id": "INTEGER", - "customer_lifetime_value": "DOUBLE", - "first_name": "character varying(256)", - "first_order": "DATE", - "last_name": "character varying(256)", - "most_recent_order": "DATE", - "number_of_orders": "BIGINT", - } - - -def test_get_columns_meta_numeric_precision(): - yaml_manager = DbtYamlManager( - project_dir="demo_duckdb", profiles_dir="demo_duckdb", numeric_precision=True, dry_run=True - ) - assert _customer_column_types(yaml_manager) == { - # in DuckDB decimals always have presision and scale - "customer_average_value": "DECIMAL(18,3)", - "customer_id": "INTEGER", - "customer_lifetime_value": "DOUBLE", - "first_name": "VARCHAR", - "first_order": "DATE", - "last_name": "VARCHAR", - "most_recent_order": "DATE", - "number_of_orders": "BIGINT", - } +# TODO: refactor this test +# from pathlib import Path +# +# import pytest +# from dbt.contracts.results import CatalogKey +# +# from dbt_osmosis.core.osmosis import DbtYamlManager +# +# +# @pytest.fixture(scope="module") +# def yaml_manager() -> DbtYamlManager: +# return DbtYamlManager(project_dir="demo_duckdb", profiles_dir="demo_duckdb", dry_run=True) +# +# +# def test_initialize_adapter(yaml_manager: DbtYamlManager): +# yaml_manager.initialize_adapter() +# +# +# def test_list(yaml_manager: DbtYamlManager): +# yaml_manager.list() +# +# +# def test_test(yaml_manager: DbtYamlManager): +# yaml_manager.test() +# +# +# def test_run(yaml_manager: DbtYamlManager): +# yaml_manager.run() +# +# +# def test_build(yaml_manager: DbtYamlManager): +# yaml_manager.build() +# +# +# def test_parse_project(yaml_manager: DbtYamlManager): +# yaml_manager.parse_project() +# +# +# def test_safe_parse_project(yaml_manager: DbtYamlManager): +# yaml_manager.safe_parse_project() +# +# +# def test_bootstrap_sources(yaml_manager: DbtYamlManager): +# yaml_manager.bootstrap_sources() +# +# +# def test_draft_project_structure_update_plan(yaml_manager: DbtYamlManager): +# yaml_manager.draft_project_structure_update_plan() +# +# +# def test_commit_project_restructure_to_disk(yaml_manager: DbtYamlManager): +# yaml_manager.commit_project_restructure_to_disk() +# +# +# def test_propagate_documentation_downstream(yaml_manager: DbtYamlManager): +# yaml_manager.propagate_documentation_downstream() +# +# +# def _customer_column_types(yaml_manager: DbtYamlManager) -> dict[str, str]: +# node = next(n for n in yaml_manager.manifest.nodes.values() if n.name == "customers") +# assert node +# +# catalog_key = yaml_manager.get_catalog_key(node) +# columns = yaml_manager.get_columns_meta(catalog_key) +# assert columns +# +# column_types = dict({name: meta.type for name, meta in columns.items()}) +# assert column_types +# return column_types +# +# +# def test_get_columns_meta(yaml_manager: DbtYamlManager): +# assert _customer_column_types(yaml_manager) == { +# # in DuckDB decimals always have presision and scale +# "customer_average_value": "DECIMAL(18,3)", +# "customer_id": "INTEGER", +# "customer_lifetime_value": "DOUBLE", +# "first_name": "VARCHAR", +# "first_order": "DATE", +# "last_name": "VARCHAR", +# "most_recent_order": "DATE", +# "number_of_orders": "BIGINT", +# } +# +# +# def test_get_columns_meta_char_length(): +# yaml_manager = DbtYamlManager( +# project_dir="demo_duckdb", profiles_dir="demo_duckdb", char_length=True, dry_run=True +# ) +# assert _customer_column_types(yaml_manager) == { +# # in DuckDB decimals always have presision and scale +# "customer_average_value": "DECIMAL(18,3)", +# "customer_id": "INTEGER", +# "customer_lifetime_value": "DOUBLE", +# "first_name": "character varying(256)", +# "first_order": "DATE", +# "last_name": "character varying(256)", +# "most_recent_order": "DATE", +# "number_of_orders": "BIGINT", +# } +# +# +# def test_get_columns_meta_numeric_precision(): +# yaml_manager = DbtYamlManager( +# project_dir="demo_duckdb", profiles_dir="demo_duckdb", numeric_precision=True, dry_run=True +# ) +# assert _customer_column_types(yaml_manager) == { +# # in DuckDB decimals always have presision and scale +# "customer_average_value": "DECIMAL(18,3)", +# "customer_id": "INTEGER", +# "customer_lifetime_value": "DOUBLE", +# "first_name": "VARCHAR", +# "first_order": "DATE", +# "last_name": "VARCHAR", +# "most_recent_order": "DATE", +# "number_of_orders": "BIGINT", +# } From 9d9e31091d8b9b0a7f4ad13ef9236e133fda66b8 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 20:36:30 -0700 Subject: [PATCH 16/46] chore: remove app py and add noop test --- src/dbt_osmosis/app.py | 491 --------------------------- tests/test_column_level_knowledge.py | 4 + 2 files changed, 4 insertions(+), 491 deletions(-) delete mode 100644 src/dbt_osmosis/app.py diff --git a/src/dbt_osmosis/app.py b/src/dbt_osmosis/app.py deleted file mode 100644 index 0def9392..00000000 --- a/src/dbt_osmosis/app.py +++ /dev/null @@ -1,491 +0,0 @@ -import argparse -import os -import sys -from collections import OrderedDict -from pathlib import Path -from typing import Optional - -import dbt.config.profile as dbt_profile -import feedparser -import pandas as pd -import streamlit as st -import ydata_profiling -from streamlit_ace import THEMES, st_ace - -from dbt_osmosis.vendored.dbt_core_interface import DEFAULT_PROFILES_DIR, DbtProject - -# from streamlit_pandas_profiling import st_profile_report - - -st.set_page_config(page_title="dbt-osmosis Workbench", page_icon="🌊", layout="wide") -state = st.session_state - -try: # hack in arguments for streamlit run - parser = argparse.ArgumentParser(description="dbt osmosis workbench") - parser.add_argument("--profiles-dir", help="dbt profile directory") - parser.add_argument("--project-dir", help="dbt project directory") - args = vars(parser.parse_args(sys.argv[1:])) -except Exception: - args = {} - -root_path = Path(__file__).parent -demo_dir = root_path / "demo" - -# GLOBAL STATE VARS -DBT = "DBT" -"""DbtProject object""" -PROJ_DIR = "PROJ_DIR" -"""dbt project directory""" -PROF_DIR = "PROF_DIR" -"""dbt profile directory""" - -_proj_dir = args.get("project_dir") -state.setdefault(PROJ_DIR, _proj_dir or os.getenv("DBT_PROJECT_DIR", str(Path.cwd()))) -_prof_dir = args.get("profiles_dir") -state.setdefault(PROF_DIR, _prof_dir or os.getenv("DBT_PROFILES_DIR", DEFAULT_PROFILES_DIR)) - - -RAW_PROFILES = "RAW_PROFILES" -"""All profiles as parsed from raw profiles yaml""" -state.setdefault(RAW_PROFILES, dbt_profile.read_profile(state[PROF_DIR] or DEFAULT_PROFILES_DIR)) - -# SQL WORKBENCH VARS -SQL_RESULT = "SQL_RESULT" -"""SQL result as a pandas dataframe""" -SQL_ADAPTER_RESP = "SQL_ADAPTER_RESP" -"""Adapter response from dbt""" -SQL_QUERY_STATE = "SQL_QUERY_STATE" -"""SQL query state tracking if it is successful or failed""" - -state.setdefault(SQL_RESULT, pd.DataFrame()) -state.setdefault(SQL_ADAPTER_RESP, None) -state.setdefault(SQL_QUERY_STATE, "test") - -# PRIMARY SQL CONTAINERS -COMPILED_SQL = "COMPILED_SQL" -"""Compiled sql container""" -state.setdefault(COMPILED_SQL, "") -RAW_SQL = "RAW_SQL" -"""Raw sql container""" - -if "demo" in state[PROJ_DIR]: - state.setdefault( - RAW_SQL, - """ -{% set payment_methods = ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] %} - -with orders as ( - - select * from {{ ref('stg_orders') }} - -), - -payments as ( - - select * from {{ ref('stg_payments') }} - -), - -order_payments as ( - - select - order_id, - - {% for payment_method in payment_methods -%} - sum(case when payment_method = '{{ payment_method }}' then amount else 0 end) as {{ payment_method }}_amount, - {% endfor -%} - - sum(amount) as total_amount - - from payments - - group by order_id - -), - -final as ( - - select - orders.order_id, - orders.customer_id, - orders.order_date, - orders.status, - - {% for payment_method in payment_methods -%} - - order_payments.{{ payment_method }}_amount, - - {% endfor -%} - - order_payments.total_amount as amount - - from orders - - - left join order_payments - on orders.order_id = order_payments.order_id - -) - -select * from final - """, - ) -else: - state.setdefault(RAW_SQL, "") - -# COMPONENT KEYS -PROFILE_SELECTOR = "PROFILE_SELECTOR" -"""Selected profile""" -THEME_PICKER = "THEME_PICKER" -"""Selected theme for workbench""" -DIALECT_PICKER = "DIALECT_PICKER" -"""Selected SQL dialect for workbench""" -QUERY_LIMITER = "QUERY_LIMITER" -"""Limit results returned in SQL runner""" -BASIC_PROFILE_OPT = "BASIC_PROFILE_OPT" -"""Use basic profiling for pandas-profiling""" -PROFILE_DOWNLOADER = "PROFILE_DOWNLOADER" -"""Controller for downloading HTML results of pandas-profiler""" -DYNAMIC_COMPILATION = "DYNAMIC_COMPILATION" -"""Toggle to compile on-type or compile on control+enter""" - -# COMPONENT OPTIONS -DIALECTS = ("pgsql", "mysql", "sql", "sqlserver") -"""Tuple of SQL dialects usable in ace editor""" - -# TRIGGERS -DBT_DO_RELOAD = "DBT_DO_RELOAD" -"""This triggers dbt to reparse the project""" -RUN_PROFILER = "RUN_PROFILER" -"""Run pandas profiler on test bench result set""" -PIVOT_LAYOUT = "PIVOT_LAYOUT" -"""Pivot the editor layout from side-by-side to top-bottom""" - -state.setdefault(PIVOT_LAYOUT, False) - - -def inject_dbt(change_target: Optional[str] = None): - """Parse dbt project and load context var""" - if DBT not in state or change_target: - dbt_ctx = DbtProject( - project_dir=state[PROJ_DIR], - profiles_dir=state[PROF_DIR], - target=change_target, - ) - else: - dbt_ctx: DbtProject = state[DBT] - dbt_ctx.rebuild_dbt_manifest(reset=True) - state[DBT] = dbt_ctx - return True - - -if DBT not in state: - inject_dbt() -ctx: DbtProject = state[DBT] - -TARGET_PROFILE = "TARGET_PROFILE" -"""Target profile for dbt to execute against""" - -state.setdefault(TARGET_PROFILE, ctx.config.target_name) - - -def toggle_viewer() -> None: - """Toggle the layout of the editor""" - state[PIVOT_LAYOUT] = not state[PIVOT_LAYOUT] - - -def compile_sql(sql: str) -> str: - """Compile SQL using dbt context. - - Mostly a wrapper for dbt-core-interface compile_code - """ - try: - with ctx.adapter.connection_named("__sql_workbench__"): - return ctx.compile_code(sql).compiled_code - except Exception: - # TODO: make this more specific - return None - - -def run_query(sql: str, limit: int = 2000) -> None: - try: - # TODO: expose this as a config option - with ctx.adapter.connection_named("__sql_workbench__"): - result = ctx.execute_code(f"select * from ({sql}) as __all_data limit {limit}") - except Exception as error: - state[SQL_QUERY_STATE] = "error" - state[SQL_ADAPTER_RESP] = str(error) - else: - output = [OrderedDict(zip(result.table.column_names, row)) for row in result.table.rows] - state[SQL_RESULT] = pd.DataFrame(output) - state[SQL_ADAPTER_RESP] = result.adapter_response - state[SQL_QUERY_STATE] = "success" - - -@st.cache -def convert_df_to_csv(dataframe: pd.DataFrame): - return dataframe.to_csv().encode("utf-8") - - -@st.cache( - hash_funcs={ - ydata_profiling.report.presentation.core.container.Container: lambda _: state[COMPILED_SQL], - ydata_profiling.report.presentation.core.html.HTML: lambda _: state[COMPILED_SQL], - }, - allow_output_mutation=True, -) -def build_profile_report( - dataframe: pd.DataFrame, minimal: bool = True -) -> ydata_profiling.ProfileReport: - return dataframe.profile_report(minimal=minimal) - - -@st.cache( - hash_funcs={ - ydata_profiling.report.presentation.core.container.Container: lambda _: state[COMPILED_SQL], - ydata_profiling.report.presentation.core.html.HTML: lambda _: state[COMPILED_SQL], - }, - allow_output_mutation=True, -) -def convert_profile_report_to_html(profile: ydata_profiling.ProfileReport) -> str: - return profile.to_html() - - -st.title("dbt-osmosis 🌊") - -st.sidebar.header("Profiles") -st.sidebar.write( - "Select a profile used for materializing, compiling, and testing models. Can be updated at any" - " time." -) -state[TARGET_PROFILE] = st.sidebar.radio( - f"Loaded profiles from {ctx.config.profile_name}", - [target for target in state[RAW_PROFILES][ctx.config.profile_name].get("outputs", [])], - key=PROFILE_SELECTOR, -) -st.sidebar.markdown(f"Current Target: **{state[TARGET_PROFILE]}**") -st.sidebar.write("") -st.sidebar.write("Utility") -# st.sidebar.button("Reload dbt project", key=DBT_DO_RELOAD) -st.sidebar.caption( - "Refresh the page to reparse dbt. This is useful if any updated models or macros in your" - " physical project on disk have changed and are not yet reflected in the workbench as" - " refable or updated." -) -st.sidebar.write("") -st.sidebar.selectbox("Editor Theme", THEMES, index=8, key=THEME_PICKER) -st.sidebar.selectbox("Editor Language", DIALECTS, key=DIALECT_PICKER) - -# IDE LAYOUT -notificationContainer = st.empty() -descriptionContainer = st.container() -compileOptionContainer = st.container() -ideContainer = st.container() - -descriptionContainer.markdown( - """ -Welcome to the [dbt-osmosis](https://github.com/z3z1ma/dbt-osmosis) workbench 👋. -The workbench serves as a no fuss way to spin up -an environment where you can very quickly iterate on dbt models. In an ideal flow, a developer -can spin up the workbench and use it as a _complement_ to their IDE, not a replacement. This means -copying and pasting over a model you are really digging into 🧑‍💻 OR it is just as valid to use -the workbench as a scratchpad 👷‍♀️. In a full day of development, you may never spin down the workbench. -Refreshing the page is enough to reparse the physical dbt project on disk. The instantaneous feedback -rarely experienced with jinja + ability to execute the SQL both synergize to supercharge ⚡️ productivity! -""" -) - -if not state[PIVOT_LAYOUT]: - idePart1, idePart2 = ideContainer.columns(2) -else: - idePart1 = ideContainer.container() - idePart2 = ideContainer.container() - - -compileOptionContainer.write("") -compileOpt1, compileOpt2 = compileOptionContainer.columns(2) -auto_update = compileOpt1.checkbox("Dynamic Compilation", key=DYNAMIC_COMPILATION, value=True) -if auto_update: - compileOpt1.caption("👉 Compiling SQL on change") -else: - compileOpt1.caption("👉 Compiling SQL with control + enter") -compileOpt2.button("Pivot Layout", on_click=toggle_viewer) - -with idePart1: - state[RAW_SQL] = st_ace( - value=state[RAW_SQL], - theme=state[THEME_PICKER], - language=state[DIALECT_PICKER], - auto_update=auto_update, - key="AceEditor", - max_lines=35, - min_lines=20, - height=500, - ) - -with idePart2: - with st.expander("📝 Compiled SQL", expanded=True): - st.code( - ( - state[COMPILED_SQL] - if state[COMPILED_SQL] - else " --> Invalid Jinja, awaiting model to become valid" - ), - language="sql", - ) - -if compile_sql(state[RAW_SQL]) != state[COMPILED_SQL]: - state[COMPILED_SQL] = compile_sql(state[RAW_SQL]) - st.experimental_rerun() # This eager re-run speeds up the app - - -if ctx.config.target_name != state[TARGET_PROFILE]: # or state[DBT_DO_RELOAD]: - print("Reloading dbt project...") - with notificationContainer: - ctx.config.target_name = state[TARGET_PROFILE] - ctx.config.target_name = state[TARGET_PROFILE] - with st.spinner("Reloading dbt... ⚙️"): - inject_dbt(state[TARGET_PROFILE]) - # state[RAW_SQL] += " " - state[COMPILED_SQL] = compile_sql(state[RAW_SQL]) - st.experimental_rerun() - - -# TEST LAYOUT -testHeaderContainer = st.container() -test_column_1, _, test_column_2 = st.columns([1, 2, 1]) -testContainer = st.container() -testContainerViewer = testContainer.expander("Result Viewer 🔎", expanded=True) -test_view_1, _, test_view_2 = testContainerViewer.columns([1, 2, 1]) - -downloadBtnContainer, profileBtnContainer, profileOptContainer = st.columns([1, 1, 3]) -profilerContainer = st.container() - -with testHeaderContainer: - st.write("") - st.subheader("Osmosis Query Result Inspector 🔬") - st.write("") - st.markdown( - """Run queries against your datawarehouse leveraging the selected target profile. This is a critical step in - developer productivity 📈 and dbt-osmosis workbench aims to keep it a click away. Additionally, you can leverage the - profiling functionality to get an idea of the dataset you have in memory.""" - ), - st.write(""), st.write("") - -query_limit = test_column_2.number_input( - "Limit Results", min_value=1, max_value=50_000, value=2_000, step=1, key=QUERY_LIMITER -) -test_column_2.caption( - "Limit the number of results returned by the query, the maximum value is 50,000" -) - -if state[COMPILED_SQL]: - test_column_1.button( - "Test Compiled Query", - on_click=run_query, - kwargs={"sql": state[COMPILED_SQL], "limit": query_limit}, - ) - test_column_1.caption("This will run the compiled SQL against your data warehouse") - -with testContainerViewer: - st.write("\n\n\n\n\n") - - if state[SQL_QUERY_STATE] == "success": - test_view_1.write("#### Compiled SQL query results") - elif state[SQL_QUERY_STATE] == "error": - test_view_1.warning(f"SQL query error: {state[SQL_ADAPTER_RESP]}") - if not state[SQL_RESULT].empty: - test_view_2.info(f"Adapter Response: {state[SQL_ADAPTER_RESP]}") - st.dataframe(state[SQL_RESULT]) - else: - st.write("") - st.markdown( - "> The results of your workbench query will show up here. Click `Test Compiled Query`" - " to see the results. " - ) - st.write("") - st.write("") - - -with downloadBtnContainer: - st.download_button( - label="Download data as CSV", - data=convert_df_to_csv(state[SQL_RESULT]), - file_name="dbt_osmosis_workbench.csv", - mime="text/csv", - ) - -with profileBtnContainer: - st.button("Profile Data", key=RUN_PROFILER) - -with profileOptContainer: - st.checkbox("Basic Profiler", key=BASIC_PROFILE_OPT, value=True) - st.caption( - "Useful for larger datasets, use the minimal pandas-profiling option for a simpler report" - ) - -if state[RUN_PROFILER]: - pr = build_profile_report(state[SQL_RESULT], state[BASIC_PROFILE_OPT]) - pr_html = convert_profile_report_to_html(pr) - with profilerContainer: - st.components.v1.html(pr_html, height=650, scrolling=True) - st.download_button( - label="Download profile report", - data=pr_html, - file_name="dbt_osmosis_workbench_profile.html", - mime="text/html", - key=PROFILE_DOWNLOADER, - ) - st.write("") - -st.write(""), st.write("") -footer1, footer2 = st.columns([1, 2]) -footer1.header("Useful Links 🧐") -footer2.header("RSS Feeds 🚨") -footer1.write("") -footer1.markdown( - """ -##### dbt docs -- [docs.getdbt.com](https://docs.getdbt.com/) - -##### dbt core repo -- [github.com/dbt-labs/dbt-core](https://github.com/dbt-labs/dbt-core/) - -##### data team reference material - -- [Gitlab Data Team Wiki](https://about.gitlab.com/handbook/business-technology/data-team/) -- [dbt Best Practices](https://docs.getdbt.com/guides/best-practices/how-we-structure/1-guide-overview) - -""" -) - - -@st.cache(ttl=300.0) -def get_feed(url: str): - return feedparser.parse(url) - - -d = get_feed("http://www.reddit.com/r/python/.rss") -footer2.write("") -rss1 = footer2.expander(f"{d['feed']['title']} ({d['feed']['link']})") -rss1.write() -rss1.caption(d["feed"]["subtitle"]) -for i, item in enumerate(d["entries"]): - rss1.markdown(f"[{item['title']}]({item['link']})") - if i > 5: - rss1.markdown(f"[See all]({d['feed']['link']})") - break -d = get_feed("https://news.ycombinator.com/rss") -rss2 = footer2.expander(f"{d['feed']['title']} ({d['feed']['link']})") -rss2.write() -rss2.caption(d["feed"]["subtitle"]) -for i, item in enumerate(d["entries"]): - rss2.markdown(f"[{item['title']}]({item['link']})") - if i > 5: - rss2.markdown(f"[See all]({d['feed']['link']})") - break -footer2.write("") -footer2.write( - "Catch up on any news! Staying up-to-date is important to keeping sharp in an always evolving" - " world." -) diff --git a/tests/test_column_level_knowledge.py b/tests/test_column_level_knowledge.py index c6b5fd54..359ab018 100644 --- a/tests/test_column_level_knowledge.py +++ b/tests/test_column_level_knowledge.py @@ -1,3 +1,7 @@ +def test_noop(): + pass + + # TODO: refactor this test # from dbt_osmosis.core.column_level_knowledge import get_prior_knowledge # From c7abcacc5ace8e53e737f057eff9189e3db52be1 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 22:17:17 -0700 Subject: [PATCH 17/46] wip: continue working on functional rewrite --- src/dbt_osmosis/core/osmosis.py | 165 ++++++++++++++++++++++---------- 1 file changed, 115 insertions(+), 50 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 3dfe047a..e274618c 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -494,7 +494,7 @@ def filter_models( def f(node: ResultNode) -> bool: """Closure to filter models based on the context settings.""" - if node.resource_type not in (NodeType.Model, NodeType.Source): + if node.resource_type not in (NodeType.Model, NodeType.Source, NodeType.Seed): return False if node.package_name != context.project.config.project_name: return False @@ -711,11 +711,11 @@ def _get_yaml_path_template(context: YamlRefactorContext, node: ResultNode) -> s def get_current_yaml_path(context: YamlRefactorContext, node: ResultNode) -> Path | None: """Get the current yaml path for a dbt model or source node.""" - if node.resource_type == NodeType.Model and getattr(node, "patch_path", None): + if node.resource_type in (NodeType.Model, NodeType.Seed) and getattr(node, "patch_path", None): return Path(context.project.config.project_root).joinpath( t.cast(str, node.patch_path).partition("://")[-1] ) - if node.resource_type == NodeType.Source and hasattr(node, "source_name"): + if node.resource_type == NodeType.Source: return Path(context.project.config.project_root, node.path) return None @@ -742,11 +742,13 @@ def get_target_yaml_path(context: YamlRefactorContext, node: ResultNode) -> Path def build_yaml_file_mapping( - context: YamlRefactorContext, create_missing_sources: bool = True + context: YamlRefactorContext, create_missing_sources: bool = False ) -> dict[str, SchemaFileLocation]: """Build a mapping of dbt model and source nodes to their current and target yaml paths.""" + if create_missing_sources: create_missing_source_yamls(context) + out_map: dict[str, SchemaFileLocation] = {} for uid, node in filter_models(context): current_path = get_current_yaml_path(context, node) @@ -793,7 +795,7 @@ def commit_yamls(context: YamlRefactorContext) -> None: context.register_mutations(1) -def _generate_minimal_model_yaml(node: ModelNode) -> dict[str, t.Any]: +def _generate_minimal_model_yaml(node: ModelNode | SeedNode) -> dict[str, t.Any]: """Generate a minimal model yaml for a dbt model node.""" return {"name": node.name, "columns": []} @@ -817,22 +819,20 @@ def _create_operations_for_node( ops: list[RestructureOperation] = [] if loc.current is None: - if loc.node_type == NodeType.Model: - assert isinstance(node, ModelNode) + if isinstance(node, (ModelNode, SeedNode)): minimal = _generate_minimal_model_yaml(node) ops.append( RestructureOperation( file_path=loc.target, - content={"version": 2, "models": [minimal]}, + content={"version": 2, f"{node.resource_type}s": [minimal]}, ) ) else: - assert isinstance(node, SourceDefinition) - minimal_source = _generate_minimal_source_yaml(node) + minimal = _generate_minimal_source_yaml(t.cast(SourceDefinition, node)) ops.append( RestructureOperation( file_path=loc.target, - content={"version": 2, "sources": [minimal_source]}, + content={"version": 2, "sources": [minimal]}, ) ) else: @@ -840,18 +840,24 @@ def _create_operations_for_node( injectable: dict[str, t.Any] = {"version": 2} injectable.setdefault("models", []) injectable.setdefault("sources", []) + injectable.setdefault("seeds", []) if loc.node_type == NodeType.Model: assert isinstance(node, ModelNode) for obj in existing.get("models", []): if obj["name"] == node.name: injectable["models"].append(obj) break - else: + elif loc.node_type == NodeType.Source: assert isinstance(node, SourceDefinition) for src in existing.get("sources", []): if src["name"] == node.source_name: injectable["sources"].append(src) break + elif loc.node_type == NodeType.Seed: + assert isinstance(node, SeedNode) + for seed in existing.get("seeds", []): + if seed["name"] == node.name: + injectable["seeds"].append(seed) ops.append( RestructureOperation( file_path=loc.target, @@ -899,11 +905,21 @@ def pretty_print_plan(plan: RestructureDeltaPlan) -> None: def _remove_models(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> None: """Clean up the existing yaml doc by removing models superseded by the restructure plan.""" to_remove = {n.name for n in nodes if n.resource_type == NodeType.Model} - keep_models = [] - for model_block in existing_doc.get("models", []): - if model_block.get("name") not in to_remove: - keep_models.append(model_block) - existing_doc["models"] = keep_models + keep = [] + for section in existing_doc.get("models", []): + if section.get("name") not in to_remove: + keep.append(section) + existing_doc["models"] = keep + + +def _remove_seeds(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> None: + """Clean up the existing yaml doc by removing models superseded by the restructure plan.""" + to_remove = {n.name for n in nodes if n.resource_type == NodeType.Seed} + keep = [] + for section in existing_doc.get("seeds", []): + if section.get("name") not in to_remove: + keep.append(section) + existing_doc["seeds"] = keep def _remove_sources(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> None: @@ -912,14 +928,14 @@ def _remove_sources(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> (n.source_name, n.name) for n in nodes if n.resource_type == NodeType.Source } keep_sources = [] - for src_block in existing_doc.get("sources", []): + for section in existing_doc.get("sources", []): keep_tables = [] - for tbl in src_block.get("tables", []): - if (src_block["name"], tbl["name"]) not in to_remove_sources: + for tbl in section.get("tables", []): + if (section["name"], tbl["name"]) not in to_remove_sources: keep_tables.append(tbl) if keep_tables: - src_block["tables"] = keep_tables - keep_sources.append(src_block) + section["tables"] = keep_tables + keep_sources.append(section) existing_doc["sources"] = keep_sources @@ -933,6 +949,7 @@ def apply_restructure_plan( if confirm: pretty_print_plan(plan) + while confirm: response = input("Apply the restructure plan? [y/N]: ") if response.lower() in ("y", "yes"): @@ -967,12 +984,17 @@ def apply_restructure_plan( _remove_models(existing_data, nodes) if "sources" in existing_data: _remove_sources(existing_data, nodes) + if "seeds" in existing_data: + _remove_seeds(existing_data, nodes) - if (not existing_data.get("models")) and (not existing_data.get("sources")): + keys = set(existing_data.keys()) - {"version"} + if all(len(existing_data.get(k, [])) == 0 for k in keys): if not context.settings.dry_run: path.unlink(missing_ok=True) if path.parent.exists() and not any(path.parent.iterdir()): path.parent.rmdir() + if path in _YAML_BUFFER_CACHE: + del _YAML_BUFFER_CACHE[path] context.register_mutations(1) logger.info(f"Superseded entire file {path}") else: @@ -980,6 +1002,8 @@ def apply_restructure_plan( _write_yaml(context, path, existing_data) logger.info(f"Migrated doc from {path} -> {op.file_path}") + _ = commit_yamls(context), reload_manifest(context.project) + # Inheritance Logic # ================= @@ -1003,6 +1027,8 @@ def _build_node_ancestor_tree( return tree for dep in getattr(node.depends_on, "nodes", []): + if not dep.startswith(("model.", "seed.", "source.")): + continue if dep not in visited: visited.add(dep) member = manifest.nodes.get(dep, manifest.sources.get(dep)) @@ -1054,7 +1080,7 @@ def _build_column_knowledge_grap( ancestor = context.project.manifest.nodes.get( ancestor_uid, context.project.manifest.sources.get(ancestor_uid) ) - if not ancestor: + if not isinstance(ancestor, (SourceDefinition, SeedNode, ModelNode)): continue for name, metadata in ancestor.columns.items(): @@ -1069,9 +1095,16 @@ def _build_column_knowledge_grap( if context.settings.use_unrendered_descriptions: raw_yaml = _get_member_yaml(context, ancestor) or {} raw_columns = t.cast(list[dict[str, t.Any]], raw_yaml.get("columns", [])) - raw_column_metadata = _find_first(raw_columns, lambda c: c["name"] == name, {}) - if undrendered_description := raw_column_metadata.get("description"): - graph_edge["description"] = undrendered_description + raw_column_metadata = _find_first( + raw_columns, + lambda c: normalize_column_name( + c["name"], context.project.config.credentials.type + ) + == name, + {}, + ) + if unrendered_description := raw_column_metadata.get("description"): + graph_edge["description"] = unrendered_description current_tags = graph_node.get("tags", []) if incoming_tags := (set(graph_edge.pop("tags", [])) | set(current_tags)): @@ -1087,7 +1120,7 @@ def _build_column_knowledge_grap( graph_edge[inheritable] = incoming_val if graph_edge.get("description", EMPTY_STRING) in context.placeholders: - del graph_edge["description"] + _ = graph_edge.pop("description", None) if graph_edge.get("tags") == []: del graph_edge["tags"] if graph_edge.get("meta") == {}: @@ -1098,16 +1131,18 @@ def _build_column_knowledge_grap( graph_node.update(graph_edge) - return {name: meta.to_dict() for name, meta in node.columns.items()} + return column_knowledge_graph def inherit_upstream_column_knowledge( - context: YamlRefactorContext, node: ResultNode -) -> dict[str, dict[str, t.Any]]: - """Inherit column level knowledge from the ancestors of a dbt model or source node producing a column data structure usable in dbt yaml files. + context: YamlRefactorContext, node: ResultNode | None = None +) -> None: + """Inherit column level knowledge from the ancestors of a dbt model or source node.""" + if node is None: + for _, node in filter_models(context): + inherit_upstream_column_knowledge(context, node) + return None - This mutates the manifest node in place and returns the column data structure for use in a dbt yaml file. - """ inheritable = ["description"] if not context.settings.skip_add_tags: inheritable.append("tags") @@ -1116,18 +1151,33 @@ def inherit_upstream_column_knowledge( for extra in context.settings.add_inheritance_for_specified_keys: if extra not in inheritable: inheritable.append(extra) + + yaml_section = _get_member_yaml(context, node) column_knowledge_graph = _build_column_knowledge_grap(context, node) for name, node_column in node.columns.items(): - # NOTE: This is our graph "lookup", and our best [only] opportunity to apply user defined fuzzing - # so we should make the composable and robust + # TODO: This is our graph "lookup", and our primary opportunity to apply user defined fuzzing + # so we should make the composable and robust (maybe a plugin system for fuzzing? it's an important problem) kwargs = column_knowledge_graph.get(name) if kwargs is None: continue - node.columns[name] = node_column.replace( - **{k: v for k, v in kwargs.items() if v is not None and k in inheritable} - ) - return {name: meta.to_dict() for name, meta in node.columns.items()} + updated_metadata = {k: v for k, v in kwargs.items() if v is not None and k in inheritable} + node.columns[name] = node_column.replace(**updated_metadata) + + if not yaml_section: + continue + for column in yaml_section.get("columns", []): + yaml_name = normalize_column_name( + column["name"], context.project.config.credentials.type + ) + if yaml_name == name: + if updated_metadata.get("tags") == []: + del updated_metadata["tags"] + if updated_metadata.get("meta") == {}: + del updated_metadata["meta"] + if updated_metadata.get("description", EMPTY_STRING) in context.placeholders: + _ = updated_metadata.pop("description", None) + column.update(**updated_metadata) def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None = None) -> None: @@ -1135,7 +1185,7 @@ def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None if context.settings.skip_add_columns: return if node is None: - for node in context.project.manifest.nodes.values(): + for _, node in filter_models(context): inject_missing_columns(context, node) return yaml_section = _get_member_yaml(context, node) @@ -1165,7 +1215,7 @@ def remove_columns_not_in_database( if context.settings.skip_add_columns: return if node is None: - for node in context.project.manifest.nodes.values(): + for _, node in filter_models(context): remove_columns_not_in_database(context, node) return yaml_section = _get_member_yaml(context, node) @@ -1190,7 +1240,7 @@ def sort_columns_as_in_database( ) -> None: """Sort columns in a dbt node and it's corresponding yaml section as they appear in the database. Changes are implicitly buffered until commit_yamls is called.""" if node is None: - for node in context.project.manifest.nodes.values(): + for _, node in filter_models(context): sort_columns_as_in_database(context, node) return yaml_section = _get_member_yaml(context, node) @@ -1216,7 +1266,7 @@ def sort_columns_alphabetically( ) -> None: """Sort columns in a dbt node and it's corresponding yaml section alphabetically. Changes are implicitly buffered until commit_yamls is called.""" if node is None: - for node in context.project.manifest.nodes.values(): + for _, node in filter_models(context): sort_columns_alphabetically(context, node) return yaml_section = _get_member_yaml(context, node) @@ -1245,11 +1295,26 @@ def run_example_compilation_flow() -> None: if __name__ == "__main__": c = DbtConfiguration(project_dir="demo_duckdb", profiles_dir="demo_duckdb") c.vars = {"dbt-osmosis": {}} + project = create_dbt_project_context(c) - yaml_context = YamlRefactorContext(project) + yaml_context = YamlRefactorContext( + project, settings=YamlRefactorSettings(use_unrendered_descriptions=True) + ) + plan = draft_restructure_delta_plan(yaml_context) - apply_restructure_plan(yaml_context, plan, confirm=True) - inject_missing_columns(yaml_context) - remove_columns_not_in_database(yaml_context) - sort_columns_as_in_database(yaml_context) - commit_yamls(yaml_context) + steps = ( + step + for step in ( + create_missing_source_yamls(yaml_context), + apply_restructure_plan(yaml_context, plan, confirm=True), + inject_missing_columns(yaml_context), + remove_columns_not_in_database(yaml_context), + inherit_upstream_column_knowledge(yaml_context), + sort_columns_as_in_database(yaml_context), + commit_yamls(yaml_context), + ) + ) + + DONE = object() + while next(steps, DONE) is not DONE: + logger.info("Completed step.") From 40920e6b1a3b5b3d8d857c602b4bb1747cb5cc21 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 22:34:55 -0700 Subject: [PATCH 18/46] feat: start move over to uv --- .github/workflows/constraints.txt | 4 +- .github/workflows/tests.yml | 18 +- .pre-commit-config.yaml | 25 +- Makefile | 0 poetry.lock | 3754 ----------------------------- pyproject.toml | 90 +- requirements.txt | 89 + src/dbt_osmosis/core/osmosis.py | 1 + uv.lock | 2942 ++++++++++++++++++++++ 9 files changed, 3095 insertions(+), 3828 deletions(-) create mode 100644 Makefile delete mode 100644 poetry.lock create mode 100644 requirements.txt create mode 100644 uv.lock diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt index 41bef00d..061c7ca4 100644 --- a/.github/workflows/constraints.txt +++ b/.github/workflows/constraints.txt @@ -1,3 +1,3 @@ pip==24.0 -poetry==1.3.2 -virtualenv==20.21.0 +uv==0.5.13 + diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 9c69930b..512f0e87 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -31,21 +31,21 @@ jobs: - name: Install Poetry run: | - pip install --constraint=.github/workflows/constraints.txt poetry - poetry --version - - - name: Install dbt-core - run: | - poetry add dbt-core==${{ matrix.dbt-version }} + pip install --constraint=.github/workflows/constraints.txt uv + uv --version - name: Install required packages run: | # install duckdb extras to be able to parse manifest - poetry install -E duckdb + uv sync --extra duckdb + + - name: Install dbt-core + run: | + uv pip install dbt-core==${{ matrix.dbt-version }} - name: Parse manifest run: | - poetry run dbt parse --project-dir demo_duckdb --profiles-dir demo_duckdb -t test + uv run dbt parse --project-dir demo_duckdb --profiles-dir demo_duckdb -t test - name: Run pytest run: | - poetry run python -m pytest + uv tool run pytest diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 68f655da..9624b53e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -16,21 +16,18 @@ repos: rev: v0.10.0.1 hooks: - id: shellcheck - - repo: https://github.com/psf/black - rev: 24.4.2 - hooks: - - id: black - args: ["--config", "pyproject.toml"] - - repo: https://github.com/pycqa/isort - rev: 5.13.2 - hooks: - - id: isort - args: ["-sp", "pyproject.toml"] + # TODO: replace with ruff + # - repo: https://github.com/psf/black + # rev: 24.4.2 + # hooks: + # - id: black + # args: ["--config", "pyproject.toml"] + # - repo: https://github.com/pycqa/isort + # rev: 5.13.2 + # hooks: + # - id: isort + # args: ["-sp", "pyproject.toml"] - repo: https://github.com/hadolint/hadolint rev: v2.13.0-beta hooks: - id: hadolint-docker -# - repo: https://github.com/pycqa/flake8 -# rev: 4.0.1 -# hooks: -# - id: flake8 diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..e69de29b diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 3b90e235..00000000 --- a/poetry.lock +++ /dev/null @@ -1,3754 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. - -[[package]] -name = "agate" -version = "1.9.1" -description = "A data analysis library that is optimized for humans instead of machines." -optional = false -python-versions = "*" -files = [ - {file = "agate-1.9.1-py2.py3-none-any.whl", hash = "sha256:1cf329510b3dde07c4ad1740b7587c9c679abc3dcd92bb1107eabc10c2e03c50"}, - {file = "agate-1.9.1.tar.gz", hash = "sha256:bc60880c2ee59636a2a80cd8603d63f995be64526abf3cbba12f00767bcd5b3d"}, -] - -[package.dependencies] -Babel = ">=2.0" -isodate = ">=0.5.4" -leather = ">=0.3.2" -parsedatetime = ">=2.1,<2.5 || >2.5" -python-slugify = ">=1.2.1" -pytimeparse = ">=1.1.5" -tzdata = {version = ">=2023.3", markers = "platform_system == \"Windows\""} - -[package.extras] -test = ["PyICU (>=2.4.2)", "backports.zoneinfo", "coverage (>=3.7.1)", "cssselect (>=0.9.1)", "lxml (>=3.6.0)", "pytest", "pytest-cov"] - -[[package]] -name = "altair" -version = "5.5.0" -description = "Vega-Altair: A declarative statistical visualization library for Python." -optional = true -python-versions = ">=3.9" -files = [ - {file = "altair-5.5.0-py3-none-any.whl", hash = "sha256:91a310b926508d560fe0148d02a194f38b824122641ef528113d029fcd129f8c"}, - {file = "altair-5.5.0.tar.gz", hash = "sha256:d960ebe6178c56de3855a68c47b516be38640b73fb3b5111c2a9ca90546dd73d"}, -] - -[package.dependencies] -jinja2 = "*" -jsonschema = ">=3.0" -narwhals = ">=1.14.2" -packaging = "*" -typing-extensions = {version = ">=4.10.0", markers = "python_version < \"3.14\""} - -[package.extras] -all = ["altair-tiles (>=0.3.0)", "anywidget (>=0.9.0)", "numpy", "pandas (>=1.1.3)", "pyarrow (>=11)", "vega-datasets (>=0.9.0)", "vegafusion[embed] (>=1.6.6)", "vl-convert-python (>=1.7.0)"] -dev = ["duckdb (>=1.0)", "geopandas", "hatch (>=1.13.0)", "ipython[kernel]", "mistune", "mypy", "pandas (>=1.1.3)", "pandas-stubs", "polars (>=0.20.3)", "pyarrow-stubs", "pytest", "pytest-cov", "pytest-xdist[psutil] (>=3.5,<4.0)", "ruff (>=0.6.0)", "types-jsonschema", "types-setuptools"] -doc = ["docutils", "jinja2", "myst-parser", "numpydoc", "pillow (>=9,<10)", "pydata-sphinx-theme (>=0.14.1)", "scipy", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinxext-altair"] -save = ["vl-convert-python (>=1.7.0)"] - -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "anyio" -version = "4.7.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = true -python-versions = ">=3.9" -files = [ - {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, - {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} - -[package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] -trio = ["trio (>=0.26.1)"] - -[[package]] -name = "astroid" -version = "3.3.6" -description = "An abstract syntax tree for Python with inference support." -optional = false -python-versions = ">=3.9.0" -files = [ - {file = "astroid-3.3.6-py3-none-any.whl", hash = "sha256:db676dc4f3ae6bfe31cda227dc60e03438378d7a896aec57422c95634e8d722f"}, - {file = "astroid-3.3.6.tar.gz", hash = "sha256:6aaea045f938c735ead292204afdb977a36e989522b7833ef6fea94de743f442"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "babel" -version = "2.16.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.8" -files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, -] - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "black" -version = "24.10.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.9" -files = [ - {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, - {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, - {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, - {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, - {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, - {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, - {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, - {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, - {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, - {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, - {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, - {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, - {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, - {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, - {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, - {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, - {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, - {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, - {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, - {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, - {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, - {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "blinker" -version = "1.9.0" -description = "Fast, simple object-to-object and broadcast signaling" -optional = true -python-versions = ">=3.9" -files = [ - {file = "blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc"}, - {file = "blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf"}, -] - -[[package]] -name = "cachetools" -version = "5.5.0" -description = "Extensible memoizing collections and decorators" -optional = true -python-versions = ">=3.7" -files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, -] - -[[package]] -name = "certifi" -version = "2024.8.30" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "cfgv" -version = "3.4.0" -description = "Validate configuration and produce human readable error messages." -optional = false -python-versions = ">=3.8" -files = [ - {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, - {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "contourpy" -version = "1.3.0" -description = "Python library for calculating contours of 2D quadrilateral grids" -optional = true -python-versions = ">=3.9" -files = [ - {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, - {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, - {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, - {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, - {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, - {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, - {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, - {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, - {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, - {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, - {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, - {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, - {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, - {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, - {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, - {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, - {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, - {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, - {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, - {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, - {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, - {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, - {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, - {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, - {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, - {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, - {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, - {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, - {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, - {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, - {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, - {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, - {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, - {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, - {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, - {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, -] - -[package.dependencies] -numpy = ">=1.23" - -[package.extras] -bokeh = ["bokeh", "selenium"] -docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] -mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"] -test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] -test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] - -[[package]] -name = "cycler" -version = "0.12.1" -description = "Composable style cycles" -optional = true -python-versions = ">=3.8" -files = [ - {file = "cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30"}, - {file = "cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c"}, -] - -[package.extras] -docs = ["ipython", "matplotlib", "numpydoc", "sphinx"] -tests = ["pytest", "pytest-cov", "pytest-xdist"] - -[[package]] -name = "dacite" -version = "1.8.1" -description = "Simple creation of data classes from dictionaries." -optional = true -python-versions = ">=3.6" -files = [ - {file = "dacite-1.8.1-py3-none-any.whl", hash = "sha256:cc31ad6fdea1f49962ea42db9421772afe01ac5442380d9a99fcf3d188c61afe"}, -] - -[package.extras] -dev = ["black", "coveralls", "mypy", "pre-commit", "pylint", "pytest (>=5)", "pytest-benchmark", "pytest-cov"] - -[[package]] -name = "daff" -version = "1.3.46" -description = "Diff and patch tables" -optional = false -python-versions = "*" -files = [ - {file = "daff-1.3.46.tar.gz", hash = "sha256:22d0da9fd6a3275b54c926a9c97b180f9258aad65113ea18f3fec52cbadcd818"}, -] - -[[package]] -name = "dbt-adapters" -version = "1.10.4" -description = "The set of adapter protocols and base functionality that supports integration with dbt-core" -optional = false -python-versions = ">=3.9.0" -files = [ - {file = "dbt_adapters-1.10.4-py3-none-any.whl", hash = "sha256:fb13ce0c0da5dba19aa7bbe0a6fe6f13c920d19ef4d9fbcf92a86c0e7a987e53"}, - {file = "dbt_adapters-1.10.4.tar.gz", hash = "sha256:a33507fb0ef8b68c365ce2cbbb9f40910924d3268da017684559e867071b8a8a"}, -] - -[package.dependencies] -agate = ">=1.0,<2.0" -dbt-common = ">=1.13,<2.0" -mashumaro = {version = ">=3.9,<3.15", extras = ["msgpack"]} -protobuf = ">=5.0,<6.0" -pytz = ">=2015.7" -typing-extensions = ">=4.0,<5.0" - -[[package]] -name = "dbt-common" -version = "1.14.0" -description = "The shared common utilities that dbt-core and adapter implementations use" -optional = false -python-versions = ">=3.9" -files = [ - {file = "dbt_common-1.14.0-py3-none-any.whl", hash = "sha256:239b568a0dd764a431b93cdfe247628622c975f2eed8abf3bc04f4dc770ad161"}, - {file = "dbt_common-1.14.0.tar.gz", hash = "sha256:2227e24a165780c5368320dedd3c6bc40038dedece48af03daab43c11bf20372"}, -] - -[package.dependencies] -agate = ">=1.7.0,<1.10" -colorama = ">=0.3.9,<0.5" -deepdiff = ">=7.0,<8.0" -isodate = ">=0.6,<0.7" -jinja2 = ">=3.1.3,<4" -jsonschema = ">=4.0,<5.0" -mashumaro = {version = ">=3.9,<4.0", extras = ["msgpack"]} -pathspec = ">=0.9,<0.13" -protobuf = ">=5.0,<6.0" -python-dateutil = ">=2.0,<3.0" -requests = "<3.0.0" -typing-extensions = ">=4.4,<5.0" - -[package.extras] -build = ["check-wheel-contents", "twine", "wheel"] -lint = ["black (>=23.3,<24.0)", "flake8", "flake8-docstrings", "flake8-pyproject", "mypy (>=1.3,<2.0)", "pytest (>=7.3,<8.0)", "types-jinja2 (>=2.11,<3.0)", "types-jsonschema (>=4.17,<5.0)", "types-protobuf (>=5.0,<6.0)", "types-python-dateutil (>=2.8,<3.0)", "types-pyyaml (>=6.0,<7.0)", "types-requests"] -test = ["hypothesis (>=6.87,<7.0)", "pytest (>=7.3,<8.0)", "pytest-cov (>=4.1,<5.0)", "pytest-mock", "pytest-xdist (>=3.2,<4.0)"] - -[[package]] -name = "dbt-core" -version = "1.9.0" -description = "With dbt, data analysts and engineers can build analytics the way engineers build applications." -optional = false -python-versions = ">=3.9" -files = [ - {file = "dbt_core-1.9.0-py3-none-any.whl", hash = "sha256:a4273c242f44d3fdded7a3d412e43e60959f96a659e7e7487d6ec02c9f6729d5"}, - {file = "dbt_core-1.9.0.tar.gz", hash = "sha256:2b4c19f190abff4280837ea8bab4e3e1e70b9a846bed397df970b628dd6c51cc"}, -] - -[package.dependencies] -agate = ">=1.7.0,<1.10" -click = ">=8.0.2,<9.0" -daff = ">=1.3.46" -dbt-adapters = ">=1.10.1,<2.0" -dbt-common = ">=1.13.0,<2.0" -dbt-extractor = ">=0.5.0,<=0.6" -dbt-semantic-interfaces = ">=0.7.4,<0.8" -Jinja2 = ">=3.1.3,<4" -mashumaro = {version = ">=3.9,<3.15", extras = ["msgpack"]} -networkx = ">=2.3,<4.0" -packaging = ">20.9" -pathspec = ">=0.9,<0.13" -protobuf = ">=5.0,<6.0" -pytz = ">=2015.7" -pyyaml = ">=6.0" -requests = "<3.0.0" -snowplow-tracker = ">=1.0.2,<2.0" -sqlparse = ">=0.5.0,<0.6.0" -typing-extensions = ">=4.4" - -[[package]] -name = "dbt-duckdb" -version = "1.9.1" -description = "The duckdb adapter plugin for dbt (data build tool)" -optional = true -python-versions = ">=3.8" -files = [ - {file = "dbt_duckdb-1.9.1-py3-none-any.whl", hash = "sha256:4034e57a0f3ee5283597b447b8fddf5220aa11744e84deb28ee32c977826139b"}, - {file = "dbt_duckdb-1.9.1.tar.gz", hash = "sha256:3d5b5e0372033684f3515b4f152757d330598b517ba7ee9c6612819a2e13e084"}, -] - -[package.dependencies] -dbt-adapters = ">=1,<2" -dbt-common = ">=1,<2" -dbt-core = ">=1.8.0" -duckdb = ">=1.0.0" - -[package.extras] -glue = ["boto3", "mypy-boto3-glue"] -md = ["duckdb (==1.1.1)"] - -[[package]] -name = "dbt-extractor" -version = "0.5.1" -description = "A tool to analyze and extract information from Jinja used in dbt projects." -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "dbt_extractor-0.5.1-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3b91e6106b967d908b34f83929d3f50ee2b498876a1be9c055fe060ed728c556"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3614ce9f83ae4cd0dc95f77730034a793a1c090a52dcf698ba1c94050afe3a8b"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ea4edf33035d0a060b1e01c42fb2d99316457d44c954d6ed4eed9f1948664d87"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b9bf50eb062b4344d9546fe42038996c6e7e7daa10724aa955d64717260e5d"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c0ce901d4ebf0664977e4e1cbf596d4afc6c1339fcc7d2cf67ce3481566a626f"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cbe338b76e9ffaa18275456e041af56c21bb517f6fbda7a58308138703da0996"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b25fa7a276ab26aa2d70ff6e0cf4cfb1490d7831fb57ee1337c24d2b0333b84"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5651e458be910ff567c0da3ea2eb084fd01884cc88888ac2cf1e240dcddacc2"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62e4f040fd338b652683421ce48e903812e27fd6e7af58b1b70a4e1f9f2c79e3"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91e25ad78f1f4feadd27587ebbcc46ad909cfad843118908f30336d08d8400ca"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:cdf9938b36cd098bcdd80f43dc03864da3f69f57d903a9160a32236540d4ddcd"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:475e2c05b17eb4976eff6c8f7635be42bec33f15a74ceb87a40242c94a99cebf"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:100453ba06e169cbdb118234ab3f06f6722a2e0e316089b81c88dea701212abc"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-win32.whl", hash = "sha256:6916aae085fd5f2af069fd6947933e78b742c9e3d2165e1740c2e28ae543309a"}, - {file = "dbt_extractor-0.5.1-cp38-abi3-win_amd64.whl", hash = "sha256:eecc08f3743e802a8ede60c89f7b2bce872acc86120cbc0ae7df229bb8a95083"}, - {file = "dbt_extractor-0.5.1.tar.gz", hash = "sha256:cd5d95576a8dea4190240aaf9936a37fd74b4b7913ca69a3c368fc4472bb7e13"}, -] - -[[package]] -name = "dbt-postgres" -version = "1.9.0" -description = "The set of adapter protocols and base functionality that supports integration with dbt-core" -optional = true -python-versions = ">=3.9.0" -files = [ - {file = "dbt_postgres-1.9.0-py3-none-any.whl", hash = "sha256:c85d1adb419251ac989e5f720fdbb964aa6c280da7739dc8c48d44e6f45d354a"}, - {file = "dbt_postgres-1.9.0.tar.gz", hash = "sha256:b0574e9e1e66d8a5cd627b1d464ec0278eef7342f0b5babe4f987eee9d02a143"}, -] - -[package.dependencies] -agate = ">=1.0,<2.0" -dbt-adapters = ">=1.7.0,<2.0" -dbt-common = ">=1.0.4,<2.0" -dbt-core = ">=1.8.0" -psycopg2-binary = ">=2.9,<3.0" - -[[package]] -name = "dbt-semantic-interfaces" -version = "0.7.4" -description = "The shared semantic layer definitions that dbt-core and MetricFlow use" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dbt_semantic_interfaces-0.7.4-py3-none-any.whl", hash = "sha256:63965478ef27056f20a8c9a0f59b1355ebbc15133c1a6f0d368d93996a31dd5d"}, - {file = "dbt_semantic_interfaces-0.7.4.tar.gz", hash = "sha256:dcedda6702ecabb633aa4e8ab3b1eb7f9c4301dcc0026076a4a0ef64f9e59cf0"}, -] - -[package.dependencies] -click = ">=7.0,<9.0" -importlib-metadata = ">=6.0,<7" -jinja2 = ">=3.1.3,<4" -jsonschema = ">=4.0,<5" -more-itertools = ">=8.0,<11.0" -pydantic = ">=1.10,<3" -python-dateutil = ">=2.0,<3" -pyyaml = ">=6.0,<7" -typing-extensions = ">=4.4,<5" - -[[package]] -name = "dbt-sqlite" -version = "1.4.0" -description = "A SQLite adapter plugin for dbt (data build tool)" -optional = true -python-versions = "*" -files = [ - {file = "dbt-sqlite-1.4.0.tar.gz", hash = "sha256:f1fcacd057b78f0a48c1a84bff7d3ff31cd1c34bbffa02933f2f9d31ddfd0a3b"}, - {file = "dbt_sqlite-1.4.0-py3-none-any.whl", hash = "sha256:4ef86682e6fb4940088b824cfb986465b5d66ed589ebfc3071238d915c37aa4e"}, -] - -[package.dependencies] -dbt-core = ">=1.4.0" - -[[package]] -name = "deepdiff" -version = "7.0.1" -description = "Deep Difference and Search of any Python object/data. Recreate objects by adding adding deltas to each other." -optional = false -python-versions = ">=3.8" -files = [ - {file = "deepdiff-7.0.1-py3-none-any.whl", hash = "sha256:447760081918216aa4fd4ca78a4b6a848b81307b2ea94c810255334b759e1dc3"}, - {file = "deepdiff-7.0.1.tar.gz", hash = "sha256:260c16f052d4badbf60351b4f77e8390bee03a0b516246f6839bc813fb429ddf"}, -] - -[package.dependencies] -ordered-set = ">=4.1.0,<4.2.0" - -[package.extras] -cli = ["click (==8.1.7)", "pyyaml (==6.0.1)"] -optimize = ["orjson"] - -[[package]] -name = "dill" -version = "0.3.9" -description = "serialize all of Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a"}, - {file = "dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] -profile = ["gprof2dot (>=2022.7.29)"] - -[[package]] -name = "distlib" -version = "0.3.9" -description = "Distribution utilities" -optional = false -python-versions = "*" -files = [ - {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, - {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, -] - -[[package]] -name = "distro" -version = "1.9.0" -description = "Distro - an OS platform information API" -optional = true -python-versions = ">=3.6" -files = [ - {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, - {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, -] - -[[package]] -name = "duckdb" -version = "1.1.3" -description = "DuckDB in-process database" -optional = true -python-versions = ">=3.7.0" -files = [ - {file = "duckdb-1.1.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:1c0226dc43e2ee4cc3a5a4672fddb2d76fd2cf2694443f395c02dd1bea0b7fce"}, - {file = "duckdb-1.1.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7c71169fa804c0b65e49afe423ddc2dc83e198640e3b041028da8110f7cd16f7"}, - {file = "duckdb-1.1.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:872d38b65b66e3219d2400c732585c5b4d11b13d7a36cd97908d7981526e9898"}, - {file = "duckdb-1.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25fb02629418c0d4d94a2bc1776edaa33f6f6ccaa00bd84eb96ecb97ae4b50e9"}, - {file = "duckdb-1.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3f5cd604e7c39527e6060f430769b72234345baaa0987f9500988b2814f5e4"}, - {file = "duckdb-1.1.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08935700e49c187fe0e9b2b86b5aad8a2ccd661069053e38bfaed3b9ff795efd"}, - {file = "duckdb-1.1.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9b47036945e1db32d70e414a10b1593aec641bd4c5e2056873d971cc21e978b"}, - {file = "duckdb-1.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:35c420f58abc79a68a286a20fd6265636175fadeca1ce964fc8ef159f3acc289"}, - {file = "duckdb-1.1.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:4f0e2e5a6f5a53b79aee20856c027046fba1d73ada6178ed8467f53c3877d5e0"}, - {file = "duckdb-1.1.3-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:911d58c22645bfca4a5a049ff53a0afd1537bc18fedb13bc440b2e5af3c46148"}, - {file = "duckdb-1.1.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:c443d3d502335e69fc1e35295fcfd1108f72cb984af54c536adfd7875e79cee5"}, - {file = "duckdb-1.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a55169d2d2e2e88077d91d4875104b58de45eff6a17a59c7dc41562c73df4be"}, - {file = "duckdb-1.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d0767ada9f06faa5afcf63eb7ba1befaccfbcfdac5ff86f0168c673dd1f47aa"}, - {file = "duckdb-1.1.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51c6d79e05b4a0933672b1cacd6338f882158f45ef9903aef350c4427d9fc898"}, - {file = "duckdb-1.1.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:183ac743f21c6a4d6adfd02b69013d5fd78e5e2cd2b4db023bc8a95457d4bc5d"}, - {file = "duckdb-1.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:a30dd599b8090ea6eafdfb5a9f1b872d78bac318b6914ada2d35c7974d643640"}, - {file = "duckdb-1.1.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:a433ae9e72c5f397c44abdaa3c781d94f94f4065bcbf99ecd39433058c64cb38"}, - {file = "duckdb-1.1.3-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:d08308e0a46c748d9c30f1d67ee1143e9c5ea3fbcccc27a47e115b19e7e78aa9"}, - {file = "duckdb-1.1.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:5d57776539211e79b11e94f2f6d63de77885f23f14982e0fac066f2885fcf3ff"}, - {file = "duckdb-1.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e59087dbbb63705f2483544e01cccf07d5b35afa58be8931b224f3221361d537"}, - {file = "duckdb-1.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ebf5f60ddbd65c13e77cddb85fe4af671d31b851f125a4d002a313696af43f1"}, - {file = "duckdb-1.1.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4ef7ba97a65bd39d66f2a7080e6fb60e7c3e41d4c1e19245f90f53b98e3ac32"}, - {file = "duckdb-1.1.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f58db1b65593ff796c8ea6e63e2e144c944dd3d51c8d8e40dffa7f41693d35d3"}, - {file = "duckdb-1.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:e86006958e84c5c02f08f9b96f4bc26990514eab329b1b4f71049b3727ce5989"}, - {file = "duckdb-1.1.3-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:0897f83c09356206ce462f62157ce064961a5348e31ccb2a557a7531d814e70e"}, - {file = "duckdb-1.1.3-cp313-cp313-macosx_12_0_universal2.whl", hash = "sha256:cddc6c1a3b91dcc5f32493231b3ba98f51e6d3a44fe02839556db2b928087378"}, - {file = "duckdb-1.1.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:1d9ab6143e73bcf17d62566e368c23f28aa544feddfd2d8eb50ef21034286f24"}, - {file = "duckdb-1.1.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f073d15d11a328f2e6d5964a704517e818e930800b7f3fa83adea47f23720d3"}, - {file = "duckdb-1.1.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5724fd8a49e24d730be34846b814b98ba7c304ca904fbdc98b47fa95c0b0cee"}, - {file = "duckdb-1.1.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51e7dbd968b393343b226ab3f3a7b5a68dee6d3fe59be9d802383bf916775cb8"}, - {file = "duckdb-1.1.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:00cca22df96aa3473fe4584f84888e2cf1c516e8c2dd837210daec44eadba586"}, - {file = "duckdb-1.1.3-cp313-cp313-win_amd64.whl", hash = "sha256:77f26884c7b807c7edd07f95cf0b00e6d47f0de4a534ac1706a58f8bc70d0d31"}, - {file = "duckdb-1.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4748635875fc3c19a7320a6ae7410f9295557450c0ebab6d6712de12640929a"}, - {file = "duckdb-1.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74e121ab65dbec5290f33ca92301e3a4e81797966c8d9feef6efdf05fc6dafd"}, - {file = "duckdb-1.1.3-cp37-cp37m-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c619e4849837c8c83666f2cd5c6c031300cd2601e9564b47aa5de458ff6e69d"}, - {file = "duckdb-1.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0ba6baa0af33ded836b388b09433a69b8bec00263247f6bf0a05c65c897108d3"}, - {file = "duckdb-1.1.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:ecb1dc9062c1cc4d2d88a5e5cd8cc72af7818ab5a3c0f796ef0ffd60cfd3efb4"}, - {file = "duckdb-1.1.3-cp38-cp38-macosx_12_0_universal2.whl", hash = "sha256:5ace6e4b1873afdd38bd6cc8fcf90310fb2d454f29c39a61d0c0cf1a24ad6c8d"}, - {file = "duckdb-1.1.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:a1fa0c502f257fa9caca60b8b1478ec0f3295f34bb2efdc10776fc731b8a6c5f"}, - {file = "duckdb-1.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6411e21a2128d478efbd023f2bdff12464d146f92bc3e9c49247240448ace5a6"}, - {file = "duckdb-1.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5336939d83837af52731e02b6a78a446794078590aa71fd400eb17f083dda3e"}, - {file = "duckdb-1.1.3-cp38-cp38-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f549af9f7416573ee48db1cf8c9d27aeed245cb015f4b4f975289418c6cf7320"}, - {file = "duckdb-1.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:2141c6b28162199999075d6031b5d63efeb97c1e68fb3d797279d31c65676269"}, - {file = "duckdb-1.1.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:09c68522c30fc38fc972b8a75e9201616b96ae6da3444585f14cf0d116008c95"}, - {file = "duckdb-1.1.3-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:8ee97ec337794c162c0638dda3b4a30a483d0587deda22d45e1909036ff0b739"}, - {file = "duckdb-1.1.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a1f83c7217c188b7ab42e6a0963f42070d9aed114f6200e3c923c8899c090f16"}, - {file = "duckdb-1.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aa3abec8e8995a03ff1a904b0e66282d19919f562dd0a1de02f23169eeec461"}, - {file = "duckdb-1.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80158f4c7c7ada46245837d5b6869a336bbaa28436fbb0537663fa324a2750cd"}, - {file = "duckdb-1.1.3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:647f17bd126170d96a38a9a6f25fca47ebb0261e5e44881e3782989033c94686"}, - {file = "duckdb-1.1.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:252d9b17d354beb9057098d4e5d5698e091a4f4a0d38157daeea5fc0ec161670"}, - {file = "duckdb-1.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:eeacb598120040e9591f5a4edecad7080853aa8ac27e62d280f151f8c862afa3"}, - {file = "duckdb-1.1.3.tar.gz", hash = "sha256:68c3a46ab08836fe041d15dcbf838f74a990d551db47cb24ab1c4576fc19351c"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "feedparser" -version = "6.0.11" -description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds" -optional = true -python-versions = ">=3.6" -files = [ - {file = "feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45"}, - {file = "feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5"}, -] - -[package.dependencies] -sgmllib3k = "*" - -[[package]] -name = "filelock" -version = "3.16.1" -description = "A platform independent file lock." -optional = false -python-versions = ">=3.8" -files = [ - {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, - {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] -typing = ["typing-extensions (>=4.12.2)"] - -[[package]] -name = "fonttools" -version = "4.55.2" -description = "Tools to manipulate font files" -optional = true -python-versions = ">=3.8" -files = [ - {file = "fonttools-4.55.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bef0f8603834643b1a6419d57902f18e7d950ec1a998fb70410635c598dc1a1e"}, - {file = "fonttools-4.55.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:944228b86d472612d3b48bcc83b31c25c2271e63fdc74539adfcfa7a96d487fb"}, - {file = "fonttools-4.55.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f0e55f5da594b85f269cfbecd2f6bd3e07d0abba68870bc3f34854de4fa4678"}, - {file = "fonttools-4.55.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b1a6e576db0c83c1b91925bf1363478c4bb968dbe8433147332fb5782ce6190"}, - {file = "fonttools-4.55.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:616368b15716781bc84df5c2191dc0540137aaef56c2771eb4b89b90933f347a"}, - {file = "fonttools-4.55.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7bbae4f3915225c2c37670da68e2bf18a21206060ad31dfb95fec91ef641caa7"}, - {file = "fonttools-4.55.2-cp310-cp310-win32.whl", hash = "sha256:8b02b10648d69d67a7eb055f4d3eedf4a85deb22fb7a19fbd9acbae7c7538199"}, - {file = "fonttools-4.55.2-cp310-cp310-win_amd64.whl", hash = "sha256:bbea0ab841113ac8e8edde067e099b7288ffc6ac2dded538b131c2c0595d5f77"}, - {file = "fonttools-4.55.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d34525e8141286fa976e14806639d32294bfb38d28bbdb5f6be9f46a1cd695a6"}, - {file = "fonttools-4.55.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0ecd1c2b1c2ec46bb73685bc5473c72e16ed0930ef79bc2919ccadc43a99fb16"}, - {file = "fonttools-4.55.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9008438ad59e5a8e403a62fbefef2b2ff377eb3857d90a3f2a5f4d674ff441b2"}, - {file = "fonttools-4.55.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:131591ac8d7a47043aaf29581aba755ae151d46e49d2bf49608601efd71e8b4d"}, - {file = "fonttools-4.55.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4c83381c3e3e3d9caa25527c4300543578341f21aae89e4fbbb4debdda8d82a2"}, - {file = "fonttools-4.55.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:42aca564b575252fd9954ed0d91d97a24de24289a16ce8ff74ed0bdf5ecebf11"}, - {file = "fonttools-4.55.2-cp311-cp311-win32.whl", hash = "sha256:c6457f650ebe15baa17fc06e256227f0a47f46f80f27ec5a0b00160de8dc2c13"}, - {file = "fonttools-4.55.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cfa67414d7414442a5635ff634384101c54f53bb7b0e04aa6a61b013fcce194"}, - {file = "fonttools-4.55.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:18f082445b8fe5e91c53e6184f4c1c73f3f965c8bcc614c6cd6effd573ce6c1a"}, - {file = "fonttools-4.55.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c0f91adbbd706e8acd1db73e3e510118e62d0ffb651864567dccc5b2339f90"}, - {file = "fonttools-4.55.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d8ccce035320d63dba0c35f52499322f5531dbe85bba1514c7cea26297e4c54"}, - {file = "fonttools-4.55.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96e126df9615df214ec7f04bebcf60076297fbc10b75c777ce58b702d7708ffb"}, - {file = "fonttools-4.55.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:508ebb42956a7a931c4092dfa2d9b4ffd4f94cea09b8211199090d2bd082506b"}, - {file = "fonttools-4.55.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c1b9de46ef7b683d50400abf9f1578eaceee271ff51c36bf4b7366f2be29f498"}, - {file = "fonttools-4.55.2-cp312-cp312-win32.whl", hash = "sha256:2df61d9fc15199cc86dad29f64dd686874a3a52dda0c2d8597d21f509f95c332"}, - {file = "fonttools-4.55.2-cp312-cp312-win_amd64.whl", hash = "sha256:d337ec087da8216a828574aa0525d869df0a2ac217a2efc1890974ddd1fbc5b9"}, - {file = "fonttools-4.55.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:10aff204e2edee1d312fa595c06f201adf8d528a3b659cfb34cd47eceaaa6a26"}, - {file = "fonttools-4.55.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:09fe922a3eff181fd07dd724cdb441fb6b9fc355fd1c0f1aa79aca60faf1fbdd"}, - {file = "fonttools-4.55.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:487e1e8b524143a799bda0169c48b44a23a6027c1bb1957d5a172a7d3a1dd704"}, - {file = "fonttools-4.55.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b1726872e09268bbedb14dc02e58b7ea31ecdd1204c6073eda4911746b44797"}, - {file = "fonttools-4.55.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fc88cfb58b0cd7b48718c3e61dd0d0a3ee8e2c86b973342967ce09fbf1db6d4"}, - {file = "fonttools-4.55.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e857fe1859901ad8c5cab32e0eebc920adb09f413d2d73b74b677cf47b28590c"}, - {file = "fonttools-4.55.2-cp313-cp313-win32.whl", hash = "sha256:81ccd2b3a420b8050c7d9db3be0555d71662973b3ef2a1d921a2880b58957db8"}, - {file = "fonttools-4.55.2-cp313-cp313-win_amd64.whl", hash = "sha256:d559eb1744c7dcfa90ae60cb1a4b3595e898e48f4198738c321468c01180cd83"}, - {file = "fonttools-4.55.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6b5917ef79cac8300b88fd6113003fd01bbbbea2ea060a27b95d8f77cb4c65c2"}, - {file = "fonttools-4.55.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:663eba5615d6abaaf616432354eb7ce951d518e43404371bcc2b0694ef21e8d6"}, - {file = "fonttools-4.55.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:803d5cef5fc47f44f5084d154aa3d6f069bb1b60e32390c225f897fa19b0f939"}, - {file = "fonttools-4.55.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8bc5f100de0173cc39102c0399bd6c3bd544bbdf224957933f10ee442d43cddd"}, - {file = "fonttools-4.55.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:3d9bbc1e380fdaf04ad9eabd8e3e6a4301eaf3487940893e9fd98537ea2e283b"}, - {file = "fonttools-4.55.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:42a9afedff07b6f75aa0f39b5e49922ac764580ef3efce035ca30284b2ee65c8"}, - {file = "fonttools-4.55.2-cp38-cp38-win32.whl", hash = "sha256:f1c76f423f1a241df08f87614364dff6e0b7ce23c962c1b74bd995ec7c0dad13"}, - {file = "fonttools-4.55.2-cp38-cp38-win_amd64.whl", hash = "sha256:25062b6ca03464dd5179fc2040fb19e03391b7cc49b9cc4f879312e638605c5c"}, - {file = "fonttools-4.55.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d1100d8e665fe386a79cab59446992de881ea74d0d6c191bb988642692aa2421"}, - {file = "fonttools-4.55.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dbdc251c5e472e5ae6bc816f9b82718b8e93ff7992e7331d6cf3562b96aa268e"}, - {file = "fonttools-4.55.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0bf24d2b02dbc9376d795a63062632ff73e3e9e60c0229373f500aed7e86dd7"}, - {file = "fonttools-4.55.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4ff250ed4ff05015dfd9cf2adf7570c7a383ca80f4d9732ac484a5ed0d8453c"}, - {file = "fonttools-4.55.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:44cf2a98aa661dbdeb8c03f5e405b074e2935196780bb729888639f5276067d9"}, - {file = "fonttools-4.55.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22ef222740eb89d189bf0612eb98fbae592c61d7efeac51bfbc2a1592d469557"}, - {file = "fonttools-4.55.2-cp39-cp39-win32.whl", hash = "sha256:93f439ca27e55f585e7aaa04a74990acd983b5f2245e41d6b79f0a8b44e684d8"}, - {file = "fonttools-4.55.2-cp39-cp39-win_amd64.whl", hash = "sha256:627cf10d6f5af5bec6324c18a2670f134c29e1b7dce3fb62e8ef88baa6cba7a9"}, - {file = "fonttools-4.55.2-py3-none-any.whl", hash = "sha256:8e2d89fbe9b08d96e22c7a81ec04a4e8d8439c31223e2dc6f2f9fc8ff14bdf9f"}, - {file = "fonttools-4.55.2.tar.gz", hash = "sha256:45947e7b3f9673f91df125d375eb57b9a23f2a603f438a1aebf3171bffa7a205"}, -] - -[package.extras] -all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "pycairo", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=15.1.0)", "xattr", "zopfli (>=0.1.4)"] -graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["munkres", "pycairo", "scipy"] -lxml = ["lxml (>=4.0)"] -pathops = ["skia-pathops (>=0.5.0)"] -plot = ["matplotlib"] -repacker = ["uharfbuzz (>=0.23.0)"] -symfont = ["sympy"] -type1 = ["xattr"] -ufo = ["fs (>=2.2.0,<3)"] -unicode = ["unicodedata2 (>=15.1.0)"] -woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] - -[[package]] -name = "gitdb" -version = "4.0.11" -description = "Git Object Database" -optional = false -python-versions = ">=3.7" -files = [ - {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"}, - {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"}, -] - -[package.dependencies] -smmap = ">=3.0.1,<6" - -[[package]] -name = "gitpython" -version = "3.1.43" -description = "GitPython is a Python library used to interact with Git repositories" -optional = false -python-versions = ">=3.7" -files = [ - {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"}, - {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"}, -] - -[package.dependencies] -gitdb = ">=4.0.1,<5" - -[package.extras] -doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"] -test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"] - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = true -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "htmlmin" -version = "0.1.12" -description = "An HTML Minifier" -optional = true -python-versions = "*" -files = [ - {file = "htmlmin-0.1.12.tar.gz", hash = "sha256:50c1ef4630374a5d723900096a961cff426dff46b48f34d194a81bbe14eca178"}, -] - -[[package]] -name = "httpcore" -version = "1.0.7" -description = "A minimal low-level HTTP client." -optional = true -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<1.0)"] - -[[package]] -name = "httpx" -version = "0.28.1" -description = "The next generation HTTP client." -optional = true -python-versions = ">=3.8" -files = [ - {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, - {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "identify" -version = "2.6.3" -description = "File identification library for Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "identify-2.6.3-py2.py3-none-any.whl", hash = "sha256:9edba65473324c2ea9684b1f944fe3191db3345e50b6d04571d10ed164f8d7bd"}, - {file = "identify-2.6.3.tar.gz", hash = "sha256:62f5dae9b5fef52c84cc188514e9ea4f3f636b1d8799ab5ebc475471f9e47a02"}, -] - -[package.extras] -license = ["ukkonen"] - -[[package]] -name = "idna" -version = "3.10" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, -] - -[package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] - -[[package]] -name = "imagehash" -version = "4.3.1" -description = "Image Hashing library" -optional = true -python-versions = "*" -files = [ - {file = "ImageHash-4.3.1-py2.py3-none-any.whl", hash = "sha256:5ad9a5cde14fe255745a8245677293ac0d67f09c330986a351f34b614ba62fb5"}, - {file = "ImageHash-4.3.1.tar.gz", hash = "sha256:7038d1b7f9e0585beb3dd8c0a956f02b95a346c0b5f24a9e8cc03ebadaf0aa70"}, -] - -[package.dependencies] -numpy = "*" -pillow = "*" -PyWavelets = "*" -scipy = "*" - -[[package]] -name = "importlib-metadata" -version = "6.11.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b"}, - {file = "importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - -[[package]] -name = "importlib-resources" -version = "6.4.5" -description = "Read resources from Python packages" -optional = true -python-versions = ">=3.8" -files = [ - {file = "importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717"}, - {file = "importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["jaraco.test (>=5.4)", "pytest (>=6,!=8.1.*)", "zipp (>=3.17)"] -type = ["pytest-mypy"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isodate" -version = "0.6.1" -description = "An ISO 8601 date/time/duration parser and formatter" -optional = false -python-versions = "*" -files = [ - {file = "isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96"}, - {file = "isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9"}, -] - -[package.dependencies] -six = "*" - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "jiter" -version = "0.8.2" -description = "Fast iterable JSON parser." -optional = true -python-versions = ">=3.8" -files = [ - {file = "jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b"}, - {file = "jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08"}, - {file = "jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49"}, - {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d"}, - {file = "jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff"}, - {file = "jiter-0.8.2-cp310-cp310-win32.whl", hash = "sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43"}, - {file = "jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105"}, - {file = "jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b"}, - {file = "jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586"}, - {file = "jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc"}, - {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88"}, - {file = "jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6"}, - {file = "jiter-0.8.2-cp311-cp311-win32.whl", hash = "sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44"}, - {file = "jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855"}, - {file = "jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f"}, - {file = "jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887"}, - {file = "jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d"}, - {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152"}, - {file = "jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29"}, - {file = "jiter-0.8.2-cp312-cp312-win32.whl", hash = "sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e"}, - {file = "jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c"}, - {file = "jiter-0.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ca1f08b8e43dc3bd0594c992fb1fd2f7ce87f7bf0d44358198d6da8034afdf84"}, - {file = "jiter-0.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5672a86d55416ccd214c778efccf3266b84f87b89063b582167d803246354be4"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58dc9bc9767a1101f4e5e22db1b652161a225874d66f0e5cb8e2c7d1c438b587"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37b2998606d6dadbb5ccda959a33d6a5e853252d921fec1792fc902351bb4e2c"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ab9a87f3784eb0e098f84a32670cfe4a79cb6512fd8f42ae3d0709f06405d18"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:79aec8172b9e3c6d05fd4b219d5de1ac616bd8da934107325a6c0d0e866a21b6"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711e408732d4e9a0208008e5892c2966b485c783cd2d9a681f3eb147cf36c7ef"}, - {file = "jiter-0.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:653cf462db4e8c41995e33d865965e79641ef45369d8a11f54cd30888b7e6ff1"}, - {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:9c63eaef32b7bebac8ebebf4dabebdbc6769a09c127294db6babee38e9f405b9"}, - {file = "jiter-0.8.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:eb21aaa9a200d0a80dacc7a81038d2e476ffe473ffdd9c91eb745d623561de05"}, - {file = "jiter-0.8.2-cp313-cp313-win32.whl", hash = "sha256:789361ed945d8d42850f919342a8665d2dc79e7e44ca1c97cc786966a21f627a"}, - {file = "jiter-0.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:ab7f43235d71e03b941c1630f4b6e3055d46b6cb8728a17663eaac9d8e83a865"}, - {file = "jiter-0.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b426f72cd77da3fec300ed3bc990895e2dd6b49e3bfe6c438592a3ba660e41ca"}, - {file = "jiter-0.8.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2dd880785088ff2ad21ffee205e58a8c1ddabc63612444ae41e5e4b321b39c0"}, - {file = "jiter-0.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:3ac9f578c46f22405ff7f8b1f5848fb753cc4b8377fbec8470a7dc3997ca7566"}, - {file = "jiter-0.8.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:9e1fa156ee9454642adb7e7234a383884452532bc9d53d5af2d18d98ada1d79c"}, - {file = "jiter-0.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cf5dfa9956d96ff2efb0f8e9c7d055904012c952539a774305aaaf3abdf3d6c"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e52bf98c7e727dd44f7c4acb980cb988448faeafed8433c867888268899b298b"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a2ecaa3c23e7a7cf86d00eda3390c232f4d533cd9ddea4b04f5d0644faf642c5"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:08d4c92bf480e19fc3f2717c9ce2aa31dceaa9163839a311424b6862252c943e"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99d9a1eded738299ba8e106c6779ce5c3893cffa0e32e4485d680588adae6db8"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d20be8b7f606df096e08b0b1b4a3c6f0515e8dac296881fe7461dfa0fb5ec817"}, - {file = "jiter-0.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d33f94615fcaf872f7fd8cd98ac3b429e435c77619777e8a449d9d27e01134d1"}, - {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:317b25e98a35ffec5c67efe56a4e9970852632c810d35b34ecdd70cc0e47b3b6"}, - {file = "jiter-0.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fc9043259ee430ecd71d178fccabd8c332a3bf1e81e50cae43cc2b28d19e4cb7"}, - {file = "jiter-0.8.2-cp38-cp38-win32.whl", hash = "sha256:fc5adda618205bd4678b146612ce44c3cbfdee9697951f2c0ffdef1f26d72b63"}, - {file = "jiter-0.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:cd646c827b4f85ef4a78e4e58f4f5854fae0caf3db91b59f0d73731448a970c6"}, - {file = "jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee"}, - {file = "jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4"}, - {file = "jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27"}, - {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841"}, - {file = "jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637"}, - {file = "jiter-0.8.2-cp39-cp39-win32.whl", hash = "sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36"}, - {file = "jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a"}, - {file = "jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d"}, -] - -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = true -python-versions = ">=3.8" -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - -[[package]] -name = "jsonschema" -version = "4.23.0" -description = "An implementation of JSON Schema validation for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566"}, - {file = "jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -jsonschema-specifications = ">=2023.03.6" -referencing = ">=0.28.4" -rpds-py = ">=0.7.1" - -[package.extras] -format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] -format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=24.6.0)"] - -[[package]] -name = "jsonschema-specifications" -version = "2024.10.1" -description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" -optional = false -python-versions = ">=3.9" -files = [ - {file = "jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf"}, - {file = "jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272"}, -] - -[package.dependencies] -referencing = ">=0.31.0" - -[[package]] -name = "kiwisolver" -version = "1.4.7" -description = "A fast implementation of the Cassowary constraint solver" -optional = true -python-versions = ">=3.8" -files = [ - {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6"}, - {file = "kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17"}, - {file = "kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05"}, - {file = "kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895"}, - {file = "kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3"}, - {file = "kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc"}, - {file = "kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c"}, - {file = "kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a"}, - {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54"}, - {file = "kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95"}, - {file = "kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052"}, - {file = "kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3"}, - {file = "kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523"}, - {file = "kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d"}, - {file = "kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b"}, - {file = "kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376"}, - {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2"}, - {file = "kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a"}, - {file = "kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258"}, - {file = "kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383"}, - {file = "kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520"}, - {file = "kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b"}, - {file = "kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb"}, - {file = "kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a"}, - {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2e6039dcbe79a8e0f044f1c39db1986a1b8071051efba3ee4d74f5b365f5226e"}, - {file = "kiwisolver-1.4.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a1ecf0ac1c518487d9d23b1cd7139a6a65bc460cd101ab01f1be82ecf09794b6"}, - {file = "kiwisolver-1.4.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7ab9ccab2b5bd5702ab0803676a580fffa2aa178c2badc5557a84cc943fcf750"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f816dd2277f8d63d79f9c8473a79fe54047bc0467754962840782c575522224d"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf8bcc23ceb5a1b624572a1623b9f79d2c3b337c8c455405ef231933a10da379"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dea0bf229319828467d7fca8c7c189780aa9ff679c94539eed7532ebe33ed37c"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c06a4c7cf15ec739ce0e5971b26c93638730090add60e183530d70848ebdd34"}, - {file = "kiwisolver-1.4.7-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913983ad2deb14e66d83c28b632fd35ba2b825031f2fa4ca29675e665dfecbe1"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5337ec7809bcd0f424c6b705ecf97941c46279cf5ed92311782c7c9c2026f07f"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4c26ed10c4f6fa6ddb329a5120ba3b6db349ca192ae211e882970bfc9d91420b"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c619b101e6de2222c1fcb0531e1b17bbffbe54294bfba43ea0d411d428618c27"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:073a36c8273647592ea332e816e75ef8da5c303236ec0167196793eb1e34657a"}, - {file = "kiwisolver-1.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3ce6b2b0231bda412463e152fc18335ba32faf4e8c23a754ad50ffa70e4091ee"}, - {file = "kiwisolver-1.4.7-cp313-cp313-win32.whl", hash = "sha256:f4c9aee212bc89d4e13f58be11a56cc8036cabad119259d12ace14b34476fd07"}, - {file = "kiwisolver-1.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:8a3ec5aa8e38fc4c8af308917ce12c536f1c88452ce554027e55b22cbbfbff76"}, - {file = "kiwisolver-1.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:76c8094ac20ec259471ac53e774623eb62e6e1f56cd8690c67ce6ce4fcb05650"}, - {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5d5abf8f8ec1f4e22882273c423e16cae834c36856cac348cfbfa68e01c40f3a"}, - {file = "kiwisolver-1.4.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:aeb3531b196ef6f11776c21674dba836aeea9d5bd1cf630f869e3d90b16cfade"}, - {file = "kiwisolver-1.4.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b7d755065e4e866a8086c9bdada157133ff466476a2ad7861828e17b6026e22c"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08471d4d86cbaec61f86b217dd938a83d85e03785f51121e791a6e6689a3be95"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7bbfcb7165ce3d54a3dfbe731e470f65739c4c1f85bb1018ee912bae139e263b"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5d34eb8494bea691a1a450141ebb5385e4b69d38bb8403b5146ad279f4b30fa3"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9242795d174daa40105c1d86aba618e8eab7bf96ba8c3ee614da8302a9f95503"}, - {file = "kiwisolver-1.4.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a0f64a48bb81af7450e641e3fe0b0394d7381e342805479178b3d335d60ca7cf"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8e045731a5416357638d1700927529e2b8ab304811671f665b225f8bf8d8f933"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:4322872d5772cae7369f8351da1edf255a604ea7087fe295411397d0cfd9655e"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:e1631290ee9271dffe3062d2634c3ecac02c83890ada077d225e081aca8aab89"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:edcfc407e4eb17e037bca59be0e85a2031a2ac87e4fed26d3e9df88b4165f92d"}, - {file = "kiwisolver-1.4.7-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4d05d81ecb47d11e7f8932bd8b61b720bf0b41199358f3f5e36d38e28f0532c5"}, - {file = "kiwisolver-1.4.7-cp38-cp38-win32.whl", hash = "sha256:b38ac83d5f04b15e515fd86f312479d950d05ce2368d5413d46c088dda7de90a"}, - {file = "kiwisolver-1.4.7-cp38-cp38-win_amd64.whl", hash = "sha256:d83db7cde68459fc803052a55ace60bea2bae361fc3b7a6d5da07e11954e4b09"}, - {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd"}, - {file = "kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583"}, - {file = "kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2"}, - {file = "kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb"}, - {file = "kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327"}, - {file = "kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644"}, - {file = "kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4"}, - {file = "kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4"}, - {file = "kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bfa1acfa0c54932d5607e19a2c24646fb4c1ae2694437789129cf099789a3b00"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:eee3ea935c3d227d49b4eb85660ff631556841f6e567f0f7bda972df6c2c9935"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f3160309af4396e0ed04db259c3ccbfdc3621b5559b5453075e5de555e1f3a1b"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a17f6a29cf8935e587cc8a4dbfc8368c55edc645283db0ce9801016f83526c2d"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10849fb2c1ecbfae45a693c070e0320a91b35dd4bcf58172c023b994283a124d"}, - {file = "kiwisolver-1.4.7-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:ac542bf38a8a4be2dc6b15248d36315ccc65f0743f7b1a76688ffb6b5129a5c2"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225"}, - {file = "kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0"}, - {file = "kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60"}, -] - -[[package]] -name = "leather" -version = "0.4.0" -description = "Python charting for 80% of humans." -optional = false -python-versions = "*" -files = [ - {file = "leather-0.4.0-py2.py3-none-any.whl", hash = "sha256:18290bc93749ae39039af5e31e871fcfad74d26c4c3ea28ea4f681f4571b3a2b"}, - {file = "leather-0.4.0.tar.gz", hash = "sha256:f964bec2086f3153a6c16e707f20cb718f811f57af116075f4c0f4805c608b95"}, -] - -[package.extras] -test = ["cssselect (>=0.9.1)", "lxml (>=3.6.0)", "pytest", "pytest-cov"] - -[[package]] -name = "llvmlite" -version = "0.43.0" -description = "lightweight wrapper around basic LLVM functionality" -optional = true -python-versions = ">=3.9" -files = [ - {file = "llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761"}, - {file = "llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc"}, - {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead"}, - {file = "llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a"}, - {file = "llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed"}, - {file = "llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98"}, - {file = "llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57"}, - {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2"}, - {file = "llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749"}, - {file = "llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91"}, - {file = "llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7"}, - {file = "llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7"}, - {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f"}, - {file = "llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844"}, - {file = "llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9"}, - {file = "llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c"}, - {file = "llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8"}, - {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a"}, - {file = "llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867"}, - {file = "llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4"}, - {file = "llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5"}, -] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "3.0.2" -description = "Safely add untrusted strings to HTML/XML markup." -optional = false -python-versions = ">=3.9" -files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, -] - -[[package]] -name = "mashumaro" -version = "3.14" -description = "Fast and well tested serialization library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mashumaro-3.14-py3-none-any.whl", hash = "sha256:c12a649599a8f7b1a0b35d18f12e678423c3066189f7bc7bd8dd431c5c8132c3"}, - {file = "mashumaro-3.14.tar.gz", hash = "sha256:5ef6f2b963892cbe9a4ceb3441dfbea37f8c3412523f25d42e9b3a7186555f1d"}, -] - -[package.dependencies] -msgpack = {version = ">=0.5.6", optional = true, markers = "extra == \"msgpack\""} -typing-extensions = ">=4.1.0" - -[package.extras] -msgpack = ["msgpack (>=0.5.6)"] -orjson = ["orjson"] -toml = ["tomli (>=1.1.0)", "tomli-w (>=1.0)"] -yaml = ["pyyaml (>=3.13)"] - -[[package]] -name = "matplotlib" -version = "3.9.3" -description = "Python plotting package" -optional = true -python-versions = ">=3.9" -files = [ - {file = "matplotlib-3.9.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:41b016e3be4e740b66c79a031a0a6e145728dbc248142e751e8dab4f3188ca1d"}, - {file = "matplotlib-3.9.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e0143975fc2a6d7136c97e19c637321288371e8f09cff2564ecd73e865ea0b9"}, - {file = "matplotlib-3.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f459c8ee2c086455744723628264e43c884be0c7d7b45d84b8cd981310b4815"}, - {file = "matplotlib-3.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:687df7ceff57b8f070d02b4db66f75566370e7ae182a0782b6d3d21b0d6917dc"}, - {file = "matplotlib-3.9.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:edd14cf733fdc4f6e6fe3f705af97676a7e52859bf0044aa2c84e55be739241c"}, - {file = "matplotlib-3.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:1c40c244221a1adbb1256692b1133c6fb89418df27bf759a31a333e7912a4010"}, - {file = "matplotlib-3.9.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:cf2a60daf6cecff6828bc608df00dbc794380e7234d2411c0ec612811f01969d"}, - {file = "matplotlib-3.9.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:213d6dc25ce686516208d8a3e91120c6a4fdae4a3e06b8505ced5b716b50cc04"}, - {file = "matplotlib-3.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c52f48eb75fcc119a4fdb68ba83eb5f71656999420375df7c94cc68e0e14686e"}, - {file = "matplotlib-3.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3c93796b44fa111049b88a24105e947f03c01966b5c0cc782e2ee3887b790a3"}, - {file = "matplotlib-3.9.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cd1077b9a09b16d8c3c7075a8add5ffbfe6a69156a57e290c800ed4d435bef1d"}, - {file = "matplotlib-3.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:c96eeeb8c68b662c7747f91a385688d4b449687d29b691eff7068a4602fe6dc4"}, - {file = "matplotlib-3.9.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0a361bd5583bf0bcc08841df3c10269617ee2a36b99ac39d455a767da908bbbc"}, - {file = "matplotlib-3.9.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e14485bb1b83eeb3d55b6878f9560240981e7bbc7a8d4e1e8c38b9bd6ec8d2de"}, - {file = "matplotlib-3.9.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a8d279f78844aad213c4935c18f8292a9432d51af2d88bca99072c903948045"}, - {file = "matplotlib-3.9.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6c12514329ac0d03128cf1dcceb335f4fbf7c11da98bca68dca8dcb983153a9"}, - {file = "matplotlib-3.9.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6e9de2b390d253a508dd497e9b5579f3a851f208763ed67fdca5dc0c3ea6849c"}, - {file = "matplotlib-3.9.3-cp312-cp312-win_amd64.whl", hash = "sha256:d796272408f8567ff7eaa00eb2856b3a00524490e47ad505b0b4ca6bb8a7411f"}, - {file = "matplotlib-3.9.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:203d18df84f5288973b2d56de63d4678cc748250026ca9e1ad8f8a0fd8a75d83"}, - {file = "matplotlib-3.9.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:b651b0d3642991259109dc0351fc33ad44c624801367bb8307be9bfc35e427ad"}, - {file = "matplotlib-3.9.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:66d7b171fecf96940ce069923a08ba3df33ef542de82c2ff4fe8caa8346fa95a"}, - {file = "matplotlib-3.9.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6be0ba61f6ff2e6b68e4270fb63b6813c9e7dec3d15fc3a93f47480444fd72f0"}, - {file = "matplotlib-3.9.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d6b2e8856dec3a6db1ae51aec85c82223e834b228c1d3228aede87eee2b34f9"}, - {file = "matplotlib-3.9.3-cp313-cp313-win_amd64.whl", hash = "sha256:90a85a004fefed9e583597478420bf904bb1a065b0b0ee5b9d8d31b04b0f3f70"}, - {file = "matplotlib-3.9.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3119b2f16de7f7b9212ba76d8fe6a0e9f90b27a1e04683cd89833a991682f639"}, - {file = "matplotlib-3.9.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:87ad73763d93add1b6c1f9fcd33af662fd62ed70e620c52fcb79f3ac427cf3a6"}, - {file = "matplotlib-3.9.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:026bdf3137ab6022c866efa4813b6bbeddc2ed4c9e7e02f0e323a7bca380dfa0"}, - {file = "matplotlib-3.9.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:760a5e89ebbb172989e8273024a1024b0f084510b9105261b3b00c15e9c9f006"}, - {file = "matplotlib-3.9.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a42b9dc42de2cfe357efa27d9c50c7833fc5ab9b2eb7252ccd5d5f836a84e1e4"}, - {file = "matplotlib-3.9.3-cp313-cp313t-win_amd64.whl", hash = "sha256:e0fcb7da73fbf67b5f4bdaa57d85bb585a4e913d4a10f3e15b32baea56a67f0a"}, - {file = "matplotlib-3.9.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:031b7f5b8e595cc07def77ec5b58464e9bb67dc5760be5d6f26d9da24892481d"}, - {file = "matplotlib-3.9.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9fa6e193c14d6944e0685cdb527cb6b38b0e4a518043e7212f214113af7391da"}, - {file = "matplotlib-3.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e6eefae6effa0c35bbbc18c25ee6e0b1da44d2359c3cd526eb0c9e703cf055d"}, - {file = "matplotlib-3.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10d3e5c7a99bd28afb957e1ae661323b0800d75b419f24d041ed1cc5d844a764"}, - {file = "matplotlib-3.9.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:816a966d5d376bf24c92af8f379e78e67278833e4c7cbc9fa41872eec629a060"}, - {file = "matplotlib-3.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fb0b37c896172899a4a93d9442ffdc6f870165f59e05ce2e07c6fded1c15749"}, - {file = "matplotlib-3.9.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5f2a4ea08e6876206d511365b0bc234edc813d90b930be72c3011bbd7898796f"}, - {file = "matplotlib-3.9.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9b081dac96ab19c54fd8558fac17c9d2c9cb5cc4656e7ed3261ddc927ba3e2c5"}, - {file = "matplotlib-3.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a0a63cb8404d1d1f94968ef35738900038137dab8af836b6c21bb6f03d75465"}, - {file = "matplotlib-3.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:896774766fd6be4571a43bc2fcbcb1dcca0807e53cab4a5bf88c4aa861a08e12"}, - {file = "matplotlib-3.9.3.tar.gz", hash = "sha256:cd5dbbc8e25cad5f706845c4d100e2c8b34691b412b93717ce38d8ae803bcfa5"}, -] - -[package.dependencies] -contourpy = ">=1.0.1" -cycler = ">=0.10" -fonttools = ">=4.22.0" -importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} -kiwisolver = ">=1.3.1" -numpy = ">=1.23" -packaging = ">=20.0" -pillow = ">=8" -pyparsing = ">=2.3.1" -python-dateutil = ">=2.7" - -[package.extras] -dev = ["meson-python (>=0.13.1)", "numpy (>=1.25)", "pybind11 (>=2.6,!=2.13.3)", "setuptools (>=64)", "setuptools_scm (>=7)"] - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "more-itertools" -version = "10.5.0" -description = "More routines for operating on iterables, beyond itertools" -optional = false -python-versions = ">=3.8" -files = [ - {file = "more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6"}, - {file = "more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef"}, -] - -[[package]] -name = "msgpack" -version = "1.1.0" -description = "MessagePack serializer" -optional = false -python-versions = ">=3.8" -files = [ - {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, - {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, - {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, - {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, - {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, - {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, - {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, - {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, - {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, - {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, - {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, - {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, - {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, - {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, - {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, - {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, -] - -[[package]] -name = "multimethod" -version = "1.12" -description = "Multiple argument dispatching." -optional = true -python-versions = ">=3.9" -files = [ - {file = "multimethod-1.12-py3-none-any.whl", hash = "sha256:fd0c473c43558908d97cc06e4d68e8f69202f167db46f7b4e4058893e7dbdf60"}, - {file = "multimethod-1.12.tar.gz", hash = "sha256:8db8ef2a8d2a247e3570cc23317680892fdf903d84c8c1053667c8e8f7671a67"}, -] - -[[package]] -name = "mypy" -version = "1.13.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -faster-cache = ["orjson"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "narwhals" -version = "1.16.0" -description = "Extremely lightweight compatibility layer between dataframe libraries" -optional = true -python-versions = ">=3.8" -files = [ - {file = "narwhals-1.16.0-py3-none-any.whl", hash = "sha256:e5b764b1b571e25b08bb87db6feb89ff824d76f5d7b7e12e6d837c1c503763ae"}, - {file = "narwhals-1.16.0.tar.gz", hash = "sha256:1ea3ac269df8126f1f79c1ba7b8f78c73b032a86861b2cc2f08c48010ef9f6f8"}, -] - -[package.extras] -cudf = ["cudf (>=23.08.00)"] -dask = ["dask[dataframe] (>=2024.7)"] -modin = ["modin"] -pandas = ["pandas (>=0.25.3)"] -polars = ["polars (>=0.20.3)"] -pyarrow = ["pyarrow (>=11.0.0)"] -pyspark = ["pyspark (>=3.3.0)"] - -[[package]] -name = "networkx" -version = "3.2.1" -description = "Python package for creating and manipulating graphs and networks" -optional = false -python-versions = ">=3.9" -files = [ - {file = "networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2"}, - {file = "networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6"}, -] - -[package.extras] -default = ["matplotlib (>=3.5)", "numpy (>=1.22)", "pandas (>=1.4)", "scipy (>=1.9,!=1.11.0,!=1.11.1)"] -developer = ["changelist (==0.4)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] -doc = ["nb2plots (>=0.7)", "nbconvert (<7.9)", "numpydoc (>=1.6)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.14)", "sphinx (>=7)", "sphinx-gallery (>=0.14)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.11)", "sympy (>=1.10)"] -test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] - -[[package]] -name = "nodeenv" -version = "1.9.1" -description = "Node.js virtual environment builder" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, - {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, -] - -[[package]] -name = "numba" -version = "0.60.0" -description = "compiling Python code using LLVM" -optional = true -python-versions = ">=3.9" -files = [ - {file = "numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651"}, - {file = "numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b"}, - {file = "numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781"}, - {file = "numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e"}, - {file = "numba-0.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198"}, - {file = "numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8"}, - {file = "numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703"}, - {file = "numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8"}, - {file = "numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2"}, - {file = "numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404"}, - {file = "numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e"}, - {file = "numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d"}, - {file = "numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347"}, - {file = "numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74"}, - {file = "numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b"}, - {file = "numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25"}, - {file = "numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab"}, - {file = "numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16"}, -] - -[package.dependencies] -llvmlite = "==0.43.*" -numpy = ">=1.22,<2.1" - -[[package]] -name = "numpy" -version = "2.0.2" -description = "Fundamental package for array computing in Python" -optional = true -python-versions = ">=3.9" -files = [ - {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, - {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, - {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, - {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, - {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, - {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, - {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, - {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, - {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, - {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, - {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, - {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, - {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, - {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, - {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, - {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, - {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, - {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, - {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, - {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, - {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, - {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, - {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, -] - -[[package]] -name = "openai" -version = "1.57.1" -description = "The official Python library for the openai API" -optional = true -python-versions = ">=3.8" -files = [ - {file = "openai-1.57.1-py3-none-any.whl", hash = "sha256:3865686c927e93492d1145938d4a24b634951531c4b2769d43ca5dbd4b25d8fd"}, - {file = "openai-1.57.1.tar.gz", hash = "sha256:a95f22e04ab3df26e64a15d958342265e802314131275908b3b3e36f8c5d4377"}, -] - -[package.dependencies] -anyio = ">=3.5.0,<5" -distro = ">=1.7.0,<2" -httpx = ">=0.23.0,<1" -jiter = ">=0.4.0,<1" -pydantic = ">=1.9.0,<3" -sniffio = "*" -tqdm = ">4" -typing-extensions = ">=4.11,<5" - -[package.extras] -datalib = ["numpy (>=1)", "pandas (>=1.2.3)", "pandas-stubs (>=1.1.0.11)"] - -[[package]] -name = "ordered-set" -version = "4.1.0" -description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, - {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, -] - -[package.extras] -dev = ["black", "mypy", "pytest"] - -[[package]] -name = "packaging" -version = "24.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, -] - -[[package]] -name = "pandas" -version = "2.2.3" -description = "Powerful data structures for data analysis, time series, and statistics" -optional = true -python-versions = ">=3.9" -files = [ - {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, - {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, - {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, - {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, - {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, - {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, - {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, - {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, - {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, - {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, - {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, - {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, - {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, - {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, - {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, - {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, - {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, - {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, - {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, - {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, - {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, - {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, - {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, - {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, - {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, -] - -[package.dependencies] -numpy = [ - {version = ">=1.22.4", markers = "python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version == \"3.11\""}, -] -python-dateutil = ">=2.8.2" -pytz = ">=2020.1" -tzdata = ">=2022.7" - -[package.extras] -all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] -aws = ["s3fs (>=2022.11.0)"] -clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] -compression = ["zstandard (>=0.19.0)"] -computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] -consortium-standard = ["dataframe-api-compat (>=0.1.7)"] -excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] -feather = ["pyarrow (>=10.0.1)"] -fss = ["fsspec (>=2022.11.0)"] -gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] -hdf5 = ["tables (>=3.8.0)"] -html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] -mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] -output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] -parquet = ["pyarrow (>=10.0.1)"] -performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] -plot = ["matplotlib (>=3.6.3)"] -postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] -pyarrow = ["pyarrow (>=10.0.1)"] -spss = ["pyreadstat (>=1.2.0)"] -sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] -test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] -xml = ["lxml (>=4.9.2)"] - -[[package]] -name = "parsedatetime" -version = "2.6" -description = "Parse human-readable date/time text." -optional = false -python-versions = "*" -files = [ - {file = "parsedatetime-2.6-py3-none-any.whl", hash = "sha256:cb96edd7016872f58479e35879294258c71437195760746faffedb692aef000b"}, - {file = "parsedatetime-2.6.tar.gz", hash = "sha256:4cb368fbb18a0b7231f4d76119165451c8d2e35951455dfee97c62a87b04d455"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "patsy" -version = "1.0.1" -description = "A Python package for describing statistical models and for building design matrices." -optional = true -python-versions = ">=3.6" -files = [ - {file = "patsy-1.0.1-py2.py3-none-any.whl", hash = "sha256:751fb38f9e97e62312e921a1954b81e1bb2bcda4f5eeabaf94db251ee791509c"}, - {file = "patsy-1.0.1.tar.gz", hash = "sha256:e786a9391eec818c054e359b737bbce692f051aee4c661f4141cc88fb459c0c4"}, -] - -[package.dependencies] -numpy = ">=1.4" - -[package.extras] -test = ["pytest", "pytest-cov", "scipy"] - -[[package]] -name = "phik" -version = "0.12.4" -description = "Phi_K correlation analyzer library" -optional = true -python-versions = ">=3.8" -files = [ - {file = "phik-0.12.4-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:778d00e33762c1e85681f65ef011933faabdc80ab53262f221cccf75eea535d5"}, - {file = "phik-0.12.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d92cc961ee60b317896589bab087901440b2bc749dbd5e266bc3dfe25dbff19a"}, - {file = "phik-0.12.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f48d0dd94323401ed069bbaa673a879f3f002e5ef6fabda19eb3d0a5f8e3947f"}, - {file = "phik-0.12.4-cp310-cp310-win_amd64.whl", hash = "sha256:ea5030640fda8380d7db9ea28fbde37a1565c0b1699bcb7152d6772a6ad278af"}, - {file = "phik-0.12.4-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2b2f518310c6f3144a5e3d1bc3489c8be17ebe4da6b8520f4e01fa3e544b0fed"}, - {file = "phik-0.12.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f7a6614184eac1b55100c4a7c9899f370ae97599b41b2982f59f7e1da9511cd"}, - {file = "phik-0.12.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea158b31d51e34153241bd3cac24c9a9a463af575c063abb8ca8d30352b4b12"}, - {file = "phik-0.12.4-cp311-cp311-win_amd64.whl", hash = "sha256:f315699c695e5646b29911b577d584ae76d0fcc1dee539634e512518fcd4108d"}, - {file = "phik-0.12.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:951b06ed32fa0fe6ee73f98407e4d435f90a1750ecb0f250df46eb75741a33bf"}, - {file = "phik-0.12.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b6ba2fa65c4b2a3c36aded0f47333c3069c0520bb426c3f937656a58a5041957"}, - {file = "phik-0.12.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3868a8f9277ab338eacb634bb06dd83278344dc19154f77e06c9cb8712959404"}, - {file = "phik-0.12.4-cp312-cp312-win_amd64.whl", hash = "sha256:247ea90b2d067bb360e798e5645dbcea7753b3bf78436287d92247285c4aa58a"}, - {file = "phik-0.12.4-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:6b38483f02c8a2d471dd14ebc367f83cd619a3672033f1ce52382815cdb9382d"}, - {file = "phik-0.12.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0df90db67dadae940973ffd0692c2e9a207da46b8764e200cb7e6f2552d43154"}, - {file = "phik-0.12.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85c329bd206bfdca689f72f1bb270707f19d5533882b3cde560ce0cbf4b27551"}, - {file = "phik-0.12.4-cp38-cp38-win_amd64.whl", hash = "sha256:eb43bd2b3b6b068b4d2f85a303cfdc256294637f3a598234058cfdbdc75d8538"}, - {file = "phik-0.12.4-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:c2c7482e8ca1e9f688eacd69baccf838fc535b9d3c13523b2d3b53b4aff04c5d"}, - {file = "phik-0.12.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7eb9c0a22d01007a4c51d48489c4f3ebe738461e092061c90da7c1ccf8d51e60"}, - {file = "phik-0.12.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd26c71de023852aa452897e41a55176d6d87c268323d0814514cd32a9fadc1"}, - {file = "phik-0.12.4-cp39-cp39-win_amd64.whl", hash = "sha256:c15e987d90d34990fee0ef157fb00c9c69befdf520689ac5f320ff0ab74fa399"}, - {file = "phik-0.12.4.tar.gz", hash = "sha256:d4d53274685e56fb08088505b4eec70be07f2f8044e7961ca02b399e42c37025"}, -] - -[package.dependencies] -joblib = ">=0.14.1" -matplotlib = ">=2.2.3" -numpy = ">=1.18.0" -pandas = ">=0.25.1" -scipy = ">=1.5.2" - -[package.extras] -test = ["pytest (>=4.0.2)", "pytest-pylint (>=0.13.0)"] - -[[package]] -name = "pillow" -version = "11.0.0" -description = "Python Imaging Library (Fork)" -optional = true -python-versions = ">=3.9" -files = [ - {file = "pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947"}, - {file = "pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba"}, - {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086"}, - {file = "pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9"}, - {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488"}, - {file = "pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f"}, - {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb"}, - {file = "pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97"}, - {file = "pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50"}, - {file = "pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c"}, - {file = "pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1"}, - {file = "pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc"}, - {file = "pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a"}, - {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3"}, - {file = "pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5"}, - {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b"}, - {file = "pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa"}, - {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306"}, - {file = "pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9"}, - {file = "pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5"}, - {file = "pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291"}, - {file = "pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9"}, - {file = "pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923"}, - {file = "pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903"}, - {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4"}, - {file = "pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f"}, - {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9"}, - {file = "pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7"}, - {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6"}, - {file = "pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc"}, - {file = "pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6"}, - {file = "pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47"}, - {file = "pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25"}, - {file = "pillow-11.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcd1fb5bb7b07f64c15618c89efcc2cfa3e95f0e3bcdbaf4642509de1942a699"}, - {file = "pillow-11.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0e038b0745997c7dcaae350d35859c9715c71e92ffb7e0f4a8e8a16732150f38"}, - {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ae08bd8ffc41aebf578c2af2f9d8749d91f448b3bfd41d7d9ff573d74f2a6b2"}, - {file = "pillow-11.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d69bfd8ec3219ae71bcde1f942b728903cad25fafe3100ba2258b973bd2bc1b2"}, - {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:61b887f9ddba63ddf62fd02a3ba7add935d053b6dd7d58998c630e6dbade8527"}, - {file = "pillow-11.0.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:c6a660307ca9d4867caa8d9ca2c2658ab685de83792d1876274991adec7b93fa"}, - {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:73e3a0200cdda995c7e43dd47436c1548f87a30bb27fb871f352a22ab8dcf45f"}, - {file = "pillow-11.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fba162b8872d30fea8c52b258a542c5dfd7b235fb5cb352240c8d63b414013eb"}, - {file = "pillow-11.0.0-cp313-cp313-win32.whl", hash = "sha256:f1b82c27e89fffc6da125d5eb0ca6e68017faf5efc078128cfaa42cf5cb38798"}, - {file = "pillow-11.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:8ba470552b48e5835f1d23ecb936bb7f71d206f9dfeee64245f30c3270b994de"}, - {file = "pillow-11.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:846e193e103b41e984ac921b335df59195356ce3f71dcfd155aa79c603873b84"}, - {file = "pillow-11.0.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4ad70c4214f67d7466bea6a08061eba35c01b1b89eaa098040a35272a8efb22b"}, - {file = "pillow-11.0.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:6ec0d5af64f2e3d64a165f490d96368bb5dea8b8f9ad04487f9ab60dc4bb6003"}, - {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c809a70e43c7977c4a42aefd62f0131823ebf7dd73556fa5d5950f5b354087e2"}, - {file = "pillow-11.0.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:4b60c9520f7207aaf2e1d94de026682fc227806c6e1f55bba7606d1c94dd623a"}, - {file = "pillow-11.0.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:1e2688958a840c822279fda0086fec1fdab2f95bf2b717b66871c4ad9859d7e8"}, - {file = "pillow-11.0.0-cp313-cp313t-win32.whl", hash = "sha256:607bbe123c74e272e381a8d1957083a9463401f7bd01287f50521ecb05a313f8"}, - {file = "pillow-11.0.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5c39ed17edea3bc69c743a8dd3e9853b7509625c2462532e62baa0732163a904"}, - {file = "pillow-11.0.0-cp313-cp313t-win_arm64.whl", hash = "sha256:75acbbeb05b86bc53cbe7b7e6fe00fbcf82ad7c684b3ad82e3d711da9ba287d3"}, - {file = "pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba"}, - {file = "pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a"}, - {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916"}, - {file = "pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d"}, - {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7"}, - {file = "pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e"}, - {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f"}, - {file = "pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae"}, - {file = "pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4"}, - {file = "pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd"}, - {file = "pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734"}, - {file = "pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316"}, - {file = "pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06"}, - {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273"}, - {file = "pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790"}, - {file = "pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944"}, - {file = "pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739"}, -] - -[package.extras] -docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] -fpx = ["olefile"] -mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -typing = ["typing-extensions"] -xmp = ["defusedxml"] - -[[package]] -name = "platformdirs" -version = "4.3.6" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, - {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.11.2)"] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "pre-commit" -version = "4.0.1" -description = "A framework for managing and maintaining multi-language pre-commit hooks." -optional = false -python-versions = ">=3.9" -files = [ - {file = "pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878"}, - {file = "pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2"}, -] - -[package.dependencies] -cfgv = ">=2.0.0" -identify = ">=1.0.0" -nodeenv = ">=0.11.1" -pyyaml = ">=5.1" -virtualenv = ">=20.10.0" - -[[package]] -name = "protobuf" -version = "5.29.1" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-5.29.1-cp310-abi3-win32.whl", hash = "sha256:22c1f539024241ee545cbcb00ee160ad1877975690b16656ff87dde107b5f110"}, - {file = "protobuf-5.29.1-cp310-abi3-win_amd64.whl", hash = "sha256:1fc55267f086dd4050d18ef839d7bd69300d0d08c2a53ca7df3920cc271a3c34"}, - {file = "protobuf-5.29.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:d473655e29c0c4bbf8b69e9a8fb54645bc289dead6d753b952e7aa660254ae18"}, - {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:b5ba1d0e4c8a40ae0496d0e2ecfdbb82e1776928a205106d14ad6985a09ec155"}, - {file = "protobuf-5.29.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:8ee1461b3af56145aca2800e6a3e2f928108c749ba8feccc6f5dd0062c410c0d"}, - {file = "protobuf-5.29.1-cp38-cp38-win32.whl", hash = "sha256:50879eb0eb1246e3a5eabbbe566b44b10348939b7cc1b267567e8c3d07213853"}, - {file = "protobuf-5.29.1-cp38-cp38-win_amd64.whl", hash = "sha256:027fbcc48cea65a6b17028510fdd054147057fa78f4772eb547b9274e5219331"}, - {file = "protobuf-5.29.1-cp39-cp39-win32.whl", hash = "sha256:5a41deccfa5e745cef5c65a560c76ec0ed8e70908a67cc8f4da5fce588b50d57"}, - {file = "protobuf-5.29.1-cp39-cp39-win_amd64.whl", hash = "sha256:012ce28d862ff417fd629285aca5d9772807f15ceb1a0dbd15b88f58c776c98c"}, - {file = "protobuf-5.29.1-py3-none-any.whl", hash = "sha256:32600ddb9c2a53dedc25b8581ea0f1fd8ea04956373c0c07577ce58d312522e0"}, - {file = "protobuf-5.29.1.tar.gz", hash = "sha256:683be02ca21a6ffe80db6dd02c0b5b2892322c59ca57fd6c872d652cb80549cb"}, -] - -[[package]] -name = "psycopg2-binary" -version = "2.9.10" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = true -python-versions = ">=3.8" -files = [ - {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, -] - -[[package]] -name = "pyarrow" -version = "18.1.0" -description = "Python library for Apache Arrow" -optional = true -python-versions = ">=3.9" -files = [ - {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c"}, - {file = "pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4"}, - {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f443122c8e31f4c9199cb23dca29ab9427cef990f283f80fe15b8e124bcc49b"}, - {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a03da7f2758645d17b7b4f83c8bffeae5bbb7f974523fe901f36288d2eab71"}, - {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ba17845efe3aa358ec266cf9cc2800fa73038211fb27968bfa88acd09261a470"}, - {file = "pyarrow-18.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3c35813c11a059056a22a3bef520461310f2f7eea5c8a11ef9de7062a23f8d56"}, - {file = "pyarrow-18.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9736ba3c85129d72aefa21b4f3bd715bc4190fe4426715abfff90481e7d00812"}, - {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854"}, - {file = "pyarrow-18.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c"}, - {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21"}, - {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6"}, - {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe"}, - {file = "pyarrow-18.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0"}, - {file = "pyarrow-18.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a"}, - {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d"}, - {file = "pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee"}, - {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992"}, - {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54"}, - {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33"}, - {file = "pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30"}, - {file = "pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99"}, - {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:84e314d22231357d473eabec709d0ba285fa706a72377f9cc8e1cb3c8013813b"}, - {file = "pyarrow-18.1.0-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:f591704ac05dfd0477bb8f8e0bd4b5dc52c1cadf50503858dce3a15db6e46ff2"}, - {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:acb7564204d3c40babf93a05624fc6a8ec1ab1def295c363afc40b0c9e66c191"}, - {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74de649d1d2ccb778f7c3afff6085bd5092aed4c23df9feeb45dd6b16f3811aa"}, - {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:f96bd502cb11abb08efea6dab09c003305161cb6c9eafd432e35e76e7fa9b90c"}, - {file = "pyarrow-18.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:36ac22d7782554754a3b50201b607d553a8d71b78cdf03b33c1125be4b52397c"}, - {file = "pyarrow-18.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:25dbacab8c5952df0ca6ca0af28f50d45bd31c1ff6fcf79e2d120b4a65ee7181"}, - {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_arm64.whl", hash = "sha256:6a276190309aba7bc9d5bd2933230458b3521a4317acfefe69a354f2fe59f2bc"}, - {file = "pyarrow-18.1.0-cp313-cp313t-macosx_12_0_x86_64.whl", hash = "sha256:ad514dbfcffe30124ce655d72771ae070f30bf850b48bc4d9d3b25993ee0e386"}, - {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aebc13a11ed3032d8dd6e7171eb6e86d40d67a5639d96c35142bd568b9299324"}, - {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6cf5c05f3cee251d80e98726b5c7cc9f21bab9e9783673bac58e6dfab57ecc8"}, - {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_aarch64.whl", hash = "sha256:11b676cd410cf162d3f6a70b43fb9e1e40affbc542a1e9ed3681895f2962d3d9"}, - {file = "pyarrow-18.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:b76130d835261b38f14fc41fdfb39ad8d672afb84c447126b84d5472244cfaba"}, - {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:0b331e477e40f07238adc7ba7469c36b908f07c89b95dd4bd3a0ec84a3d1e21e"}, - {file = "pyarrow-18.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c4dd0c9010a25ba03e198fe743b1cc03cd33c08190afff371749c52ccbbaf76"}, - {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f97b31b4c4e21ff58c6f330235ff893cc81e23da081b1a4b1c982075e0ed4e9"}, - {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a4813cb8ecf1809871fd2d64a8eff740a1bd3691bbe55f01a3cf6c5ec869754"}, - {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:05a5636ec3eb5cc2a36c6edb534a38ef57b2ab127292a716d00eabb887835f1e"}, - {file = "pyarrow-18.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:73eeed32e724ea3568bb06161cad5fa7751e45bc2228e33dcb10c614044165c7"}, - {file = "pyarrow-18.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a1880dd6772b685e803011a6b43a230c23b566859a6e0c9a276c1e0faf4f4052"}, - {file = "pyarrow-18.1.0.tar.gz", hash = "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73"}, -] - -[package.extras] -test = ["cffi", "hypothesis", "pandas", "pytest", "pytz"] - -[[package]] -name = "pydantic" -version = "2.10.3" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic-2.10.3-py3-none-any.whl", hash = "sha256:be04d85bbc7b65651c5f8e6b9976ed9c6f41782a55524cef079a34a0bb82144d"}, - {file = "pydantic-2.10.3.tar.gz", hash = "sha256:cb5ac360ce894ceacd69c403187900a02c4b20b693a9dd1d643e1effab9eadf9"}, -] - -[package.dependencies] -annotated-types = ">=0.6.0" -pydantic-core = "2.27.1" -typing-extensions = ">=4.12.2" - -[package.extras] -email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata"] - -[[package]] -name = "pydantic-core" -version = "2.27.1" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:71a5e35c75c021aaf400ac048dacc855f000bdfed91614b4a726f7432f1f3d6a"}, - {file = "pydantic_core-2.27.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f82d068a2d6ecfc6e054726080af69a6764a10015467d7d7b9f66d6ed5afa23b"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:121ceb0e822f79163dd4699e4c54f5ad38b157084d97b34de8b232bcaad70278"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4603137322c18eaf2e06a4495f426aa8d8388940f3c457e7548145011bb68e05"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a33cd6ad9017bbeaa9ed78a2e0752c5e250eafb9534f308e7a5f7849b0b1bfb4"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:15cc53a3179ba0fcefe1e3ae50beb2784dede4003ad2dfd24f81bba4b23a454f"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45d9c5eb9273aa50999ad6adc6be5e0ecea7e09dbd0d31bd0c65a55a2592ca08"}, - {file = "pydantic_core-2.27.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8bf7b66ce12a2ac52d16f776b31d16d91033150266eb796967a7e4621707e4f6"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:655d7dd86f26cb15ce8a431036f66ce0318648f8853d709b4167786ec2fa4807"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:5556470f1a2157031e676f776c2bc20acd34c1990ca5f7e56f1ebf938b9ab57c"}, - {file = "pydantic_core-2.27.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f69ed81ab24d5a3bd93861c8c4436f54afdf8e8cc421562b0c7504cf3be58206"}, - {file = "pydantic_core-2.27.1-cp310-none-win32.whl", hash = "sha256:f5a823165e6d04ccea61a9f0576f345f8ce40ed533013580e087bd4d7442b52c"}, - {file = "pydantic_core-2.27.1-cp310-none-win_amd64.whl", hash = "sha256:57866a76e0b3823e0b56692d1a0bf722bffb324839bb5b7226a7dbd6c9a40b17"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8"}, - {file = "pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025"}, - {file = "pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c"}, - {file = "pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc"}, - {file = "pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9"}, - {file = "pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5"}, - {file = "pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f"}, - {file = "pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35"}, - {file = "pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb"}, - {file = "pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae"}, - {file = "pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c"}, - {file = "pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16"}, - {file = "pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073"}, - {file = "pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51"}, - {file = "pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960"}, - {file = "pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23"}, - {file = "pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05"}, - {file = "pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337"}, - {file = "pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:5897bec80a09b4084aee23f9b73a9477a46c3304ad1d2d07acca19723fb1de62"}, - {file = "pydantic_core-2.27.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d0165ab2914379bd56908c02294ed8405c252250668ebcb438a55494c69f44ab"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b9af86e1d8e4cfc82c2022bfaa6f459381a50b94a29e95dcdda8442d6d83864"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f6c8a66741c5f5447e047ab0ba7a1c61d1e95580d64bce852e3df1f895c4067"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a42d6a8156ff78981f8aa56eb6394114e0dedb217cf8b729f438f643608cbcd"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64c65f40b4cd8b0e049a8edde07e38b476da7e3aaebe63287c899d2cff253fa5"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdcf339322a3fae5cbd504edcefddd5a50d9ee00d968696846f089b4432cf78"}, - {file = "pydantic_core-2.27.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bf99c8404f008750c846cb4ac4667b798a9f7de673ff719d705d9b2d6de49c5f"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8f1edcea27918d748c7e5e4d917297b2a0ab80cad10f86631e488b7cddf76a36"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:159cac0a3d096f79ab6a44d77a961917219707e2a130739c64d4dd46281f5c2a"}, - {file = "pydantic_core-2.27.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:029d9757eb621cc6e1848fa0b0310310de7301057f623985698ed7ebb014391b"}, - {file = "pydantic_core-2.27.1-cp38-none-win32.whl", hash = "sha256:a28af0695a45f7060e6f9b7092558a928a28553366519f64083c63a44f70e618"}, - {file = "pydantic_core-2.27.1-cp38-none-win_amd64.whl", hash = "sha256:2d4567c850905d5eaaed2f7a404e61012a51caf288292e016360aa2b96ff38d4"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e9386266798d64eeb19dd3677051f5705bf873e98e15897ddb7d76f477131967"}, - {file = "pydantic_core-2.27.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4228b5b646caa73f119b1ae756216b59cc6e2267201c27d3912b592c5e323b60"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b3dfe500de26c52abe0477dde16192ac39c98f05bf2d80e76102d394bd13854"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:aee66be87825cdf72ac64cb03ad4c15ffef4143dbf5c113f64a5ff4f81477bf9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b748c44bb9f53031c8cbc99a8a061bc181c1000c60a30f55393b6e9c45cc5bd"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ca038c7f6a0afd0b2448941b6ef9d5e1949e999f9e5517692eb6da58e9d44be"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e0bd57539da59a3e4671b90a502da9a28c72322a4f17866ba3ac63a82c4498e"}, - {file = "pydantic_core-2.27.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ac6c2c45c847bbf8f91930d88716a0fb924b51e0c6dad329b793d670ec5db792"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b94d4ba43739bbe8b0ce4262bcc3b7b9f31459ad120fb595627eaeb7f9b9ca01"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:00e6424f4b26fe82d44577b4c842d7df97c20be6439e8e685d0d715feceb9fb9"}, - {file = "pydantic_core-2.27.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:38de0a70160dd97540335b7ad3a74571b24f1dc3ed33f815f0880682e6880131"}, - {file = "pydantic_core-2.27.1-cp39-none-win32.whl", hash = "sha256:7ccebf51efc61634f6c2344da73e366c75e735960b5654b63d7e6f69a5885fa3"}, - {file = "pydantic_core-2.27.1-cp39-none-win_amd64.whl", hash = "sha256:a57847b090d7892f123726202b7daa20df6694cbd583b67a592e856bff603d6c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3fa80ac2bd5856580e242dbc202db873c60a01b20309c8319b5c5986fbe53ce6"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d950caa237bb1954f1b8c9227b5065ba6875ac9771bb8ec790d956a699b78676"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e4216e64d203e39c62df627aa882f02a2438d18a5f21d7f721621f7a5d3611d"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02a3d637bd387c41d46b002f0e49c52642281edacd2740e5a42f7017feea3f2c"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:161c27ccce13b6b0c8689418da3885d3220ed2eae2ea5e9b2f7f3d48f1d52c27"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:19910754e4cc9c63bc1c7f6d73aa1cfee82f42007e407c0f413695c2f7ed777f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:e173486019cc283dc9778315fa29a363579372fe67045e971e89b6365cc035ed"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:af52d26579b308921b73b956153066481f064875140ccd1dfd4e77db89dbb12f"}, - {file = "pydantic_core-2.27.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:981fb88516bd1ae8b0cbbd2034678a39dedc98752f264ac9bc5839d3923fa04c"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5fde892e6c697ce3e30c61b239330fc5d569a71fefd4eb6512fc6caec9dd9e2f"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:816f5aa087094099fff7edabb5e01cc370eb21aa1a1d44fe2d2aefdfb5599b31"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c10c309e18e443ddb108f0ef64e8729363adbfd92d6d57beec680f6261556f3"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98476c98b02c8e9b2eec76ac4156fd006628b1b2d0ef27e548ffa978393fd154"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c3027001c28434e7ca5a6e1e527487051136aa81803ac812be51802150d880dd"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:7699b1df36a48169cdebda7ab5a2bac265204003f153b4bd17276153d997670a"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1c39b07d90be6b48968ddc8c19e7585052088fd7ec8d568bb31ff64c70ae3c97"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:46ccfe3032b3915586e469d4972973f893c0a2bb65669194a5bdea9bacc088c2"}, - {file = "pydantic_core-2.27.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:62ba45e21cf6571d7f716d903b5b7b6d2617e2d5d67c0923dc47b9d41369f840"}, - {file = "pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pydeck" -version = "0.9.1" -description = "Widget for deck.gl maps" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pydeck-0.9.1-py2.py3-none-any.whl", hash = "sha256:b3f75ba0d273fc917094fa61224f3f6076ca8752b93d46faf3bcfd9f9d59b038"}, - {file = "pydeck-0.9.1.tar.gz", hash = "sha256:f74475ae637951d63f2ee58326757f8d4f9cd9f2a457cf42950715003e2cb605"}, -] - -[package.dependencies] -jinja2 = ">=2.10.1" -numpy = ">=1.16.4" - -[package.extras] -carto = ["pydeck-carto"] -jupyter = ["ipykernel (>=5.1.2)", "ipython (>=5.8.0)", "ipywidgets (>=7,<8)", "traitlets (>=4.3.2)"] - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pylint" -version = "3.3.2" -description = "python code static checker" -optional = false -python-versions = ">=3.9.0" -files = [ - {file = "pylint-3.3.2-py3-none-any.whl", hash = "sha256:77f068c287d49b8683cd7c6e624243c74f92890f767f106ffa1ddf3c0a54cb7a"}, - {file = "pylint-3.3.2.tar.gz", hash = "sha256:9ec054ec992cd05ad30a6df1676229739a73f8feeabf3912c995d17601052b01"}, -] - -[package.dependencies] -astroid = ">=3.3.5,<=3.4.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, -] -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -spelling = ["pyenchant (>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - -[[package]] -name = "pyparsing" -version = "3.2.0" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = true -python-versions = ">=3.9" -files = [ - {file = "pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84"}, - {file = "pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - -[[package]] -name = "pytest" -version = "7.4.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, - {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "python-slugify" -version = "8.0.4" -description = "A Python slugify application that also handles Unicode" -optional = false -python-versions = ">=3.7" -files = [ - {file = "python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856"}, - {file = "python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8"}, -] - -[package.dependencies] -text-unidecode = ">=1.3" - -[package.extras] -unidecode = ["Unidecode (>=1.1.1)"] - -[[package]] -name = "pytimeparse" -version = "1.1.8" -description = "Time expression parser" -optional = false -python-versions = "*" -files = [ - {file = "pytimeparse-1.1.8-py2.py3-none-any.whl", hash = "sha256:04b7be6cc8bd9f5647a6325444926c3ac34ee6bc7e69da4367ba282f076036bd"}, - {file = "pytimeparse-1.1.8.tar.gz", hash = "sha256:e86136477be924d7e670646a98561957e8ca7308d44841e21f5ddea757556a0a"}, -] - -[[package]] -name = "pytz" -version = "2024.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, -] - -[[package]] -name = "pywavelets" -version = "1.6.0" -description = "PyWavelets, wavelet transform module" -optional = true -python-versions = ">=3.9" -files = [ - {file = "pywavelets-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ddc1ff5ad706313d930f857f9656f565dfb81b85bbe58a9db16ad8fa7d1537c5"}, - {file = "pywavelets-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:78feab4e0c25fa32034b6b64cb854c6ce15663b4f0ffb25d8f0ee58915300f9b"}, - {file = "pywavelets-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be36f08efe9bc3abf40cf40cd2ee0aa0db26e4894e13ce5ac178442864161e8c"}, - {file = "pywavelets-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0595c51472c9c5724fe087cb73e2797053fd25c788d6553fdad6ff61abc60e91"}, - {file = "pywavelets-1.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:058a750477dde633ac53b8806f835af3559d52db6532fb2b93c1f4b5441365b8"}, - {file = "pywavelets-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:538795d9c4181152b414285b5a7f72ac52581ecdcdce74b6cca3fa0b8a5ab0aa"}, - {file = "pywavelets-1.6.0-cp310-cp310-win32.whl", hash = "sha256:47de024ba4f9df97e98b5f540340e1a9edd82d2c477450bef8c9b5381487128e"}, - {file = "pywavelets-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e2c44760c0906ddf2176920a2613287f6eea947f166ce7eee9546081b06a6835"}, - {file = "pywavelets-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d91aaaf6de53b758bcdc96c81cdb5a8607758602be49f691188c0e108cf1e738"}, - {file = "pywavelets-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b5302edb6d1d1ff6636d37c9ff29c4892f2a3648d736cc1df01f3f36e25c8cf"}, - {file = "pywavelets-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e655446e37a3c87213d5c6386b86f65c4d61736b4432d720171e7dd6523d6a"}, - {file = "pywavelets-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ec7d69b746a0eaa327b829a3252a63619f2345e263177be5dd9bf30d7933c8d"}, - {file = "pywavelets-1.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97ea9613bd6b7108ebb44b709060adc7e2d5fac73be7152342bdd5513d75f84e"}, - {file = "pywavelets-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:48b3813c6d1a7a8194f37dbb5dbbdf2fe1112152c91445ea2e54f64ff6350c36"}, - {file = "pywavelets-1.6.0-cp311-cp311-win32.whl", hash = "sha256:4ffb484d096a5eb10af7121e0203546a03e1369328df321a33ef91f67bac40cf"}, - {file = "pywavelets-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:274bc47b289585383aa65519b3fcae5b4dee5e31db3d4198d4fad701a70e59f7"}, - {file = "pywavelets-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6ec113386a432e04103f95e351d2657b42145bd1e1ed26513423391bcb5f011"}, - {file = "pywavelets-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab652112d3932d21f020e281e06926a751354c2b5629fb716f5eb9d0104b84e5"}, - {file = "pywavelets-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47b0314a22616c5f3f08760f0e00b4a15b7c7dadca5e39bb701cf7869a4207c5"}, - {file = "pywavelets-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138471513bc0a4cd2ddc4e50c7ec04e3468c268e101a0d02f698f6aedd1d5e79"}, - {file = "pywavelets-1.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67936491ae3e5f957c428e34fdaed21f131535b8d60c7c729a1b539ce8864837"}, - {file = "pywavelets-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dd798cee3d28fb3d32a26a00d9831a20bf316c36d685e4ced01b4e4a8f36f5ce"}, - {file = "pywavelets-1.6.0-cp312-cp312-win32.whl", hash = "sha256:e772f7f0c16bfc3be8ac3cd10d29a9920bb7a39781358856223c491b899e6e79"}, - {file = "pywavelets-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:4ef15a63a72afa67ae9f4f3b06c95c5382730fb3075e668d49a880e65f2f089c"}, - {file = "pywavelets-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:627df378e63e9c789b6f2e7060cb4264ebae6f6b0efc1da287a2c060de454a1f"}, - {file = "pywavelets-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a413b51dc19e05243fe0b0864a8e8a16b5ca9bf2e4713da00a95b1b5747a5367"}, - {file = "pywavelets-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be615c6c1873e189c265d4a76d1751ec49b17e29725e6dd2e9c74f1868f590b7"}, - {file = "pywavelets-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4021ef69ec9f3862f66580fc4417be728bd78722914394594b48212fd1fcaf21"}, - {file = "pywavelets-1.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8fbf7b61b28b5457693c034e58a01622756d1fd60a80ae13ac5888b1d3e57e80"}, - {file = "pywavelets-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f58ddbb0a6cd243928876edfc463b990763a24fb94498607d6fea690e32cca4c"}, - {file = "pywavelets-1.6.0-cp39-cp39-win32.whl", hash = "sha256:42a22e68e345b6de7d387ef752111ab4530c98048d2b4bdac8ceefb078b4ead6"}, - {file = "pywavelets-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:32198de321892743c1a3d1957fe1cd8a8ecc078bfbba6b8f3982518e897271d7"}, - {file = "pywavelets-1.6.0.tar.gz", hash = "sha256:ea027c70977122c5fc27b2510f0a0d9528f9c3df6ea3e4c577ca55fd00325a5b"}, -] - -[package.dependencies] -numpy = ">=1.22.4,<3" - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "referencing" -version = "0.35.1" -description = "JSON Referencing + Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de"}, - {file = "referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -rpds-py = ">=0.7.0" - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rich" -version = "13.9.4" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, - {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "rpds-py" -version = "0.22.3" -description = "Python bindings to Rust's persistent data structures (rpds)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967"}, - {file = "rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec"}, - {file = "rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00"}, - {file = "rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf"}, - {file = "rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652"}, - {file = "rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f"}, - {file = "rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1"}, - {file = "rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74"}, - {file = "rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a"}, - {file = "rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64"}, - {file = "rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e"}, - {file = "rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15"}, - {file = "rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61"}, - {file = "rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7"}, - {file = "rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627"}, - {file = "rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:ea7433ce7e4bfc3a85654aeb6747babe3f66eaf9a1d0c1e7a4435bbdf27fea84"}, - {file = "rpds_py-0.22.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6dd9412824c4ce1aca56c47b0991e65bebb7ac3f4edccfd3f156150c96a7bf25"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20070c65396f7373f5df4005862fa162db5d25d56150bddd0b3e8214e8ef45b4"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0b09865a9abc0ddff4e50b5ef65467cd94176bf1e0004184eb915cbc10fc05c5"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3453e8d41fe5f17d1f8e9c383a7473cd46a63661628ec58e07777c2fff7196dc"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f5d36399a1b96e1a5fdc91e0522544580dbebeb1f77f27b2b0ab25559e103b8b"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:009de23c9c9ee54bf11303a966edf4d9087cd43a6003672e6aa7def643d06518"}, - {file = "rpds_py-0.22.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1aef18820ef3e4587ebe8b3bc9ba6e55892a6d7b93bac6d29d9f631a3b4befbd"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f60bd8423be1d9d833f230fdbccf8f57af322d96bcad6599e5a771b151398eb2"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:62d9cfcf4948683a18a9aff0ab7e1474d407b7bab2ca03116109f8464698ab16"}, - {file = "rpds_py-0.22.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9253fc214112405f0afa7db88739294295f0e08466987f1d70e29930262b4c8f"}, - {file = "rpds_py-0.22.3-cp313-cp313-win32.whl", hash = "sha256:fb0ba113b4983beac1a2eb16faffd76cb41e176bf58c4afe3e14b9c681f702de"}, - {file = "rpds_py-0.22.3-cp313-cp313-win_amd64.whl", hash = "sha256:c58e2339def52ef6b71b8f36d13c3688ea23fa093353f3a4fee2556e62086ec9"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f82a116a1d03628a8ace4859556fb39fd1424c933341a08ea3ed6de1edb0283b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3dfcbc95bd7992b16f3f7ba05af8a64ca694331bd24f9157b49dadeeb287493b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59259dc58e57b10e7e18ce02c311804c10c5a793e6568f8af4dead03264584d1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5725dd9cc02068996d4438d397e255dcb1df776b7ceea3b9cb972bdb11260a83"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:99b37292234e61325e7a5bb9689e55e48c3f5f603af88b1642666277a81f1fbd"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:27b1d3b3915a99208fee9ab092b8184c420f2905b7d7feb4aeb5e4a9c509b8a1"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f612463ac081803f243ff13cccc648578e2279295048f2a8d5eb430af2bae6e3"}, - {file = "rpds_py-0.22.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f73d3fef726b3243a811121de45193c0ca75f6407fe66f3f4e183c983573e130"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:3f21f0495edea7fdbaaa87e633a8689cd285f8f4af5c869f27bc8074638ad69c"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:1e9663daaf7a63ceccbbb8e3808fe90415b0757e2abddbfc2e06c857bf8c5e2b"}, - {file = "rpds_py-0.22.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a76e42402542b1fae59798fab64432b2d015ab9d0c8c47ba7addddbaf7952333"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win32.whl", hash = "sha256:69803198097467ee7282750acb507fba35ca22cc3b85f16cf45fb01cb9097730"}, - {file = "rpds_py-0.22.3-cp313-cp313t-win_amd64.whl", hash = "sha256:f5cf2a0c2bdadf3791b5c205d55a37a54025c6e18a71c71f82bb536cf9a454bf"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea"}, - {file = "rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543"}, - {file = "rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831"}, - {file = "rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520"}, - {file = "rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9"}, - {file = "rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe"}, - {file = "rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7"}, - {file = "rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6"}, - {file = "rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d"}, -] - -[[package]] -name = "ruamel-yaml" -version = "0.18.6" -description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" -optional = false -python-versions = ">=3.7" -files = [ - {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, - {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, -] - -[package.dependencies] -"ruamel.yaml.clib" = {version = ">=0.2.7", markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\""} - -[package.extras] -docs = ["mercurial (>5.7)", "ryd"] -jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] - -[[package]] -name = "ruamel-yaml-clib" -version = "0.2.12" -description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" -optional = false -python-versions = ">=3.9" -files = [ - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, - {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, - {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, - {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, - {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, - {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, - {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, -] - -[[package]] -name = "scipy" -version = "1.13.1" -description = "Fundamental algorithms for scientific computing in Python" -optional = true -python-versions = ">=3.9" -files = [ - {file = "scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca"}, - {file = "scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f"}, - {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989"}, - {file = "scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"}, - {file = "scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94"}, - {file = "scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54"}, - {file = "scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9"}, - {file = "scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326"}, - {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299"}, - {file = "scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa"}, - {file = "scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59"}, - {file = "scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b"}, - {file = "scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1"}, - {file = "scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d"}, - {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627"}, - {file = "scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884"}, - {file = "scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16"}, - {file = "scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949"}, - {file = "scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5"}, - {file = "scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24"}, - {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004"}, - {file = "scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d"}, - {file = "scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c"}, - {file = "scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2"}, - {file = "scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c"}, -] - -[package.dependencies] -numpy = ">=1.22.4,<2.3" - -[package.extras] -dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] -doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] -test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] - -[[package]] -name = "seaborn" -version = "0.13.2" -description = "Statistical data visualization" -optional = true -python-versions = ">=3.8" -files = [ - {file = "seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987"}, - {file = "seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7"}, -] - -[package.dependencies] -matplotlib = ">=3.4,<3.6.1 || >3.6.1" -numpy = ">=1.20,<1.24.0 || >1.24.0" -pandas = ">=1.2" - -[package.extras] -dev = ["flake8", "flit", "mypy", "pandas-stubs", "pre-commit", "pytest", "pytest-cov", "pytest-xdist"] -docs = ["ipykernel", "nbconvert", "numpydoc", "pydata_sphinx_theme (==0.10.0rc2)", "pyyaml", "sphinx (<6.0.0)", "sphinx-copybutton", "sphinx-design", "sphinx-issues"] -stats = ["scipy (>=1.7)", "statsmodels (>=0.12)"] - -[[package]] -name = "sgmllib3k" -version = "1.0.0" -description = "Py3k port of sgmllib." -optional = true -python-versions = "*" -files = [ - {file = "sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9"}, -] - -[[package]] -name = "six" -version = "1.17.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, - {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, -] - -[[package]] -name = "smmap" -version = "5.0.1" -description = "A pure Python implementation of a sliding window memory map manager" -optional = false -python-versions = ">=3.7" -files = [ - {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"}, - {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = true -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "snowplow-tracker" -version = "1.0.4" -description = "Snowplow event tracker for Python. Add analytics to your Python and Django apps, webapps and games" -optional = false -python-versions = "*" -files = [ - {file = "snowplow_tracker-1.0.4-py3-none-any.whl", hash = "sha256:382e289811550f6ce7d5abc9e68590cc080ac9b21916b701b17497cfd6b32038"}, - {file = "snowplow_tracker-1.0.4.tar.gz", hash = "sha256:16d8a3c001a7847d91dc081d508324550c314a4cbf5d6106b5ab35f77fa34678"}, -] - -[package.dependencies] -requests = ">=2.25.1,<3.0" -types-requests = ">=2.25.1,<3.0" -typing-extensions = ">=3.7.4" - -[[package]] -name = "sqlparse" -version = "0.5.2" -description = "A non-validating SQL parser." -optional = false -python-versions = ">=3.8" -files = [ - {file = "sqlparse-0.5.2-py3-none-any.whl", hash = "sha256:e99bc85c78160918c3e1d9230834ab8d80fc06c59d03f8db2618f65f65dda55e"}, - {file = "sqlparse-0.5.2.tar.gz", hash = "sha256:9e37b35e16d1cc652a2545f0997c1deb23ea28fa1f3eefe609eee3063c3b105f"}, -] - -[package.extras] -dev = ["build", "hatch"] -doc = ["sphinx"] - -[[package]] -name = "statsmodels" -version = "0.14.4" -description = "Statistical computations and models for Python" -optional = true -python-versions = ">=3.9" -files = [ - {file = "statsmodels-0.14.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7a62f1fc9086e4b7ee789a6f66b3c0fc82dd8de1edda1522d30901a0aa45e42b"}, - {file = "statsmodels-0.14.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46ac7ddefac0c9b7b607eed1d47d11e26fe92a1bc1f4d9af48aeed4e21e87981"}, - {file = "statsmodels-0.14.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a337b731aa365d09bb0eab6da81446c04fde6c31976b1d8e3d3a911f0f1e07b"}, - {file = "statsmodels-0.14.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:631bb52159117c5da42ba94bd94859276b68cab25dc4cac86475bc24671143bc"}, - {file = "statsmodels-0.14.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3bb2e580d382545a65f298589809af29daeb15f9da2eb252af8f79693e618abc"}, - {file = "statsmodels-0.14.4-cp310-cp310-win_amd64.whl", hash = "sha256:9729642884147ee9db67b5a06a355890663d21f76ed608a56ac2ad98b94d201a"}, - {file = "statsmodels-0.14.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ed7e118e6e3e02d6723a079b8c97eaadeed943fa1f7f619f7148dfc7862670f"}, - {file = "statsmodels-0.14.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5f537f7d000de4a1708c63400755152b862cd4926bb81a86568e347c19c364b"}, - {file = "statsmodels-0.14.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa74aaa26eaa5012b0a01deeaa8a777595d0835d3d6c7175f2ac65435a7324d2"}, - {file = "statsmodels-0.14.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e332c2d9b806083d1797231280602340c5c913f90d4caa0213a6a54679ce9331"}, - {file = "statsmodels-0.14.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9c8fa28dfd75753d9cf62769ba1fecd7e73a0be187f35cc6f54076f98aa3f3f"}, - {file = "statsmodels-0.14.4-cp311-cp311-win_amd64.whl", hash = "sha256:a6087ecb0714f7c59eb24c22781491e6f1cfffb660b4740e167625ca4f052056"}, - {file = "statsmodels-0.14.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5221dba7424cf4f2561b22e9081de85f5bb871228581124a0d1b572708545199"}, - {file = "statsmodels-0.14.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:17672b30c6b98afe2b095591e32d1d66d4372f2651428e433f16a3667f19eabb"}, - {file = "statsmodels-0.14.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab5e6312213b8cfb9dca93dd46a0f4dccb856541f91d3306227c3d92f7659245"}, - {file = "statsmodels-0.14.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbb150620b53133d6cd1c5d14c28a4f85701e6c781d9b689b53681effaa655f"}, - {file = "statsmodels-0.14.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb695c2025d122a101c2aca66d2b78813c321b60d3a7c86bb8ec4467bb53b0f9"}, - {file = "statsmodels-0.14.4-cp312-cp312-win_amd64.whl", hash = "sha256:7f7917a51766b4e074da283c507a25048ad29a18e527207883d73535e0dc6184"}, - {file = "statsmodels-0.14.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5a24f5d2c22852d807d2b42daf3a61740820b28d8381daaf59dcb7055bf1a79"}, - {file = "statsmodels-0.14.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df4f7864606fa843d7e7c0e6af288f034a2160dba14e6ccc09020a3cf67cb092"}, - {file = "statsmodels-0.14.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91341cbde9e8bea5fb419a76e09114e221567d03f34ca26e6d67ae2c27d8fe3c"}, - {file = "statsmodels-0.14.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1322286a7bfdde2790bf72d29698a1b76c20b8423a55bdcd0d457969d0041f72"}, - {file = "statsmodels-0.14.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e31b95ac603415887c9f0d344cb523889cf779bc52d68e27e2d23c358958fec7"}, - {file = "statsmodels-0.14.4-cp313-cp313-win_amd64.whl", hash = "sha256:81030108d27aecc7995cac05aa280cf8c6025f6a6119894eef648997936c2dd0"}, - {file = "statsmodels-0.14.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4793b01b7a5f5424f5a1dbcefc614c83c7608aa2b035f087538253007c339d5d"}, - {file = "statsmodels-0.14.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d330da34f59f1653c5193f9fe3a3a258977c880746db7f155fc33713ea858db5"}, - {file = "statsmodels-0.14.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e9ddefba1d4e1107c1f20f601b0581421ea3ad9fd75ce3c2ba6a76b6dc4682c"}, - {file = "statsmodels-0.14.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f43da7957e00190104c5dd0f661bfc6dfc68b87313e3f9c4dbd5e7d222e0aeb"}, - {file = "statsmodels-0.14.4-cp39-cp39-win_amd64.whl", hash = "sha256:8286f69a5e1d0e0b366ffed5691140c83d3efc75da6dbf34a3d06e88abfaaab6"}, - {file = "statsmodels-0.14.4.tar.gz", hash = "sha256:5d69e0f39060dc72c067f9bb6e8033b6dccdb0bae101d76a7ef0bcc94e898b67"}, -] - -[package.dependencies] -numpy = ">=1.22.3,<3" -packaging = ">=21.3" -pandas = ">=1.4,<2.1.0 || >2.1.0" -patsy = ">=0.5.6" -scipy = ">=1.8,<1.9.2 || >1.9.2" - -[package.extras] -build = ["cython (>=3.0.10)"] -develop = ["colorama", "cython (>=3.0.10)", "cython (>=3.0.10,<4)", "flake8", "isort", "joblib", "matplotlib (>=3)", "pytest (>=7.3.0,<8)", "pytest-cov", "pytest-randomly", "pytest-xdist", "pywinpty", "setuptools-scm[toml] (>=8.0,<9.0)"] -docs = ["ipykernel", "jupyter-client", "matplotlib", "nbconvert", "nbformat", "numpydoc", "pandas-datareader", "sphinx"] - -[[package]] -name = "streamlit" -version = "1.40.2" -description = "A faster way to build and share data apps" -optional = true -python-versions = "!=3.9.7,>=3.9" -files = [ - {file = "streamlit-1.40.2-py2.py3-none-any.whl", hash = "sha256:7f6d1379a590f9625a6aee79ca73ceccff03cd2e05a3acbe5fe98915c27a7ffe"}, - {file = "streamlit-1.40.2.tar.gz", hash = "sha256:0cc131fc9b18065feaff8f6f241c81164ad37d8d9e3a85499a0240aaaf6a6a61"}, -] - -[package.dependencies] -altair = ">=4.0,<6" -blinker = ">=1.0.0,<2" -cachetools = ">=4.0,<6" -click = ">=7.0,<9" -gitpython = ">=3.0.7,<3.1.19 || >3.1.19,<4" -numpy = ">=1.23,<3" -packaging = ">=20,<25" -pandas = ">=1.4.0,<3" -pillow = ">=7.1.0,<12" -protobuf = ">=3.20,<6" -pyarrow = ">=7.0" -pydeck = ">=0.8.0b4,<1" -requests = ">=2.27,<3" -rich = ">=10.14.0,<14" -tenacity = ">=8.1.0,<10" -toml = ">=0.10.1,<2" -tornado = ">=6.0.3,<7" -typing-extensions = ">=4.3.0,<5" -watchdog = {version = ">=2.1.5,<7", markers = "platform_system != \"Darwin\""} - -[package.extras] -snowflake = ["snowflake-connector-python (>=2.8.0)", "snowflake-snowpark-python[modin] (>=1.17.0)"] - -[[package]] -name = "streamlit-ace" -version = "0.1.1" -description = "Ace editor component for Streamlit." -optional = true -python-versions = ">=3.6" -files = [ - {file = "streamlit_ace-0.1.1-py3-none-any.whl", hash = "sha256:cdf908a90058fa831fb720d29e2aef35a0f5799eac33e2b2e58ba9f3631b1aa5"}, - {file = "streamlit_ace-0.1.1.tar.gz", hash = "sha256:1852fa19707685fd4241be9256c1ab1a89da4a3b8c28ab286e3bff122d3a1686"}, -] - -[package.dependencies] -streamlit = ">=0.63" - -[[package]] -name = "streamlit-elements-fluence" -version = "0.1.4" -description = "React Components for Streamlit." -optional = true -python-versions = ">=3.6" -files = [ - {file = "streamlit-elements-fluence-0.1.4.tar.gz", hash = "sha256:79263c002918b67ddc5a6ee929aa3f80f091ddc3df0b4703ffda134187e255ef"}, - {file = "streamlit_elements_fluence-0.1.4-py3-none-any.whl", hash = "sha256:6bf8c7ee582e4b0edf4603dc5b743888bb9df878ee1bd1780c8db287cd08c303"}, -] - -[package.dependencies] -streamlit = ">=1.4.0" - -[[package]] -name = "tenacity" -version = "9.0.0" -description = "Retry code until it succeeds" -optional = true -python-versions = ">=3.8" -files = [ - {file = "tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539"}, - {file = "tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b"}, -] - -[package.extras] -doc = ["reno", "sphinx"] -test = ["pytest", "tornado (>=4.5)", "typeguard"] - -[[package]] -name = "text-unidecode" -version = "1.3" -description = "The most basic Text::Unidecode port" -optional = false -python-versions = "*" -files = [ - {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, - {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, -] - -[[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -optional = true -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] - -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - -[[package]] -name = "tomlkit" -version = "0.13.2" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, - {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, -] - -[[package]] -name = "tornado" -version = "6.4.2" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." -optional = true -python-versions = ">=3.8" -files = [ - {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1"}, - {file = "tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946"}, - {file = "tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73"}, - {file = "tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c"}, - {file = "tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482"}, - {file = "tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38"}, - {file = "tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b"}, -] - -[[package]] -name = "tqdm" -version = "4.67.1" -description = "Fast, Extensible Progress Meter" -optional = true -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2"}, - {file = "tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["nbval", "pytest (>=6)", "pytest-asyncio (>=0.24)", "pytest-cov", "pytest-timeout"] -discord = ["requests"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] - -[[package]] -name = "typeguard" -version = "4.4.1" -description = "Run-time type checker for Python" -optional = true -python-versions = ">=3.9" -files = [ - {file = "typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21"}, - {file = "typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} -typing-extensions = ">=4.10.0" - -[package.extras] -doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme (>=1.3.0)"] -test = ["coverage[toml] (>=7)", "mypy (>=1.2.0)", "pytest (>=7)"] - -[[package]] -name = "types-requests" -version = "2.32.0.20241016" -description = "Typing stubs for requests" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, - {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, -] - -[package.dependencies] -urllib3 = ">=2" - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "tzdata" -version = "2024.2" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, -] - -[[package]] -name = "urllib3" -version = "2.2.3" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "virtualenv" -version = "20.28.0" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.8" -files = [ - {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, - {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] - -[[package]] -name = "visions" -version = "0.7.6" -description = "Visions" -optional = true -python-versions = ">=3.8" -files = [ - {file = "visions-0.7.6-py3-none-any.whl", hash = "sha256:72b7f8dbc374e9d6055e938c8c67b0b8da52f3bcb8320f25d86b1a57457e7aa6"}, - {file = "visions-0.7.6.tar.gz", hash = "sha256:00f494a7f78917db2292e11ea832c6e026b64783e688b11da24f4c271ef1631d"}, -] - -[package.dependencies] -attrs = ">=19.3.0" -imagehash = {version = "*", optional = true, markers = "extra == \"type_image_path\""} -multimethod = ">=1.4" -networkx = ">=2.4" -numpy = ">=1.23.2" -pandas = ">=2.0.0" -Pillow = {version = "*", optional = true, markers = "extra == \"type_image_path\""} - -[package.extras] -all = ["Pillow", "attrs (>=19.3.0)", "imagehash", "matplotlib", "multimethod (>=1.4)", "networkx (>=2.4)", "numpy (>=1.23.2)", "pandas (>=2.0.0)", "pydot", "pygraphviz", "shapely"] -dev = ["IPython", "Sphinx-copybutton", "black (>=20.8b1)", "isort (>=5.0.9)", "mypy (>=0.770)", "nbsphinx", "recommonmark (>=0.6.0)", "setuptools (>=46.1.3)", "sphinx-autodoc-typehints (>=1.10.3)", "sphinx-rtd-theme (>=0.4.3)", "wheel (>=0.34.2)"] -plotting = ["matplotlib", "pydot", "pygraphviz"] -test = ["Pillow", "big-o (>=0.10.1)", "black (>=19.10b0)", "check-manifest (>=0.41)", "imagehash", "isort (>=5.0.9)", "matplotlib", "mypy (>=0.800)", "numba", "pandas", "pre-commit", "pyarrow (>=1.0.1)", "pydot", "pyspark", "pytest (>=5.2.0)", "pytest-spark (>=0.6.0)", "shapely", "twine (>=3.1.1)"] -type-geometry = ["shapely"] -type-image-path = ["Pillow", "imagehash"] - -[[package]] -name = "watchdog" -version = "6.0.0" -description = "Filesystem events monitoring" -optional = true -python-versions = ">=3.9" -files = [ - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1cdb490583ebd691c012b3d6dae011000fe42edb7a82ece80965b42abd61f26"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc64ab3bdb6a04d69d4023b29422170b74681784ffb9463ed4870cf2f3e66112"}, - {file = "watchdog-6.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c897ac1b55c5a1461e16dae288d22bb2e412ba9807df8397a635d88f671d36c3"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2"}, - {file = "watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860"}, - {file = "watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134"}, - {file = "watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e6f0e77c9417e7cd62af82529b10563db3423625c5fce018430b249bf977f9e8"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90c8e78f3b94014f7aaae121e6b909674df5b46ec24d6bebc45c44c56729af2a"}, - {file = "watchdog-6.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7631a77ffb1f7d2eefa4445ebbee491c720a5661ddf6df3498ebecae5ed375c"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:c7ac31a19f4545dd92fc25d200694098f42c9a8e391bc00bdd362c5736dbf881"}, - {file = "watchdog-6.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:9513f27a1a582d9808cf21a07dae516f0fab1cf2d7683a742c498b93eedabb11"}, - {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7a0e56874cfbc4b9b05c60c8a1926fedf56324bb08cfbc188969777940aef3aa"}, - {file = "watchdog-6.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6439e374fc012255b4ec786ae3c4bc838cd7309a540e5fe0952d03687d8804e"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c"}, - {file = "watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2"}, - {file = "watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a"}, - {file = "watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680"}, - {file = "watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f"}, - {file = "watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "wordcloud" -version = "1.9.4" -description = "A little word cloud generator" -optional = true -python-versions = ">=3.7" -files = [ - {file = "wordcloud-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:61a84e7311fce8415943edcb7b2ba65b4bfec1dc6dff8fe5a8ea76e278447fb2"}, - {file = "wordcloud-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e8752750726f31385f364823d3ef1d9c8ec829e5c07706c36beb40679945c71"}, - {file = "wordcloud-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:990dfd6dd43a1c7fa156be865eb98aba167a986b65f56cbf50e24772107fcd70"}, - {file = "wordcloud-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a70fe8999cd63aec64daa0377b720be6e5ff344963b828caeb4c2a081599a3a0"}, - {file = "wordcloud-1.9.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:37dcd5500cc2ea02950739390e89e2efa6624c2f54b5e2df1ee961fce685b2d7"}, - {file = "wordcloud-1.9.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5cc5c902dc2492b9fc0e29a1f5c688422d7e6eb9e5c0e43f0331d1c8e1341ba"}, - {file = "wordcloud-1.9.4-cp310-cp310-win32.whl", hash = "sha256:c20fbb51af2046c940b4fead4bafffc30b4191f5fb477c3af844446d8956bfd4"}, - {file = "wordcloud-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:61a153e76d73c72f5cc6c89ee80ddad70758a207c3c6b1d86be8635ec70164f1"}, - {file = "wordcloud-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af168eeaed67a675f35b5668a7804c4d64f8e4f62a273b909eb5cc39efc4c294"}, - {file = "wordcloud-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3092bf85cb20158c8b90d78650dc0226985109ac6fe13a0086ac47b9581b62ce"}, - {file = "wordcloud-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfb852f551681f5e33feb934505e060952b6aa98aaa48c781cdbf101f84e7cc"}, - {file = "wordcloud-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57ad8064a634a4870fcd00a9694c0a7839c6dfbac3d32522c69d5e1e9cbfd911"}, - {file = "wordcloud-1.9.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ea14858973ad8561a20a5475eb8d7ad33622bc5f27c60206fbb3e10a036cee26"}, - {file = "wordcloud-1.9.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b27759f12dd235468ff8c1df875b106b23dbf2c74aae05cdcdc3ccd8e23ea89c"}, - {file = "wordcloud-1.9.4-cp311-cp311-win32.whl", hash = "sha256:0ac3d87627022fb8cce17297298be96c91185edd55ecf8906f89f981b55974f0"}, - {file = "wordcloud-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:85368249df056527f1b64e80e68636abb61f0f6bd2d1c430894d2af1feea7f73"}, - {file = "wordcloud-1.9.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3910494ce5acb27731fd5678d146e8aa8f588d5fdb455810c817ff4b84ee0f67"}, - {file = "wordcloud-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b1c29a0089ee90778700cc96305fa830a6a5bbb342eaaa59d6ac8d37a9b232f"}, - {file = "wordcloud-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f369ae7bef16341c2bb208e658d5e4c56517046eb6176f89ac95525eaf8ace09"}, - {file = "wordcloud-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ec6ffba61ca20123e7c09103a5692bbc3163f75ee0bdc7893e80e0e2786ccd2"}, - {file = "wordcloud-1.9.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cdc4aac2bcce77fd91dbfe91db5a8c0cdc239e10d8954356d2ebf79a3b43646c"}, - {file = "wordcloud-1.9.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e4942fbed48a88a0c42c5b0a057651fc09d26b31be8b6c069adaaa5051836040"}, - {file = "wordcloud-1.9.4-cp312-cp312-win32.whl", hash = "sha256:96b801fe4b2aa39bb6c5e68b4a74c81fd8996dd5fb5cea31fda518dc5f77ad82"}, - {file = "wordcloud-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:360977705d0808a1795fcbe98afb5dc4833cb4bb8e421cbb10e93ef0bce816ff"}, - {file = "wordcloud-1.9.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:88c4c99f43b13df0e812fac0e4680cca2afd3ce16ade506812127ed7c7b9d132"}, - {file = "wordcloud-1.9.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2367ec70b2f195c278f91caf4674871ee9218eb57250e01a02b986d34e55f88e"}, - {file = "wordcloud-1.9.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6104a52936886dbc785844ab6986b5321a312238abb242ee4062c7b3fdcca7c"}, - {file = "wordcloud-1.9.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:81bbe75b2725730bf5cbabfe86a5c38960e7ce1166f76ba7001964d8de50b3a7"}, - {file = "wordcloud-1.9.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a936b8e03c32cc84c99ad8f1bdaf261dfef6c44d31ca5b0c7d0df147220dbb3c"}, - {file = "wordcloud-1.9.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:046300566df97b48640bd3efd94957a56941ada98cc23f811bc3f9b6a0ac1350"}, - {file = "wordcloud-1.9.4-cp313-cp313-win32.whl", hash = "sha256:22357990a01d87579dbd38a06c2a5c7b601179c4e17517b1b8f73d25faa6a5ed"}, - {file = "wordcloud-1.9.4-cp313-cp313-win_amd64.whl", hash = "sha256:8c9a5af2fbcf029a19e827adbee58e86efe7536dca7a42380a8601113a86069b"}, - {file = "wordcloud-1.9.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:42affa75c1b033cb0a0afb674f653c4af16d51d97a0852c5770b659b903d9af5"}, - {file = "wordcloud-1.9.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e0876722c35cf4d5d7717ab81ba98b946e07b0e869252248fdd9ea1fd6c977cc"}, - {file = "wordcloud-1.9.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:489079ef173fe83ccff8baffd7a3c2d5fedfd31221c25ad21b4de770ea37b49f"}, - {file = "wordcloud-1.9.4-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:3f3dc2dacca48eac9b130a8938b473db81cfbeeb1a738530a7098913941a8211"}, - {file = "wordcloud-1.9.4-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:2e509c4588ae2ce47ee5cc5cf353422e7f7ecc38f450998654ed50565c8a550d"}, - {file = "wordcloud-1.9.4-cp37-cp37m-win32.whl", hash = "sha256:8009f53ba0c3b2d6f2b1dad83e0fb165ebcdfbd000ce62ebe0917106f51d975d"}, - {file = "wordcloud-1.9.4-cp37-cp37m-win_amd64.whl", hash = "sha256:30b1a59b9073eaaa4f2b0f27d5b6b6c3eb6aaa3a6e0b3dbb2220036b25b37dac"}, - {file = "wordcloud-1.9.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8a685babefe032716c1a00b7d8cec3f6bfdc1c89fd839578432fc53824a02fea"}, - {file = "wordcloud-1.9.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b78b9fb292a243cf8fcdf63b9cc1fd157ec6abbf1a6e675303668b85e948f616"}, - {file = "wordcloud-1.9.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f51ab42c00bc4782ab45701de45226a269ca0850df14e1bd63a60da73271724e"}, - {file = "wordcloud-1.9.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38ee69d9404504cf2419d60c3017af7ab9e88f4ba6cf47bc1c96b2d5e58ef513"}, - {file = "wordcloud-1.9.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9955223708f196c1e431ae3b86074409bc256c5868e4f50eb9c36c6f06f8b1a3"}, - {file = "wordcloud-1.9.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:3585ab8f4f09f1508f2d351ed48f9b56472ae26eaf6e2d2e76e975abd715d7a2"}, - {file = "wordcloud-1.9.4-cp38-cp38-win32.whl", hash = "sha256:d7d0b89c2ada0e65d84a6ebbdd8d36876b5da1a143cce2f7dcdaff6714232d24"}, - {file = "wordcloud-1.9.4-cp38-cp38-win_amd64.whl", hash = "sha256:bd7caefe91d4084c1608d816052eeb605d9a7aee0c908f3a9d7421ee6363bde0"}, - {file = "wordcloud-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5b2f7195adef0a071dc24a568d8a7715bc5cf5d752b4560f51da3aa4467dcf8"}, - {file = "wordcloud-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:34843fa49135c4ed3739dea050696e707fd00e7335ee4ed62c33639589f90adf"}, - {file = "wordcloud-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6570cc4e48e8e951d24ef6599cd8bf7ff405fbe995ff6d596bcdfa290a6206a8"}, - {file = "wordcloud-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17f944805a17b8343eb877c9aa1dc9e5339eb14c02dd00ec80feccea899bbf81"}, - {file = "wordcloud-1.9.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7c1cd2a6ef876f5f9fe0255e44f131a6113f883447ed1cf8bdb86f569603bac9"}, - {file = "wordcloud-1.9.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2b129584327ba21d05869fcf9495f10f7b31a34a580c431c4942a71ce2317e79"}, - {file = "wordcloud-1.9.4-cp39-cp39-win32.whl", hash = "sha256:526dfd822600f158210a191a59cc4bdcaaa1ff05ab2aa199040d857a518b1db6"}, - {file = "wordcloud-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac32b851a19b7d2a9ee5e0aebc8210bf16eadc42c5c0da82e36d447552c8ec48"}, - {file = "wordcloud-1.9.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f733cca468eae79af83cdda1de2434f1799cefef461ed892e7679d5a4c929fa1"}, - {file = "wordcloud-1.9.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a99f96efe5983c6eed17abb8766ced713ddf18b26450da74addc91570922e62"}, - {file = "wordcloud-1.9.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80773ec6a9caa2048602bc347151e3b6e68e1d8fab148dfd0d2e7d4302ce5c01"}, - {file = "wordcloud-1.9.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ca95392bba150190cca8df4a97854b554bdeb28007f28bf4698bd7e1af91b310"}, - {file = "wordcloud-1.9.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:eed94b42676f4cfa9b9bdac777e3a1f046b16250216dd8ddcb583c4b6e4b1286"}, - {file = "wordcloud-1.9.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b38aae2ff7aa10ad00d57a5b87ed4a573ef04dbc9119d4a304349c9cb3e03b6e"}, - {file = "wordcloud-1.9.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3057be0d071afd57afb9be84fec767abdd78eac6396ead0f0f55c6775170945"}, - {file = "wordcloud-1.9.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:9c39351d2cffc15e3794f7afab78e9135d700f61c5b51904c55d9f3729d1a0df"}, - {file = "wordcloud-1.9.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:914745f0312d248c1a0e1f16ae7b3ce82f78924a2b050ca912d2453c62586da4"}, - {file = "wordcloud-1.9.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:885d51d20cc7b0dad2306fb76b867de20e759e005a1a6e183f3865b5e5f53985"}, - {file = "wordcloud-1.9.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61fc126ed9ce8d55bf20acbdc00284f5a6da66900197a2dd7b62c5ac37585ac5"}, - {file = "wordcloud-1.9.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c7b8536955f5026b0587ff829265392185b6b4bc923f2ed933c805fcac412b28"}, - {file = "wordcloud-1.9.4-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6a30ed8aa50b98edb113f72ef619581c221ba3678adeeed88345263c90092561"}, - {file = "wordcloud-1.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a62627e5b081b23a4586104d4b01d064db7b53342ae123b511326585eaf7433c"}, - {file = "wordcloud-1.9.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e137493365770f59655c7308ff76addc95ada2c6bd50ac119e4c33091e2e4e08"}, - {file = "wordcloud-1.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:665f8e7de3dcc1e43aa5bdd9560d56ed51026ba638a33472eede2b9051108adb"}, - {file = "wordcloud-1.9.4.tar.gz", hash = "sha256:b273d8a5ded97d3ead904046b49464dcb71119ee79df875072a4c105cadd347a"}, -] - -[package.dependencies] -matplotlib = "*" -numpy = ">=1.6.1" -pillow = "*" - -[[package]] -name = "ydata-profiling" -version = "4.12.1" -description = "Generate profile report for pandas DataFrame" -optional = true -python-versions = "<3.13,>=3.7" -files = [ - {file = "ydata-profiling-4.12.1.tar.gz", hash = "sha256:341b23bbf220a03639a0e2a4b58c4c663cb0a8d73dd27b6f93fa86406cd16cc1"}, - {file = "ydata_profiling-4.12.1-py2.py3-none-any.whl", hash = "sha256:c14e148dfc779540203acd17b2298171a72c8098c7e2481f8030f50d6f0dc4b5"}, -] - -[package.dependencies] -dacite = ">=1.8" -htmlmin = "0.1.12" -imagehash = "4.3.1" -jinja2 = ">=2.11.1,<3.2" -matplotlib = ">=3.5,<3.10" -multimethod = ">=1.4,<2" -numba = ">=0.56.0,<1" -numpy = ">=1.16.0,<2.2" -pandas = ">1.1,<1.4.0 || >1.4.0,<3" -phik = ">=0.11.1,<0.13" -pydantic = ">=2" -PyYAML = ">=5.0.0,<6.1" -requests = ">=2.24.0,<3" -scipy = ">=1.4.1,<1.14" -seaborn = ">=0.10.1,<0.14" -statsmodels = ">=0.13.2,<1" -tqdm = ">=4.48.2,<5" -typeguard = ">=3,<5" -visions = {version = ">=0.7.5,<0.7.7", extras = ["type-image-path"]} -wordcloud = ">=1.9.3" - -[package.extras] -notebook = ["ipywidgets (>=7.5.1)", "jupyter (>=1.0.0)"] -unicode = ["tangled-up-in-unicode (==0.2.0)"] - -[[package]] -name = "zipp" -version = "3.21.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - -[extras] -duckdb = ["dbt-duckdb"] -postgres = ["dbt-postgres"] -sqlite = ["dbt-sqlite"] -workbench = ["feedparser", "streamlit", "streamlit-ace", "streamlit-elements-fluence", "ydata-profiling"] - -[metadata] -lock-version = "2.0" -python-versions = ">=3.9,<3.9.7 || >3.9.7,<3.12" -content-hash = "e32966c1d87c49adb89e8b50d9e15ae7c36bd8ec4270c8b38c043094615b9d19" diff --git a/pyproject.toml b/pyproject.toml index 7d01d01d..4c03dcc2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,10 +1,15 @@ -[tool.poetry] +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] name = "dbt-osmosis" version = "0.14.0" -description = "A dbt server and suite of optional developer tools to make developing with dbt delightful." -authors = ["z3z1ma "] -license = "Apache-2.0" +description = "A dbt utility for managing YAML to make developing with dbt more delightful." readme = "README.md" +license = { text = "Apache-2.0" } +authors = [{ name = "z3z1ma", email = "butler.alex2010@gmail.com" }] +keywords = ["dbt", "yaml", "osmosis"] classifiers = [ "Development Status :: 4 - Beta", "Intended Audience :: Developers", @@ -16,66 +21,53 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", ] -keywords = ["dbt", "server", "streamlit", "git", "diff"] -documentation = "https://github.com/z3z1ma/dbt-osmosis" -repository = "https://github.com/z3z1ma/dbt-osmosis" +requires-python = ">=3.9,<3.13" -[tool.poetry.dependencies] -python = ">=3.9,<3.9.7 || >3.9.7,<3.13" -click = ">7" -dbt-core = ">=1.8,<1.10" -"ruamel.yaml" = ">=0.17" -rich = ">=10" -GitPython = ">3,<4" -# Streamlit Workbench Dependencies -streamlit = { version = ">=1.20.0", optional = true } -streamlit-ace = { version = ">=0.1.0", optional = true } -ydata-profiling = { version = ">=3.6.0", optional = true } -feedparser = { version = "^6.0.10", optional = true } -streamlit-elements-fluence = { version = ">=0.1.4", optional = true } -# Testing duckdb + sqlite -dbt-duckdb = { version = ">=1.0.0", optional = true } -dbt-sqlite = { version = ">=1.0.0", optional = true } -dbt-postgres = { version = ">=1.0.0", optional = true } -openai = { version = ">0.28.0", optional = true } +dependencies = [ + "click>7", + "dbt-core>=1.8,<1.10", + "ruamel.yaml>=0.17", + "rich>=10", + "GitPython>3,<4", +] -[tool.poetry.extras] -duckdb = ["dbt-duckdb"] -sqlite = ["dbt-sqlite"] -postgres = ["dbt-postgres"] +[project.optional-dependencies] +duckdb = ["dbt-duckdb>=1.0.0"] +sqlite = ["dbt-sqlite>=1.0.0"] +postgres = ["dbt-postgres>=1.0.0"] workbench = [ - "streamlit", - "streamlit-ace", - "ydata-profiling", - "feedparser", - "streamlit-elements-fluence", + "streamlit>=1.20.0", + "streamlit-ace>=0.1.0", + "ydata-profiling>=3.6.0", + "feedparser~=6.0.10", + "streamlit-elements-fluence>=0.1.4", ] -[tool.poetry.group.dev.dependencies] -black = ">=21.9b0" -mypy = ">=0.910" -pylint = ">=2.11.1" -pytest = "^7.4.2" -pre-commit = ">3.0.0" +openai = ["openai>0.28.0"] -[build-system] -requires = ["poetry-core>=1.0.0"] -build-backend = "poetry.core.masonry.api" -[tool.black] # https://black.readthedocs.io/en/stable/usage_and_configuration/the_basics.html#configuration-via-a-file +dev = [ + "black>=21.9b0", + "mypy>=0.910", + "pylint>=2.11.1", + "pytest~=7.4.2", + "pre-commit>3.0.0", +] + +[project.scripts] +"dbt-osmosis" = "dbt_osmosis.main:cli" + +[tool.black] line-length = 100 target-version = ["py39", "py310", "py311"] preview = true -[tool.isort] # https://pycqa.github.io/isort/docs/configuration/options.html +[tool.isort] color_output = true line_length = 100 profile = "black" src_paths = ["src"] include_trailing_comma = true -[tool.ruff] # https://beta.ruff.rs/docs/configuration/#using-pyprojecttoml +[tool.ruff] line-length = 100 - -[tool.poetry.scripts] -dbt-osmosis = 'dbt_osmosis.main:cli' diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..b4223742 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,89 @@ +# This file was autogenerated by uv via the following command: +# uv export --no-hashes --extra duckdb --extra sqlite --extra dev +-e . +agate==1.9.1 +annotated-types==0.7.0 +astroid==3.3.8 +attrs==24.3.0 +babel==2.16.0 +black==24.10.0 +certifi==2024.12.14 +cfgv==3.4.0 +charset-normalizer==3.4.1 +click==8.1.8 +colorama==0.4.6 +daff==1.3.46 +dbt-adapters==1.13.0 +dbt-common==1.14.0 +dbt-core==1.9.1 +dbt-duckdb==1.9.1 +dbt-extractor==0.5.1 +dbt-semantic-interfaces==0.7.4 +dbt-sqlite==1.4.0 +deepdiff==7.0.1 +dill==0.3.9 +distlib==0.3.9 +duckdb==1.1.3 +exceptiongroup==1.2.2 ; python_full_version < '3.11' +filelock==3.16.1 +gitdb==4.0.11 +gitpython==3.1.43 +identify==2.6.4 +idna==3.10 +importlib-metadata==6.11.0 +iniconfig==2.0.0 +isodate==0.6.1 +isort==5.13.2 +jinja2==3.1.5 +jsonschema==4.23.0 +jsonschema-specifications==2024.10.1 +leather==0.4.0 +markdown-it-py==3.0.0 +markupsafe==3.0.2 +mashumaro==3.14 +mccabe==0.7.0 +mdurl==0.1.2 +more-itertools==10.5.0 +msgpack==1.1.0 +mypy==1.14.1 +mypy-extensions==1.0.0 +networkx==3.2.1 ; python_full_version < '3.10' +networkx==3.4.2 ; python_full_version >= '3.10' +nodeenv==1.9.1 +ordered-set==4.1.0 +packaging==24.2 +parsedatetime==2.6 +pathspec==0.12.1 +platformdirs==4.3.6 +pluggy==1.5.0 +pre-commit==4.0.1 +protobuf==5.29.2 +pydantic==2.10.4 +pydantic-core==2.27.2 +pygments==2.18.0 +pylint==3.3.3 +pytest==7.4.4 +python-dateutil==2.9.0.post0 +python-slugify==8.0.4 +pytimeparse==1.1.8 +pytz==2024.2 +pyyaml==6.0.2 +referencing==0.35.1 +requests==2.32.3 +rich==13.9.4 +rpds-py==0.22.3 +ruamel-yaml==0.18.7 +ruamel-yaml-clib==0.2.12 ; platform_python_implementation == 'CPython' +six==1.17.0 +smmap==5.0.1 +snowplow-tracker==1.0.4 +sqlparse==0.5.3 +text-unidecode==1.3 +tomli==2.2.1 ; python_full_version < '3.11' +tomlkit==0.13.2 +types-requests==2.32.0.20241016 +typing-extensions==4.12.2 +tzdata==2024.2 ; sys_platform == 'win32' +urllib3==2.3.0 +virtualenv==20.28.0 +zipp==3.21.0 diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index e274618c..10e26de7 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -59,6 +59,7 @@ T = t.TypeVar("T") EMPTY_STRING = "" +"""A null string constant for use in placeholder lists, this is always considered undocumented""" SKIP_PATTERNS = "_column_ignore_patterns" """This key is used to skip certain column name patterns in dbt-osmosis""" diff --git a/uv.lock b/uv.lock new file mode 100644 index 00000000..2c0826ae --- /dev/null +++ b/uv.lock @@ -0,0 +1,2942 @@ +version = 1 +requires-python = ">=3.9, <3.13" +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", + "python_full_version < '3.10'", +] + +[[package]] +name = "agate" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "babel" }, + { name = "isodate" }, + { name = "leather" }, + { name = "parsedatetime" }, + { name = "python-slugify" }, + { name = "pytimeparse" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/29/77/6f5df1c68bf056f5fdefc60ccc616303c6211e71cd6033c830c12735f605/agate-1.9.1.tar.gz", hash = "sha256:bc60880c2ee59636a2a80cd8603d63f995be64526abf3cbba12f00767bcd5b3d", size = 202303 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/53/89b197cb472a3175d73384761a3413fd58e6b65a794c1102d148b8de87bd/agate-1.9.1-py2.py3-none-any.whl", hash = "sha256:1cf329510b3dde07c4ad1740b7587c9c679abc3dcd92bb1107eabc10c2e03c50", size = 95085 }, +] + +[[package]] +name = "altair" +version = "5.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "narwhals" }, + { name = "packaging" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/16/b1/f2969c7bdb8ad8bbdda031687defdce2c19afba2aa2c8e1d2a17f78376d8/altair-5.5.0.tar.gz", hash = "sha256:d960ebe6178c56de3855a68c47b516be38640b73fb3b5111c2a9ca90546dd73d", size = 705305 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/f3/0b6ced594e51cc95d8c1fc1640d3623770d01e4969d29c0bd09945fafefa/altair-5.5.0-py3-none-any.whl", hash = "sha256:91a310b926508d560fe0148d02a194f38b824122641ef528113d029fcd129f8c", size = 731200 }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, +] + +[[package]] +name = "anyio" +version = "4.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "idna" }, + { name = "sniffio" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f6/40/318e58f669b1a9e00f5c4453910682e2d9dd594334539c7b7817dabb765f/anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48", size = 177076 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/7a/4daaf3b6c08ad7ceffea4634ec206faeff697526421c20f07628c7372156/anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352", size = 93052 }, +] + +[[package]] +name = "astroid" +version = "3.3.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/c5/5c83c48bbf547f3dd8b587529db7cf5a265a3368b33e85e76af8ff6061d3/astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b", size = 398196 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/28/0bc8a17d6cd4cc3c79ae41b7105a2b9a327c110e5ddd37a8a27b29a5c8a2/astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c", size = 275153 }, +] + +[[package]] +name = "attrs" +version = "24.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/48/c8/6260f8ccc11f0917360fc0da435c5c9c7504e3db174d5a12a1494887b045/attrs-24.3.0.tar.gz", hash = "sha256:8f5c07333d543103541ba7be0e2ce16eeee8130cb0b3f9238ab904ce1e85baff", size = 805984 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/89/aa/ab0f7891a01eeb2d2e338ae8fecbe57fcebea1a24dbb64d45801bfab481d/attrs-24.3.0-py3-none-any.whl", hash = "sha256:ac96cd038792094f438ad1f6ff80837353805ac950cd2aa0e0625ef19850c308", size = 63397 }, +] + +[[package]] +name = "babel" +version = "2.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/74/f1bc80f23eeba13393b7222b11d95ca3af2c1e28edca18af487137eefed9/babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316", size = 9348104 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 }, +] + +[[package]] +name = "black" +version = "24.10.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "mypy-extensions" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/0d/cc2fb42b8c50d80143221515dd7e4766995bd07c56c9a3ed30baf080b6dc/black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875", size = 645813 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/f3/465c0eb5cddf7dbbfe1fecd9b875d1dcf51b88923cd2c1d7e9ab95c6336b/black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812", size = 1623211 }, + { url = "https://files.pythonhosted.org/packages/df/57/b6d2da7d200773fdfcc224ffb87052cf283cec4d7102fab450b4a05996d8/black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea", size = 1457139 }, + { url = "https://files.pythonhosted.org/packages/6e/c5/9023b7673904a5188f9be81f5e129fff69f51f5515655fbd1d5a4e80a47b/black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f", size = 1753774 }, + { url = "https://files.pythonhosted.org/packages/e1/32/df7f18bd0e724e0d9748829765455d6643ec847b3f87e77456fc99d0edab/black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e", size = 1414209 }, + { url = "https://files.pythonhosted.org/packages/c2/cc/7496bb63a9b06a954d3d0ac9fe7a73f3bf1cd92d7a58877c27f4ad1e9d41/black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad", size = 1607468 }, + { url = "https://files.pythonhosted.org/packages/2b/e3/69a738fb5ba18b5422f50b4f143544c664d7da40f09c13969b2fd52900e0/black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50", size = 1437270 }, + { url = "https://files.pythonhosted.org/packages/c9/9b/2db8045b45844665c720dcfe292fdaf2e49825810c0103e1191515fc101a/black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392", size = 1737061 }, + { url = "https://files.pythonhosted.org/packages/a3/95/17d4a09a5be5f8c65aa4a361444d95edc45def0de887810f508d3f65db7a/black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175", size = 1423293 }, + { url = "https://files.pythonhosted.org/packages/90/04/bf74c71f592bcd761610bbf67e23e6a3cff824780761f536512437f1e655/black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3", size = 1644256 }, + { url = "https://files.pythonhosted.org/packages/4c/ea/a77bab4cf1887f4b2e0bce5516ea0b3ff7d04ba96af21d65024629afedb6/black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65", size = 1448534 }, + { url = "https://files.pythonhosted.org/packages/4e/3e/443ef8bc1fbda78e61f79157f303893f3fddf19ca3c8989b163eb3469a12/black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f", size = 1761892 }, + { url = "https://files.pythonhosted.org/packages/52/93/eac95ff229049a6901bc84fec6908a5124b8a0b7c26ea766b3b8a5debd22/black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8", size = 1434796 }, + { url = "https://files.pythonhosted.org/packages/fe/02/f408c804e0ee78c367dcea0a01aedde4f1712af93b8b6e60df981e0228c7/black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd", size = 1622516 }, + { url = "https://files.pythonhosted.org/packages/f8/b9/9b706ed2f55bfb28b436225a9c57da35990c9005b90b8c91f03924454ad7/black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f", size = 1456181 }, + { url = "https://files.pythonhosted.org/packages/0a/1c/314d7f17434a5375682ad097f6f4cc0e3f414f3c95a9b1bb4df14a0f11f9/black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800", size = 1752801 }, + { url = "https://files.pythonhosted.org/packages/39/a7/20e5cd9237d28ad0b31438de5d9f01c8b99814576f4c0cda1edd62caf4b0/black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7", size = 1413626 }, + { url = "https://files.pythonhosted.org/packages/8d/a7/4b27c50537ebca8bec139b872861f9d2bf501c5ec51fcf897cb924d9e264/black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d", size = 206898 }, +] + +[[package]] +name = "blinker" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/28/9b3f50ce0e048515135495f198351908d99540d69bfdc8c1d15b73dc55ce/blinker-1.9.0.tar.gz", hash = "sha256:b4ce2265a7abece45e7cc896e98dbebe6cead56bcf805a3d23136d145f5445bf", size = 22460 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/cb/f2ad4230dc2eb1a74edf38f1a38b9b52277f75bef262d8908e60d957e13c/blinker-1.9.0-py3-none-any.whl", hash = "sha256:ba0efaa9080b619ff2f3459d1d500c57bddea4a6b424b60a91141db6fd2f08bc", size = 8458 }, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/38/a0f315319737ecf45b4319a8cd1f3a908e29d9277b46942263292115eee7/cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a", size = 27661 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/07/14f8ad37f2d12a5ce41206c21820d8cb6561b728e51fad4530dff0552a67/cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292", size = 9524 }, +] + +[[package]] +name = "certifi" +version = "2024.12.14" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/bd/1d41ee578ce09523c81a15426705dd20969f5abf006d1afe8aeff0dd776a/certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db", size = 166010 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/32/8f6669fc4798494966bf446c8c4a162e0b5d893dff088afddf76414f70e1/certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56", size = 164927 }, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249 }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/16/b0/572805e227f01586461c80e0fd25d65a2115599cc9dad142fee4b747c357/charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", size = 123188 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/58/5580c1716040bc89206c77d8f74418caf82ce519aae06450393ca73475d1/charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", size = 198013 }, + { url = "https://files.pythonhosted.org/packages/d0/11/00341177ae71c6f5159a08168bcb98c6e6d196d372c94511f9f6c9afe0c6/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", size = 141285 }, + { url = "https://files.pythonhosted.org/packages/01/09/11d684ea5819e5a8f5100fb0b38cf8d02b514746607934134d31233e02c8/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", size = 151449 }, + { url = "https://files.pythonhosted.org/packages/08/06/9f5a12939db324d905dc1f70591ae7d7898d030d7662f0d426e2286f68c9/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", size = 143892 }, + { url = "https://files.pythonhosted.org/packages/93/62/5e89cdfe04584cb7f4d36003ffa2936681b03ecc0754f8e969c2becb7e24/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", size = 146123 }, + { url = "https://files.pythonhosted.org/packages/a9/ac/ab729a15c516da2ab70a05f8722ecfccc3f04ed7a18e45c75bbbaa347d61/charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", size = 147943 }, + { url = "https://files.pythonhosted.org/packages/03/d2/3f392f23f042615689456e9a274640c1d2e5dd1d52de36ab8f7955f8f050/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", size = 142063 }, + { url = "https://files.pythonhosted.org/packages/f2/e3/e20aae5e1039a2cd9b08d9205f52142329f887f8cf70da3650326670bddf/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", size = 150578 }, + { url = "https://files.pythonhosted.org/packages/8d/af/779ad72a4da0aed925e1139d458adc486e61076d7ecdcc09e610ea8678db/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", size = 153629 }, + { url = "https://files.pythonhosted.org/packages/c2/b6/7aa450b278e7aa92cf7732140bfd8be21f5f29d5bf334ae987c945276639/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", size = 150778 }, + { url = "https://files.pythonhosted.org/packages/39/f4/d9f4f712d0951dcbfd42920d3db81b00dd23b6ab520419626f4023334056/charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", size = 146453 }, + { url = "https://files.pythonhosted.org/packages/49/2b/999d0314e4ee0cff3cb83e6bc9aeddd397eeed693edb4facb901eb8fbb69/charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", size = 95479 }, + { url = "https://files.pythonhosted.org/packages/2d/ce/3cbed41cff67e455a386fb5e5dd8906cdda2ed92fbc6297921f2e4419309/charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", size = 102790 }, + { url = "https://files.pythonhosted.org/packages/72/80/41ef5d5a7935d2d3a773e3eaebf0a9350542f2cab4eac59a7a4741fbbbbe/charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", size = 194995 }, + { url = "https://files.pythonhosted.org/packages/7a/28/0b9fefa7b8b080ec492110af6d88aa3dea91c464b17d53474b6e9ba5d2c5/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", size = 139471 }, + { url = "https://files.pythonhosted.org/packages/71/64/d24ab1a997efb06402e3fc07317e94da358e2585165930d9d59ad45fcae2/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", size = 149831 }, + { url = "https://files.pythonhosted.org/packages/37/ed/be39e5258e198655240db5e19e0b11379163ad7070962d6b0c87ed2c4d39/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", size = 142335 }, + { url = "https://files.pythonhosted.org/packages/88/83/489e9504711fa05d8dde1574996408026bdbdbd938f23be67deebb5eca92/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", size = 143862 }, + { url = "https://files.pythonhosted.org/packages/c6/c7/32da20821cf387b759ad24627a9aca289d2822de929b8a41b6241767b461/charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", size = 145673 }, + { url = "https://files.pythonhosted.org/packages/68/85/f4288e96039abdd5aeb5c546fa20a37b50da71b5cf01e75e87f16cd43304/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", size = 140211 }, + { url = "https://files.pythonhosted.org/packages/28/a3/a42e70d03cbdabc18997baf4f0227c73591a08041c149e710045c281f97b/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", size = 148039 }, + { url = "https://files.pythonhosted.org/packages/85/e4/65699e8ab3014ecbe6f5c71d1a55d810fb716bbfd74f6283d5c2aa87febf/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", size = 151939 }, + { url = "https://files.pythonhosted.org/packages/b1/82/8e9fe624cc5374193de6860aba3ea8070f584c8565ee77c168ec13274bd2/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", size = 149075 }, + { url = "https://files.pythonhosted.org/packages/3d/7b/82865ba54c765560c8433f65e8acb9217cb839a9e32b42af4aa8e945870f/charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", size = 144340 }, + { url = "https://files.pythonhosted.org/packages/b5/b6/9674a4b7d4d99a0d2df9b215da766ee682718f88055751e1e5e753c82db0/charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", size = 95205 }, + { url = "https://files.pythonhosted.org/packages/1e/ab/45b180e175de4402dcf7547e4fb617283bae54ce35c27930a6f35b6bef15/charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", size = 102441 }, + { url = "https://files.pythonhosted.org/packages/0a/9a/dd1e1cdceb841925b7798369a09279bd1cf183cef0f9ddf15a3a6502ee45/charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", size = 196105 }, + { url = "https://files.pythonhosted.org/packages/d3/8c/90bfabf8c4809ecb648f39794cf2a84ff2e7d2a6cf159fe68d9a26160467/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", size = 140404 }, + { url = "https://files.pythonhosted.org/packages/ad/8f/e410d57c721945ea3b4f1a04b74f70ce8fa800d393d72899f0a40526401f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", size = 150423 }, + { url = "https://files.pythonhosted.org/packages/f0/b8/e6825e25deb691ff98cf5c9072ee0605dc2acfca98af70c2d1b1bc75190d/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", size = 143184 }, + { url = "https://files.pythonhosted.org/packages/3e/a2/513f6cbe752421f16d969e32f3583762bfd583848b763913ddab8d9bfd4f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", size = 145268 }, + { url = "https://files.pythonhosted.org/packages/74/94/8a5277664f27c3c438546f3eb53b33f5b19568eb7424736bdc440a88a31f/charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616", size = 147601 }, + { url = "https://files.pythonhosted.org/packages/7c/5f/6d352c51ee763623a98e31194823518e09bfa48be2a7e8383cf691bbb3d0/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", size = 141098 }, + { url = "https://files.pythonhosted.org/packages/78/d4/f5704cb629ba5ab16d1d3d741396aec6dc3ca2b67757c45b0599bb010478/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", size = 149520 }, + { url = "https://files.pythonhosted.org/packages/c5/96/64120b1d02b81785f222b976c0fb79a35875457fa9bb40827678e54d1bc8/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", size = 152852 }, + { url = "https://files.pythonhosted.org/packages/84/c9/98e3732278a99f47d487fd3468bc60b882920cef29d1fa6ca460a1fdf4e6/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", size = 150488 }, + { url = "https://files.pythonhosted.org/packages/13/0e/9c8d4cb99c98c1007cc11eda969ebfe837bbbd0acdb4736d228ccaabcd22/charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", size = 146192 }, + { url = "https://files.pythonhosted.org/packages/b2/21/2b6b5b860781a0b49427309cb8670785aa543fb2178de875b87b9cc97746/charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", size = 95550 }, + { url = "https://files.pythonhosted.org/packages/21/5b/1b390b03b1d16c7e382b561c5329f83cc06623916aab983e8ab9239c7d5c/charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", size = 102785 }, + { url = "https://files.pythonhosted.org/packages/7f/c0/b913f8f02836ed9ab32ea643c6fe4d3325c3d8627cf6e78098671cafff86/charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41", size = 197867 }, + { url = "https://files.pythonhosted.org/packages/0f/6c/2bee440303d705b6fb1e2ec789543edec83d32d258299b16eed28aad48e0/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f", size = 141385 }, + { url = "https://files.pythonhosted.org/packages/3d/04/cb42585f07f6f9fd3219ffb6f37d5a39b4fd2db2355b23683060029c35f7/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2", size = 151367 }, + { url = "https://files.pythonhosted.org/packages/54/54/2412a5b093acb17f0222de007cc129ec0e0df198b5ad2ce5699355269dfe/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770", size = 143928 }, + { url = "https://files.pythonhosted.org/packages/5a/6d/e2773862b043dcf8a221342954f375392bb2ce6487bcd9f2c1b34e1d6781/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4", size = 146203 }, + { url = "https://files.pythonhosted.org/packages/b9/f8/ca440ef60d8f8916022859885f231abb07ada3c347c03d63f283bec32ef5/charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537", size = 148082 }, + { url = "https://files.pythonhosted.org/packages/04/d2/42fd330901aaa4b805a1097856c2edf5095e260a597f65def493f4b8c833/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496", size = 142053 }, + { url = "https://files.pythonhosted.org/packages/9e/af/3a97a4fa3c53586f1910dadfc916e9c4f35eeada36de4108f5096cb7215f/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78", size = 150625 }, + { url = "https://files.pythonhosted.org/packages/26/ae/23d6041322a3556e4da139663d02fb1b3c59a23ab2e2b56432bd2ad63ded/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7", size = 153549 }, + { url = "https://files.pythonhosted.org/packages/94/22/b8f2081c6a77cb20d97e57e0b385b481887aa08019d2459dc2858ed64871/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6", size = 150945 }, + { url = "https://files.pythonhosted.org/packages/c7/0b/c5ec5092747f801b8b093cdf5610e732b809d6cb11f4c51e35fc28d1d389/charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294", size = 146595 }, + { url = "https://files.pythonhosted.org/packages/0c/5a/0b59704c38470df6768aa154cc87b1ac7c9bb687990a1559dc8765e8627e/charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5", size = 95453 }, + { url = "https://files.pythonhosted.org/packages/85/2d/a9790237cb4d01a6d57afadc8573c8b73c609ade20b80f4cda30802009ee/charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765", size = 102811 }, + { url = "https://files.pythonhosted.org/packages/0e/f6/65ecc6878a89bb1c23a086ea335ad4bf21a588990c3f535a227b9eea9108/charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", size = 49767 }, +] + +[[package]] +name = "click" +version = "8.1.8" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/d4/7ebdbd03970677812aac39c869717059dbb71a4cfc033ca6e5221787892c/click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2", size = 98188 }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, +] + +[[package]] +name = "contourpy" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "numpy", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/f6/31a8f28b4a2a4fa0e01085e542f3081ab0588eff8e589d39d775172c9792/contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4", size = 13464370 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6c/e0/be8dcc796cfdd96708933e0e2da99ba4bb8f9b2caa9d560a50f3f09a65f3/contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7", size = 265366 }, + { url = "https://files.pythonhosted.org/packages/50/d6/c953b400219443535d412fcbbc42e7a5e823291236bc0bb88936e3cc9317/contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42", size = 249226 }, + { url = "https://files.pythonhosted.org/packages/6f/b4/6fffdf213ffccc28483c524b9dad46bb78332851133b36ad354b856ddc7c/contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7", size = 308460 }, + { url = "https://files.pythonhosted.org/packages/cf/6c/118fc917b4050f0afe07179a6dcbe4f3f4ec69b94f36c9e128c4af480fb8/contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab", size = 347623 }, + { url = "https://files.pythonhosted.org/packages/f9/a4/30ff110a81bfe3abf7b9673284d21ddce8cc1278f6f77393c91199da4c90/contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589", size = 317761 }, + { url = "https://files.pythonhosted.org/packages/99/e6/d11966962b1aa515f5586d3907ad019f4b812c04e4546cc19ebf62b5178e/contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41", size = 322015 }, + { url = "https://files.pythonhosted.org/packages/4d/e3/182383743751d22b7b59c3c753277b6aee3637049197624f333dac5b4c80/contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d", size = 1262672 }, + { url = "https://files.pythonhosted.org/packages/78/53/974400c815b2e605f252c8fb9297e2204347d1755a5374354ee77b1ea259/contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223", size = 1321688 }, + { url = "https://files.pythonhosted.org/packages/52/29/99f849faed5593b2926a68a31882af98afbeac39c7fdf7de491d9c85ec6a/contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f", size = 171145 }, + { url = "https://files.pythonhosted.org/packages/a9/97/3f89bba79ff6ff2b07a3cbc40aa693c360d5efa90d66e914f0ff03b95ec7/contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b", size = 216019 }, + { url = "https://files.pythonhosted.org/packages/b3/1f/9375917786cb39270b0ee6634536c0e22abf225825602688990d8f5c6c19/contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad", size = 266356 }, + { url = "https://files.pythonhosted.org/packages/05/46/9256dd162ea52790c127cb58cfc3b9e3413a6e3478917d1f811d420772ec/contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49", size = 250915 }, + { url = "https://files.pythonhosted.org/packages/e1/5d/3056c167fa4486900dfbd7e26a2fdc2338dc58eee36d490a0ed3ddda5ded/contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66", size = 310443 }, + { url = "https://files.pythonhosted.org/packages/ca/c2/1a612e475492e07f11c8e267ea5ec1ce0d89971be496c195e27afa97e14a/contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081", size = 348548 }, + { url = "https://files.pythonhosted.org/packages/45/cf/2c2fc6bb5874158277b4faf136847f0689e1b1a1f640a36d76d52e78907c/contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1", size = 319118 }, + { url = "https://files.pythonhosted.org/packages/03/33/003065374f38894cdf1040cef474ad0546368eea7e3a51d48b8a423961f8/contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d", size = 323162 }, + { url = "https://files.pythonhosted.org/packages/42/80/e637326e85e4105a802e42959f56cff2cd39a6b5ef68d5d9aee3ea5f0e4c/contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c", size = 1265396 }, + { url = "https://files.pythonhosted.org/packages/7c/3b/8cbd6416ca1bbc0202b50f9c13b2e0b922b64be888f9d9ee88e6cfabfb51/contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb", size = 1324297 }, + { url = "https://files.pythonhosted.org/packages/4d/2c/021a7afaa52fe891f25535506cc861c30c3c4e5a1c1ce94215e04b293e72/contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c", size = 171808 }, + { url = "https://files.pythonhosted.org/packages/8d/2f/804f02ff30a7fae21f98198828d0857439ec4c91a96e20cf2d6c49372966/contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67", size = 217181 }, + { url = "https://files.pythonhosted.org/packages/c9/92/8e0bbfe6b70c0e2d3d81272b58c98ac69ff1a4329f18c73bd64824d8b12e/contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f", size = 267838 }, + { url = "https://files.pythonhosted.org/packages/e3/04/33351c5d5108460a8ce6d512307690b023f0cfcad5899499f5c83b9d63b1/contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6", size = 251549 }, + { url = "https://files.pythonhosted.org/packages/51/3d/aa0fe6ae67e3ef9f178389e4caaaa68daf2f9024092aa3c6032e3d174670/contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639", size = 303177 }, + { url = "https://files.pythonhosted.org/packages/56/c3/c85a7e3e0cab635575d3b657f9535443a6f5d20fac1a1911eaa4bbe1aceb/contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c", size = 341735 }, + { url = "https://files.pythonhosted.org/packages/dd/8d/20f7a211a7be966a53f474bc90b1a8202e9844b3f1ef85f3ae45a77151ee/contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06", size = 314679 }, + { url = "https://files.pythonhosted.org/packages/6e/be/524e377567defac0e21a46e2a529652d165fed130a0d8a863219303cee18/contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09", size = 320549 }, + { url = "https://files.pythonhosted.org/packages/0f/96/fdb2552a172942d888915f3a6663812e9bc3d359d53dafd4289a0fb462f0/contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd", size = 1263068 }, + { url = "https://files.pythonhosted.org/packages/2a/25/632eab595e3140adfa92f1322bf8915f68c932bac468e89eae9974cf1c00/contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35", size = 1322833 }, + { url = "https://files.pythonhosted.org/packages/73/e3/69738782e315a1d26d29d71a550dbbe3eb6c653b028b150f70c1a5f4f229/contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb", size = 172681 }, + { url = "https://files.pythonhosted.org/packages/0c/89/9830ba00d88e43d15e53d64931e66b8792b46eb25e2050a88fec4a0df3d5/contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b", size = 218283 }, + { url = "https://files.pythonhosted.org/packages/b3/e3/b9f72758adb6ef7397327ceb8b9c39c75711affb220e4f53c745ea1d5a9a/contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8", size = 265518 }, + { url = "https://files.pythonhosted.org/packages/ec/22/19f5b948367ab5260fb41d842c7a78dae645603881ea6bc39738bcfcabf6/contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c", size = 249350 }, + { url = "https://files.pythonhosted.org/packages/26/76/0c7d43263dd00ae21a91a24381b7e813d286a3294d95d179ef3a7b9fb1d7/contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca", size = 309167 }, + { url = "https://files.pythonhosted.org/packages/96/3b/cadff6773e89f2a5a492c1a8068e21d3fccaf1a1c1df7d65e7c8e3ef60ba/contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f", size = 348279 }, + { url = "https://files.pythonhosted.org/packages/e1/86/158cc43aa549d2081a955ab11c6bdccc7a22caacc2af93186d26f5f48746/contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc", size = 318519 }, + { url = "https://files.pythonhosted.org/packages/05/11/57335544a3027e9b96a05948c32e566328e3a2f84b7b99a325b7a06d2b06/contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2", size = 321922 }, + { url = "https://files.pythonhosted.org/packages/0b/e3/02114f96543f4a1b694333b92a6dcd4f8eebbefcc3a5f3bbb1316634178f/contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e", size = 1258017 }, + { url = "https://files.pythonhosted.org/packages/f3/3b/bfe4c81c6d5881c1c643dde6620be0b42bf8aab155976dd644595cfab95c/contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800", size = 1316773 }, + { url = "https://files.pythonhosted.org/packages/f1/17/c52d2970784383cafb0bd918b6fb036d98d96bbf0bc1befb5d1e31a07a70/contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5", size = 171353 }, + { url = "https://files.pythonhosted.org/packages/53/23/db9f69676308e094d3c45f20cc52e12d10d64f027541c995d89c11ad5c75/contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843", size = 211817 }, + { url = "https://files.pythonhosted.org/packages/d1/09/60e486dc2b64c94ed33e58dcfb6f808192c03dfc5574c016218b9b7680dc/contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c", size = 261886 }, + { url = "https://files.pythonhosted.org/packages/19/20/b57f9f7174fcd439a7789fb47d764974ab646fa34d1790551de386457a8e/contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779", size = 311008 }, + { url = "https://files.pythonhosted.org/packages/74/fc/5040d42623a1845d4f17a418e590fd7a79ae8cb2bad2b2f83de63c3bdca4/contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4", size = 215690 }, + { url = "https://files.pythonhosted.org/packages/2b/24/dc3dcd77ac7460ab7e9d2b01a618cb31406902e50e605a8d6091f0a8f7cc/contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0", size = 261894 }, + { url = "https://files.pythonhosted.org/packages/b1/db/531642a01cfec39d1682e46b5457b07cf805e3c3c584ec27e2a6223f8f6c/contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102", size = 311099 }, + { url = "https://files.pythonhosted.org/packages/38/1e/94bda024d629f254143a134eead69e21c836429a2a6ce82209a00ddcb79a/contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb", size = 215838 }, +] + +[[package]] +name = "contourpy" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "numpy", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/c2/fc7193cc5383637ff390a712e88e4ded0452c9fbcf84abe3de5ea3df1866/contourpy-1.3.1.tar.gz", hash = "sha256:dfd97abd83335045a913e3bcc4a09c0ceadbe66580cf573fe961f4a825efa699", size = 13465753 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b2/a3/80937fe3efe0edacf67c9a20b955139a1a622730042c1ea991956f2704ad/contourpy-1.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a045f341a77b77e1c5de31e74e966537bba9f3c4099b35bf4c2e3939dd54cdab", size = 268466 }, + { url = "https://files.pythonhosted.org/packages/82/1d/e3eaebb4aa2d7311528c048350ca8e99cdacfafd99da87bc0a5f8d81f2c2/contourpy-1.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:500360b77259914f7805af7462e41f9cb7ca92ad38e9f94d6c8641b089338124", size = 253314 }, + { url = "https://files.pythonhosted.org/packages/de/f3/d796b22d1a2b587acc8100ba8c07fb7b5e17fde265a7bb05ab967f4c935a/contourpy-1.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2f926efda994cdf3c8d3fdb40b9962f86edbc4457e739277b961eced3d0b4c1", size = 312003 }, + { url = "https://files.pythonhosted.org/packages/bf/f5/0e67902bc4394daee8daa39c81d4f00b50e063ee1a46cb3938cc65585d36/contourpy-1.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:adce39d67c0edf383647a3a007de0a45fd1b08dedaa5318404f1a73059c2512b", size = 351896 }, + { url = "https://files.pythonhosted.org/packages/1f/d6/e766395723f6256d45d6e67c13bb638dd1fa9dc10ef912dc7dd3dcfc19de/contourpy-1.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:abbb49fb7dac584e5abc6636b7b2a7227111c4f771005853e7d25176daaf8453", size = 320814 }, + { url = "https://files.pythonhosted.org/packages/a9/57/86c500d63b3e26e5b73a28b8291a67c5608d4aa87ebd17bd15bb33c178bc/contourpy-1.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0cffcbede75c059f535725c1680dfb17b6ba8753f0c74b14e6a9c68c29d7ea3", size = 324969 }, + { url = "https://files.pythonhosted.org/packages/b8/62/bb146d1289d6b3450bccc4642e7f4413b92ebffd9bf2e91b0404323704a7/contourpy-1.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ab29962927945d89d9b293eabd0d59aea28d887d4f3be6c22deaefbb938a7277", size = 1265162 }, + { url = "https://files.pythonhosted.org/packages/18/04/9f7d132ce49a212c8e767042cc80ae390f728060d2eea47058f55b9eff1c/contourpy-1.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:974d8145f8ca354498005b5b981165b74a195abfae9a8129df3e56771961d595", size = 1324328 }, + { url = "https://files.pythonhosted.org/packages/46/23/196813901be3f97c83ababdab1382e13e0edc0bb4e7b49a7bff15fcf754e/contourpy-1.3.1-cp310-cp310-win32.whl", hash = "sha256:ac4578ac281983f63b400f7fe6c101bedc10651650eef012be1ccffcbacf3697", size = 173861 }, + { url = "https://files.pythonhosted.org/packages/e0/82/c372be3fc000a3b2005061ca623a0d1ecd2eaafb10d9e883a2fc8566e951/contourpy-1.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:174e758c66bbc1c8576992cec9599ce8b6672b741b5d336b5c74e35ac382b18e", size = 218566 }, + { url = "https://files.pythonhosted.org/packages/12/bb/11250d2906ee2e8b466b5f93e6b19d525f3e0254ac8b445b56e618527718/contourpy-1.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8b974d8db2c5610fb4e76307e265de0edb655ae8169e8b21f41807ccbeec4b", size = 269555 }, + { url = "https://files.pythonhosted.org/packages/67/71/1e6e95aee21a500415f5d2dbf037bf4567529b6a4e986594d7026ec5ae90/contourpy-1.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20914c8c973f41456337652a6eeca26d2148aa96dd7ac323b74516988bea89fc", size = 254549 }, + { url = "https://files.pythonhosted.org/packages/31/2c/b88986e8d79ac45efe9d8801ae341525f38e087449b6c2f2e6050468a42c/contourpy-1.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19d40d37c1c3a4961b4619dd9d77b12124a453cc3d02bb31a07d58ef684d3d86", size = 313000 }, + { url = "https://files.pythonhosted.org/packages/c4/18/65280989b151fcf33a8352f992eff71e61b968bef7432fbfde3a364f0730/contourpy-1.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:113231fe3825ebf6f15eaa8bc1f5b0ddc19d42b733345eae0934cb291beb88b6", size = 352925 }, + { url = "https://files.pythonhosted.org/packages/f5/c7/5fd0146c93220dbfe1a2e0f98969293b86ca9bc041d6c90c0e065f4619ad/contourpy-1.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4dbbc03a40f916a8420e420d63e96a1258d3d1b58cbdfd8d1f07b49fcbd38e85", size = 323693 }, + { url = "https://files.pythonhosted.org/packages/85/fc/7fa5d17daf77306840a4e84668a48ddff09e6bc09ba4e37e85ffc8e4faa3/contourpy-1.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a04ecd68acbd77fa2d39723ceca4c3197cb2969633836ced1bea14e219d077c", size = 326184 }, + { url = "https://files.pythonhosted.org/packages/ef/e7/104065c8270c7397c9571620d3ab880558957216f2b5ebb7e040f85eeb22/contourpy-1.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c414fc1ed8ee1dbd5da626cf3710c6013d3d27456651d156711fa24f24bd1291", size = 1268031 }, + { url = "https://files.pythonhosted.org/packages/e2/4a/c788d0bdbf32c8113c2354493ed291f924d4793c4a2e85b69e737a21a658/contourpy-1.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:31c1b55c1f34f80557d3830d3dd93ba722ce7e33a0b472cba0ec3b6535684d8f", size = 1325995 }, + { url = "https://files.pythonhosted.org/packages/a6/e6/a2f351a90d955f8b0564caf1ebe4b1451a3f01f83e5e3a414055a5b8bccb/contourpy-1.3.1-cp311-cp311-win32.whl", hash = "sha256:f611e628ef06670df83fce17805c344710ca5cde01edfdc72751311da8585375", size = 174396 }, + { url = "https://files.pythonhosted.org/packages/a8/7e/cd93cab453720a5d6cb75588cc17dcdc08fc3484b9de98b885924ff61900/contourpy-1.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:b2bdca22a27e35f16794cf585832e542123296b4687f9fd96822db6bae17bfc9", size = 219787 }, + { url = "https://files.pythonhosted.org/packages/37/6b/175f60227d3e7f5f1549fcb374592be311293132207e451c3d7c654c25fb/contourpy-1.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0ffa84be8e0bd33410b17189f7164c3589c229ce5db85798076a3fa136d0e509", size = 271494 }, + { url = "https://files.pythonhosted.org/packages/6b/6a/7833cfae2c1e63d1d8875a50fd23371394f540ce809d7383550681a1fa64/contourpy-1.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:805617228ba7e2cbbfb6c503858e626ab528ac2a32a04a2fe88ffaf6b02c32bc", size = 255444 }, + { url = "https://files.pythonhosted.org/packages/7f/b3/7859efce66eaca5c14ba7619791b084ed02d868d76b928ff56890d2d059d/contourpy-1.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ade08d343436a94e633db932e7e8407fe7de8083967962b46bdfc1b0ced39454", size = 307628 }, + { url = "https://files.pythonhosted.org/packages/48/b2/011415f5e3f0a50b1e285a0bf78eb5d92a4df000553570f0851b6e309076/contourpy-1.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:47734d7073fb4590b4a40122b35917cd77be5722d80683b249dac1de266aac80", size = 347271 }, + { url = "https://files.pythonhosted.org/packages/84/7d/ef19b1db0f45b151ac78c65127235239a8cf21a59d1ce8507ce03e89a30b/contourpy-1.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ba94a401342fc0f8b948e57d977557fbf4d515f03c67682dd5c6191cb2d16ec", size = 318906 }, + { url = "https://files.pythonhosted.org/packages/ba/99/6794142b90b853a9155316c8f470d2e4821fe6f086b03e372aca848227dd/contourpy-1.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efa874e87e4a647fd2e4f514d5e91c7d493697127beb95e77d2f7561f6905bd9", size = 323622 }, + { url = "https://files.pythonhosted.org/packages/3c/0f/37d2c84a900cd8eb54e105f4fa9aebd275e14e266736778bb5dccbf3bbbb/contourpy-1.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1bf98051f1045b15c87868dbaea84f92408337d4f81d0e449ee41920ea121d3b", size = 1266699 }, + { url = "https://files.pythonhosted.org/packages/3a/8a/deb5e11dc7d9cc8f0f9c8b29d4f062203f3af230ba83c30a6b161a6effc9/contourpy-1.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:61332c87493b00091423e747ea78200659dc09bdf7fd69edd5e98cef5d3e9a8d", size = 1326395 }, + { url = "https://files.pythonhosted.org/packages/1a/35/7e267ae7c13aaf12322ccc493531f1e7f2eb8fba2927b9d7a05ff615df7a/contourpy-1.3.1-cp312-cp312-win32.whl", hash = "sha256:e914a8cb05ce5c809dd0fe350cfbb4e881bde5e2a38dc04e3afe1b3e58bd158e", size = 175354 }, + { url = "https://files.pythonhosted.org/packages/a1/35/c2de8823211d07e8a79ab018ef03960716c5dff6f4d5bff5af87fd682992/contourpy-1.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:08d9d449a61cf53033612cb368f3a1b26cd7835d9b8cd326647efe43bca7568d", size = 220971 }, + { url = "https://files.pythonhosted.org/packages/3e/4f/e56862e64b52b55b5ddcff4090085521fc228ceb09a88390a2b103dccd1b/contourpy-1.3.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b457d6430833cee8e4b8e9b6f07aa1c161e5e0d52e118dc102c8f9bd7dd060d6", size = 265605 }, + { url = "https://files.pythonhosted.org/packages/b0/2e/52bfeeaa4541889f23d8eadc6386b442ee2470bd3cff9baa67deb2dd5c57/contourpy-1.3.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb76c1a154b83991a3cbbf0dfeb26ec2833ad56f95540b442c73950af2013750", size = 315040 }, + { url = "https://files.pythonhosted.org/packages/52/94/86bfae441707205634d80392e873295652fc313dfd93c233c52c4dc07874/contourpy-1.3.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:44a29502ca9c7b5ba389e620d44f2fbe792b1fb5734e8b931ad307071ec58c53", size = 218221 }, +] + +[[package]] +name = "cycler" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/95/a3dbbb5028f35eafb79008e7522a75244477d2838f38cbb722248dabc2a8/cycler-0.12.1.tar.gz", hash = "sha256:88bb128f02ba341da8ef447245a9e138fae777f6a23943da4540077d3601eb1c", size = 7615 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e7/05/c19819d5e3d95294a6f5947fb9b9629efb316b96de511b418c53d245aae6/cycler-0.12.1-py3-none-any.whl", hash = "sha256:85cef7cff222d8644161529808465972e51340599459b8ac3ccbac5a854e0d30", size = 8321 }, +] + +[[package]] +name = "dacite" +version = "1.8.1" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/21/0f/cf0943f4f55f0fbc7c6bd60caf1343061dff818b02af5a0d444e473bb78d/dacite-1.8.1-py3-none-any.whl", hash = "sha256:cc31ad6fdea1f49962ea42db9421772afe01ac5442380d9a99fcf3d188c61afe", size = 14309 }, +] + +[[package]] +name = "daff" +version = "1.3.46" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0e/fc/82796c10545f3df9882566c79debac28b664e3a3a08fdb493ac3cc418709/daff-1.3.46.tar.gz", hash = "sha256:22d0da9fd6a3275b54c926a9c97b180f9258aad65113ea18f3fec52cbadcd818", size = 149820 } + +[[package]] +name = "dbt-adapters" +version = "1.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "agate" }, + { name = "dbt-common" }, + { name = "mashumaro", extra = ["msgpack"] }, + { name = "protobuf" }, + { name = "pytz" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/96/4c937891a2832d0520e5e311d38604df8547f4b8266d7d3bedbc2245c068/dbt_adapters-1.13.0.tar.gz", hash = "sha256:2e4e743d3613e7d72319edf6eb85ea2ca57133472e32ff0148c1325f21ff6d16", size = 106862 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/d7/3b627f5b2d1a390762c2b3fbfb690fd4395b94dc70957ee7cdfe50198596/dbt_adapters-1.13.0-py3-none-any.whl", hash = "sha256:4888f9b7d0ddb709e8219e722ee01678e31aade2b8b19595cf094a177c71dc56", size = 164945 }, +] + +[[package]] +name = "dbt-common" +version = "1.14.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "agate" }, + { name = "colorama" }, + { name = "deepdiff" }, + { name = "isodate" }, + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "mashumaro", extra = ["msgpack"] }, + { name = "pathspec" }, + { name = "protobuf" }, + { name = "python-dateutil" }, + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ec/59/b0758aa2219ae4df642e179c879e42e5c90071275eae545cfe3ebd506da2/dbt_common-1.14.0.tar.gz", hash = "sha256:2227e24a165780c5368320dedd3c6bc40038dedece48af03daab43c11bf20372", size = 80035 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fc/03/25d3618dc3af65996e3fde27859ad8a7fdf94889a327430a99f33d811db0/dbt_common-1.14.0-py3-none-any.whl", hash = "sha256:239b568a0dd764a431b93cdfe247628622c975f2eed8abf3bc04f4dc770ad161", size = 82948 }, +] + +[[package]] +name = "dbt-core" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "agate" }, + { name = "click" }, + { name = "daff" }, + { name = "dbt-adapters" }, + { name = "dbt-common" }, + { name = "dbt-extractor" }, + { name = "dbt-semantic-interfaces" }, + { name = "jinja2" }, + { name = "mashumaro", extra = ["msgpack"] }, + { name = "networkx", version = "3.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "packaging" }, + { name = "pathspec" }, + { name = "protobuf" }, + { name = "pytz" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "snowplow-tracker" }, + { name = "sqlparse" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/93/34/f7613d15ddad74a72992175df0147c82931e63a0b116e4e2bec5e45a6a11/dbt_core-1.9.1.tar.gz", hash = "sha256:38c931dd5206fdb11a9db1decf1075ce891ad9f4692bd00a9ba760a6cfe4358d", size = 865279 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/38/0125f4a9c808ba061fa9c621fda88399bd5cacc4ff35399a8710e69bb335/dbt_core-1.9.1-py3-none-any.whl", hash = "sha256:a1db009c30f08a95fcf2620f70b2258aeca43a480229d7eb710e2243f5f622ea", size = 944639 }, +] + +[[package]] +name = "dbt-duckdb" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dbt-adapters" }, + { name = "dbt-common" }, + { name = "dbt-core" }, + { name = "duckdb" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/41/52ca73b0de603343c8be141c10c44defa425630fd881c1021a3135562c97/dbt_duckdb-1.9.1.tar.gz", hash = "sha256:3d5b5e0372033684f3515b4f152757d330598b517ba7ee9c6612819a2e13e084", size = 97052 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/ad/8788bf89b98be02ca0236a119f9a3f611dccc4b8a558b5bc9986026eef6e/dbt_duckdb-1.9.1-py3-none-any.whl", hash = "sha256:4034e57a0f3ee5283597b447b8fddf5220aa11744e84deb28ee32c977826139b", size = 64514 }, +] + +[[package]] +name = "dbt-extractor" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/d0/4ee14955ad0214da695b3c15dc0acf2ab54c9d263242f36073c999cb699a/dbt_extractor-0.5.1.tar.gz", hash = "sha256:cd5d95576a8dea4190240aaf9936a37fd74b4b7913ca69a3c368fc4472bb7e13", size = 266278 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/1f/ca6d66d67464df1ea8e814d09b1100d15672ae4ce7f0dff41f67956e5f7f/dbt_extractor-0.5.1-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:3b91e6106b967d908b34f83929d3f50ee2b498876a1be9c055fe060ed728c556", size = 865677 }, + { url = "https://files.pythonhosted.org/packages/3b/be/0ae4a5c6c721ee42d849482084b5f4544acafe3c8cf4c84170f35c63fe50/dbt_extractor-0.5.1-cp38-abi3-macosx_10_12_x86_64.whl", hash = "sha256:3614ce9f83ae4cd0dc95f77730034a793a1c090a52dcf698ba1c94050afe3a8b", size = 438730 }, + { url = "https://files.pythonhosted.org/packages/a9/ac/bbe5d223a03632d4192414a8af0aa6e2c16555a6e7d33515225b4c978096/dbt_extractor-0.5.1-cp38-abi3-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ea4edf33035d0a060b1e01c42fb2d99316457d44c954d6ed4eed9f1948664d87", size = 1385155 }, + { url = "https://files.pythonhosted.org/packages/6d/96/caef63d79f3a06bcae1aca43302c1b9efa58590644efca41c4404607510e/dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b9bf50eb062b4344d9546fe42038996c6e7e7daa10724aa955d64717260e5d", size = 1344382 }, + { url = "https://files.pythonhosted.org/packages/66/ce/8c248ba3def50203925a1404d21a03999e2fe32bf7611e6f9de1006817ba/dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c0ce901d4ebf0664977e4e1cbf596d4afc6c1339fcc7d2cf67ce3481566a626f", size = 1343152 }, + { url = "https://files.pythonhosted.org/packages/11/73/5ead77c8b742453e1a34a064d921933bbca4f8941ad8f14fd47d0a15c49c/dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:cbe338b76e9ffaa18275456e041af56c21bb517f6fbda7a58308138703da0996", size = 1498587 }, + { url = "https://files.pythonhosted.org/packages/51/e6/140058fbeb482071a7b199986c40385dfdc97f23b0ea20b0740762d2e116/dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b25fa7a276ab26aa2d70ff6e0cf4cfb1490d7831fb57ee1337c24d2b0333b84", size = 1482391 }, + { url = "https://files.pythonhosted.org/packages/63/e6/a40a89c75701fa91fc7297b9d77f303fc93669a32a10be4457a02de0584f/dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c5651e458be910ff567c0da3ea2eb084fd01884cc88888ac2cf1e240dcddacc2", size = 1517273 }, + { url = "https://files.pythonhosted.org/packages/30/da/a9528ca8224317aad1dab22f77468dd13e94c46b56db953b5b1e3b698a8f/dbt_extractor-0.5.1-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62e4f040fd338b652683421ce48e903812e27fd6e7af58b1b70a4e1f9f2c79e3", size = 1346957 }, + { url = "https://files.pythonhosted.org/packages/7b/2b/48ad70e0490e492b1f59e260d447b3c9eaaad661eb4b46baacc2f328dabf/dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91e25ad78f1f4feadd27587ebbcc46ad909cfad843118908f30336d08d8400ca", size = 1524362 }, + { url = "https://files.pythonhosted.org/packages/6c/cc/6dce67509e94080535b400b03d7d13fecd2acba72c10c21df8b7755212ce/dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:cdf9938b36cd098bcdd80f43dc03864da3f69f57d903a9160a32236540d4ddcd", size = 1603552 }, + { url = "https://files.pythonhosted.org/packages/58/b6/14ab2c80385a29ad013a0a0642522b393bf1220d6c01587aad4796784cc1/dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:475e2c05b17eb4976eff6c8f7635be42bec33f15a74ceb87a40242c94a99cebf", size = 1550461 }, + { url = "https://files.pythonhosted.org/packages/7c/04/19af8b0cb0e341d091cca21ff3cfed95f152e39f598b7313c79a6804f32f/dbt_extractor-0.5.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:100453ba06e169cbdb118234ab3f06f6722a2e0e316089b81c88dea701212abc", size = 1520792 }, + { url = "https://files.pythonhosted.org/packages/10/dd/b3c440b8eeac318a2d3b0f190783feedad60b962fe984d6d0cb482b128b4/dbt_extractor-0.5.1-cp38-abi3-win32.whl", hash = "sha256:6916aae085fd5f2af069fd6947933e78b742c9e3d2165e1740c2e28ae543309a", size = 261615 }, + { url = "https://files.pythonhosted.org/packages/8c/ad/fa331537dbe97250dda06342775891ae2b1fb8b54cf9219e47781f641657/dbt_extractor-0.5.1-cp38-abi3-win_amd64.whl", hash = "sha256:eecc08f3743e802a8ede60c89f7b2bce872acc86120cbc0ae7df229bb8a95083", size = 283481 }, +] + +[[package]] +name = "dbt-osmosis" +version = "0.14.0" +source = { editable = "." } +dependencies = [ + { name = "click" }, + { name = "dbt-core" }, + { name = "gitpython" }, + { name = "rich" }, + { name = "ruamel-yaml" }, +] + +[package.optional-dependencies] +dev = [ + { name = "black" }, + { name = "mypy" }, + { name = "pre-commit" }, + { name = "pylint" }, + { name = "pytest" }, +] +duckdb = [ + { name = "dbt-duckdb" }, +] +openai = [ + { name = "openai" }, +] +postgres = [ + { name = "dbt-postgres" }, +] +sqlite = [ + { name = "dbt-sqlite" }, +] +workbench = [ + { name = "feedparser" }, + { name = "streamlit" }, + { name = "streamlit-ace" }, + { name = "streamlit-elements-fluence" }, + { name = "ydata-profiling" }, +] + +[package.metadata] +requires-dist = [ + { name = "black", marker = "extra == 'dev'", specifier = ">=21.9b0" }, + { name = "click", specifier = ">7" }, + { name = "dbt-core", specifier = ">=1.8,<1.10" }, + { name = "dbt-duckdb", marker = "extra == 'duckdb'", specifier = ">=1.0.0" }, + { name = "dbt-postgres", marker = "extra == 'postgres'", specifier = ">=1.0.0" }, + { name = "dbt-sqlite", marker = "extra == 'sqlite'", specifier = ">=1.0.0" }, + { name = "feedparser", marker = "extra == 'workbench'", specifier = "~=6.0.10" }, + { name = "gitpython", specifier = ">3,<4" }, + { name = "mypy", marker = "extra == 'dev'", specifier = ">=0.910" }, + { name = "openai", marker = "extra == 'openai'", specifier = ">0.28.0" }, + { name = "pre-commit", marker = "extra == 'dev'", specifier = ">3.0.0" }, + { name = "pylint", marker = "extra == 'dev'", specifier = ">=2.11.1" }, + { name = "pytest", marker = "extra == 'dev'", specifier = "~=7.4.2" }, + { name = "rich", specifier = ">=10" }, + { name = "ruamel-yaml", specifier = ">=0.17" }, + { name = "streamlit", marker = "extra == 'workbench'", specifier = ">=1.20.0" }, + { name = "streamlit-ace", marker = "extra == 'workbench'", specifier = ">=0.1.0" }, + { name = "streamlit-elements-fluence", marker = "extra == 'workbench'", specifier = ">=0.1.4" }, + { name = "ydata-profiling", marker = "extra == 'workbench'", specifier = ">=3.6.0" }, +] + +[[package]] +name = "dbt-postgres" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "agate" }, + { name = "dbt-adapters" }, + { name = "dbt-common" }, + { name = "dbt-core" }, + { name = "psycopg2-binary" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/09/24/03eae698e7f0bffb579a120758fcf95cdf7a58caccec79254195b4a1cb4c/dbt_postgres-1.9.0.tar.gz", hash = "sha256:b0574e9e1e66d8a5cd627b1d464ec0278eef7342f0b5babe4f987eee9d02a143", size = 23555 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/30/bb/8e48678036e5f89b49f72c98e41fa41ebe853c219e658cad3797afbc50b9/dbt_postgres-1.9.0-py3-none-any.whl", hash = "sha256:c85d1adb419251ac989e5f720fdbb964aa6c280da7739dc8c48d44e6f45d354a", size = 35182 }, +] + +[[package]] +name = "dbt-semantic-interfaces" +version = "0.7.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "importlib-metadata" }, + { name = "jinja2" }, + { name = "jsonschema" }, + { name = "more-itertools" }, + { name = "pydantic" }, + { name = "python-dateutil" }, + { name = "pyyaml" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/53/67/86c4c068d1c6371928064bb9fe44eb8fbc4e15bb101e924d2e596c6fa511/dbt_semantic_interfaces-0.7.4.tar.gz", hash = "sha256:dcedda6702ecabb633aa4e8ab3b1eb7f9c4301dcc0026076a4a0ef64f9e59cf0", size = 90809 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/4d/9e0eec04821154c51c05dab77bc3eed95337c42f471d6ffe7b4a699e8cb1/dbt_semantic_interfaces-0.7.4-py3-none-any.whl", hash = "sha256:63965478ef27056f20a8c9a0f59b1355ebbc15133c1a6f0d368d93996a31dd5d", size = 141981 }, +] + +[[package]] +name = "dbt-sqlite" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dbt-core" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/51/00/e65f10afdda193da78346893a93db502e629374f76ed6921f21dd35bd841/dbt-sqlite-1.4.0.tar.gz", hash = "sha256:f1fcacd057b78f0a48c1a84bff7d3ff31cd1c34bbffa02933f2f9d31ddfd0a3b", size = 20055 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e3/6a/c814306bf0d74f73292100aa9409a3c470e992adb05aa322bee3fdee8dfc/dbt_sqlite-1.4.0-py3-none-any.whl", hash = "sha256:4ef86682e6fb4940088b824cfb986465b5d66ed589ebfc3071238d915c37aa4e", size = 22870 }, +] + +[[package]] +name = "deepdiff" +version = "7.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ordered-set" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/10/6f4b0bd0627d542f63a24f38e29d77095dc63d5f45bc1a7b4a6ca8750fa9/deepdiff-7.0.1.tar.gz", hash = "sha256:260c16f052d4badbf60351b4f77e8390bee03a0b516246f6839bc813fb429ddf", size = 421718 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/e6/d27d37dc55dbf40cdbd665aa52844b065ac760c9a02a02265f97ea7a4256/deepdiff-7.0.1-py3-none-any.whl", hash = "sha256:447760081918216aa4fd4ca78a4b6a848b81307b2ea94c810255334b759e1dc3", size = 80825 }, +] + +[[package]] +name = "dill" +version = "0.3.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/70/43/86fe3f9e130c4137b0f1b50784dd70a5087b911fe07fa81e53e0c4c47fea/dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c", size = 187000 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a", size = 119418 }, +] + +[[package]] +name = "distlib" +version = "0.3.9" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0d/dd/1bec4c5ddb504ca60fc29472f3d27e8d4da1257a854e1d96742f15c1d02d/distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403", size = 613923 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973 }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277 }, +] + +[[package]] +name = "duckdb" +version = "1.1.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a0/d7/ec014b351b6bb026d5f473b1d0ec6bd6ba40786b9abbf530b4c9041d9895/duckdb-1.1.3.tar.gz", hash = "sha256:68c3a46ab08836fe041d15dcbf838f74a990d551db47cb24ab1c4576fc19351c", size = 12240672 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/7e/aef0fa22a80939edb04f66152a1fd5ce7257931576be192a8068e74f0892/duckdb-1.1.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:1c0226dc43e2ee4cc3a5a4672fddb2d76fd2cf2694443f395c02dd1bea0b7fce", size = 15469781 }, + { url = "https://files.pythonhosted.org/packages/38/22/df548714ddd915929ebbba9699e8614655ed93cd367f5849f6dbd1b3e160/duckdb-1.1.3-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:7c71169fa804c0b65e49afe423ddc2dc83e198640e3b041028da8110f7cd16f7", size = 32313005 }, + { url = "https://files.pythonhosted.org/packages/9f/38/8de640857f4c55df870faf025835e09c69222d365dc773507e934cee3376/duckdb-1.1.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:872d38b65b66e3219d2400c732585c5b4d11b13d7a36cd97908d7981526e9898", size = 16931481 }, + { url = "https://files.pythonhosted.org/packages/41/9b/87fff1341a9f57ab75284d79f902fee8cd6ef3a9135af4c723c90384d307/duckdb-1.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25fb02629418c0d4d94a2bc1776edaa33f6f6ccaa00bd84eb96ecb97ae4b50e9", size = 18491670 }, + { url = "https://files.pythonhosted.org/packages/3e/ee/8f74ccecbafd14e257c634f0f2cdebbc35634d9d74f04bb7ad8a0e142bf8/duckdb-1.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3f5cd604e7c39527e6060f430769b72234345baaa0987f9500988b2814f5e4", size = 20144774 }, + { url = "https://files.pythonhosted.org/packages/36/7b/edffb833b8569a7fc1799ceb4392911e0082f18a6076225441e954a95853/duckdb-1.1.3-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:08935700e49c187fe0e9b2b86b5aad8a2ccd661069053e38bfaed3b9ff795efd", size = 18287084 }, + { url = "https://files.pythonhosted.org/packages/a9/ab/6367e8c98b3331260bb4389c6b80deef96614c1e21edcdba23a882e45ab0/duckdb-1.1.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9b47036945e1db32d70e414a10b1593aec641bd4c5e2056873d971cc21e978b", size = 21614877 }, + { url = "https://files.pythonhosted.org/packages/03/d8/89b1c5f1dbd16342640742f6f6d3f1c827d1a1b966d674774ddfe6a385e2/duckdb-1.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:35c420f58abc79a68a286a20fd6265636175fadeca1ce964fc8ef159f3acc289", size = 10954044 }, + { url = "https://files.pythonhosted.org/packages/57/d0/96127582230183dc36f1209d5e8e67f54b3459b3b9794603305d816f350a/duckdb-1.1.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:4f0e2e5a6f5a53b79aee20856c027046fba1d73ada6178ed8467f53c3877d5e0", size = 15469495 }, + { url = "https://files.pythonhosted.org/packages/70/07/b78b435f8fe85c23ee2d49a01dc9599bb4a272c40f2a6bf67ff75958bdad/duckdb-1.1.3-cp311-cp311-macosx_12_0_universal2.whl", hash = "sha256:911d58c22645bfca4a5a049ff53a0afd1537bc18fedb13bc440b2e5af3c46148", size = 32318595 }, + { url = "https://files.pythonhosted.org/packages/6c/d8/253b3483fc554daf72503ba0f112404f75be6bbd7ca7047e804873cbb182/duckdb-1.1.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:c443d3d502335e69fc1e35295fcfd1108f72cb984af54c536adfd7875e79cee5", size = 16934057 }, + { url = "https://files.pythonhosted.org/packages/f8/11/908a8fb73cef8304d3f4eab7f27cc489f6fd675f921d382c83c55253be86/duckdb-1.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a55169d2d2e2e88077d91d4875104b58de45eff6a17a59c7dc41562c73df4be", size = 18498214 }, + { url = "https://files.pythonhosted.org/packages/bf/56/f627b6fcd4aa34015a15449d852ccb78d7cc6eda654aa20c1d378e99fa76/duckdb-1.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d0767ada9f06faa5afcf63eb7ba1befaccfbcfdac5ff86f0168c673dd1f47aa", size = 20149376 }, + { url = "https://files.pythonhosted.org/packages/b5/1d/c318dada688119b9ca975d431f9b38bde8dda41b6d18cc06e0dc52123788/duckdb-1.1.3-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51c6d79e05b4a0933672b1cacd6338f882158f45ef9903aef350c4427d9fc898", size = 18293289 }, + { url = "https://files.pythonhosted.org/packages/37/8e/fd346444b270ffe52e06c1af1243eaae30ab651c1d59f51711e3502fd060/duckdb-1.1.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:183ac743f21c6a4d6adfd02b69013d5fd78e5e2cd2b4db023bc8a95457d4bc5d", size = 21622129 }, + { url = "https://files.pythonhosted.org/packages/18/aa/804c1cf5077b6f17d752b23637d9ef53eaad77ea73ee43d4c12bff480e36/duckdb-1.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:a30dd599b8090ea6eafdfb5a9f1b872d78bac318b6914ada2d35c7974d643640", size = 10954756 }, + { url = "https://files.pythonhosted.org/packages/9b/ff/7ee500f4cff0d2a581c1afdf2c12f70ee3bf1a61041fea4d88934a35a7a3/duckdb-1.1.3-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:a433ae9e72c5f397c44abdaa3c781d94f94f4065bcbf99ecd39433058c64cb38", size = 15482881 }, + { url = "https://files.pythonhosted.org/packages/28/16/dda10da6bde54562c3cb0002ca3b7678e3108fa73ac9b7509674a02c5249/duckdb-1.1.3-cp312-cp312-macosx_12_0_universal2.whl", hash = "sha256:d08308e0a46c748d9c30f1d67ee1143e9c5ea3fbcccc27a47e115b19e7e78aa9", size = 32349440 }, + { url = "https://files.pythonhosted.org/packages/2e/c2/06f7f7a51a1843c9384e1637abb6bbebc29367710ffccc7e7e52d72b3dd9/duckdb-1.1.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:5d57776539211e79b11e94f2f6d63de77885f23f14982e0fac066f2885fcf3ff", size = 16953473 }, + { url = "https://files.pythonhosted.org/packages/1a/84/9991221ef7dde79d85231f20646e1b12d645490cd8be055589276f62847e/duckdb-1.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e59087dbbb63705f2483544e01cccf07d5b35afa58be8931b224f3221361d537", size = 18491915 }, + { url = "https://files.pythonhosted.org/packages/aa/76/330fe16f12b7ddda0c664ba9869f3afbc8773dbe17ae750121d407dc0f37/duckdb-1.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ebf5f60ddbd65c13e77cddb85fe4af671d31b851f125a4d002a313696af43f1", size = 20150288 }, + { url = "https://files.pythonhosted.org/packages/c4/88/e4b08b7a5d08c0f65f6c7a6594de64431ce7df38d7258511417ba7989ad3/duckdb-1.1.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e4ef7ba97a65bd39d66f2a7080e6fb60e7c3e41d4c1e19245f90f53b98e3ac32", size = 18296560 }, + { url = "https://files.pythonhosted.org/packages/1a/32/011e6e3ce14375a1ba01a588c119ad82be757f847c6b60207e0762d9ec3a/duckdb-1.1.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f58db1b65593ff796c8ea6e63e2e144c944dd3d51c8d8e40dffa7f41693d35d3", size = 21635270 }, + { url = "https://files.pythonhosted.org/packages/f2/eb/58d4e0eccdc7b3523c062d008ad9eef28edccf88591d1a78659c809fe6e8/duckdb-1.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:e86006958e84c5c02f08f9b96f4bc26990514eab329b1b4f71049b3727ce5989", size = 10955715 }, + { url = "https://files.pythonhosted.org/packages/e5/c4/8a0f629aadfa8e09574e70ceb2d4fa2e81dc36b67d353806e14474983403/duckdb-1.1.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:09c68522c30fc38fc972b8a75e9201616b96ae6da3444585f14cf0d116008c95", size = 15470008 }, + { url = "https://files.pythonhosted.org/packages/be/0c/9f85e133c2b84f87c70fc29cf89289f65602494f15304b392d82cb76aec4/duckdb-1.1.3-cp39-cp39-macosx_12_0_universal2.whl", hash = "sha256:8ee97ec337794c162c0638dda3b4a30a483d0587deda22d45e1909036ff0b739", size = 32312989 }, + { url = "https://files.pythonhosted.org/packages/1a/ff/6abd85726dcb4df11c405f80038c0959df3a08d1c4dd6f36c046c8587e10/duckdb-1.1.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:a1f83c7217c188b7ab42e6a0963f42070d9aed114f6200e3c923c8899c090f16", size = 16931410 }, + { url = "https://files.pythonhosted.org/packages/13/b1/478ceb0228fab92c1f6dd24c7bf0dcbbfd5c5ed690eb0492e72edc2cda0f/duckdb-1.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aa3abec8e8995a03ff1a904b0e66282d19919f562dd0a1de02f23169eeec461", size = 18492142 }, + { url = "https://files.pythonhosted.org/packages/e3/9e/e3995491d4c3bc6b3e3e0f3bad55902225c09f571e296c1eb093f33c5c75/duckdb-1.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80158f4c7c7ada46245837d5b6869a336bbaa28436fbb0537663fa324a2750cd", size = 20144252 }, + { url = "https://files.pythonhosted.org/packages/53/16/c79fe2111451f85c4c08b1d3e09da4e0b0bf67095fb5908da497ed1e87d8/duckdb-1.1.3-cp39-cp39-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:647f17bd126170d96a38a9a6f25fca47ebb0261e5e44881e3782989033c94686", size = 18288990 }, + { url = "https://files.pythonhosted.org/packages/5a/ce/6cd14acc799501c44bbc0617a8fbc6769acd145a6aef0fc49bba9399fd8b/duckdb-1.1.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:252d9b17d354beb9057098d4e5d5698e091a4f4a0d38157daeea5fc0ec161670", size = 21599071 }, + { url = "https://files.pythonhosted.org/packages/13/31/071c1ee0457caa93414b12c4204059823cbc20cf8ed4099a3e54919ea015/duckdb-1.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:eeacb598120040e9591f5a4edecad7080853aa8ac27e62d280f151f8c862afa3", size = 10988880 }, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc", size = 28883 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b", size = 16453 }, +] + +[[package]] +name = "feedparser" +version = "6.0.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sgmllib3k" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ff/aa/7af346ebeb42a76bf108027fe7f3328bb4e57a3a96e53e21fd9ef9dd6dd0/feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5", size = 286197 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/d4/8c31aad9cc18f451c49f7f9cfb5799dadffc88177f7917bc90a66459b1d7/feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45", size = 81343 }, +] + +[[package]] +name = "filelock" +version = "3.16.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/db/3ef5bb276dae18d6ec2124224403d1d67bccdbefc17af4cc8f553e341ab1/filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435", size = 18037 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/f8/feced7779d755758a52d1f6635d990b8d98dc0a29fa568bbe0625f18fdf3/filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0", size = 16163 }, +] + +[[package]] +name = "fonttools" +version = "4.55.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/61/a300d1574dc381393424047c0396a0e213db212e28361123af9830d71a8d/fonttools-4.55.3.tar.gz", hash = "sha256:3983313c2a04d6cc1fe9251f8fc647754cf49a61dac6cb1e7249ae67afaafc45", size = 3498155 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/f3/9ac8c6705e4a0ff3c29e524df1caeee6f2987b02fb630129f21cc99a8212/fonttools-4.55.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1dcc07934a2165ccdc3a5a608db56fb3c24b609658a5b340aee4ecf3ba679dc0", size = 2769857 }, + { url = "https://files.pythonhosted.org/packages/d8/24/e8b8edd280bdb7d0ecc88a5d952b1dec2ee2335be71cc5a33c64871cdfe8/fonttools-4.55.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f7d66c15ba875432a2d2fb419523f5d3d347f91f48f57b8b08a2dfc3c39b8a3f", size = 2299705 }, + { url = "https://files.pythonhosted.org/packages/f8/9e/e1ba20bd3b71870207fd45ca3b90208a7edd8ae3b001081dc31c45adb017/fonttools-4.55.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e4ae3592e62eba83cd2c4ccd9462dcfa603ff78e09110680a5444c6925d841", size = 4576104 }, + { url = "https://files.pythonhosted.org/packages/34/db/d423bc646e6703fe3e6aea0edd22a2df47b9d188c5f7f1b49070be4d2205/fonttools-4.55.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62d65a3022c35e404d19ca14f291c89cc5890032ff04f6c17af0bd1927299674", size = 4618282 }, + { url = "https://files.pythonhosted.org/packages/75/a0/e5062ac960a385b984ba74e7b55132e7f2c65e449e8330ab0f595407a3de/fonttools-4.55.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d342e88764fb201286d185093781bf6628bbe380a913c24adf772d901baa8276", size = 4570539 }, + { url = "https://files.pythonhosted.org/packages/1f/33/0d744ff518ebe50020b63e5018b8b278efd6a930c1d2eedda7defc42153b/fonttools-4.55.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dd68c87a2bfe37c5b33bcda0fba39b65a353876d3b9006fde3adae31f97b3ef5", size = 4742411 }, + { url = "https://files.pythonhosted.org/packages/7e/6c/2f768652dba6b801f1567fc5d1829cda369bcd6e95e315a91e628f91c702/fonttools-4.55.3-cp310-cp310-win32.whl", hash = "sha256:1bc7ad24ff98846282eef1cbeac05d013c2154f977a79886bb943015d2b1b261", size = 2175132 }, + { url = "https://files.pythonhosted.org/packages/19/d1/4dcd865360fb2c499749a913fe80e41c26e8ae18629d87dfffa3de27e831/fonttools-4.55.3-cp310-cp310-win_amd64.whl", hash = "sha256:b54baf65c52952db65df39fcd4820668d0ef4766c0ccdf32879b77f7c804d5c5", size = 2219430 }, + { url = "https://files.pythonhosted.org/packages/4b/18/14be25545600bd100e5b74a3ac39089b7c1cb403dc513b7ca348be3381bf/fonttools-4.55.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8c4491699bad88efe95772543cd49870cf756b019ad56294f6498982408ab03e", size = 2771005 }, + { url = "https://files.pythonhosted.org/packages/b2/51/2e1a5d3871cd7c2ae2054b54e92604e7d6abc3fd3656e9583c399648fe1c/fonttools-4.55.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5323a22eabddf4b24f66d26894f1229261021dacd9d29e89f7872dd8c63f0b8b", size = 2300654 }, + { url = "https://files.pythonhosted.org/packages/73/1a/50109bb2703bc6f774b52ea081db21edf2a9fa4b6d7485faadf9d1b997e9/fonttools-4.55.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5480673f599ad410695ca2ddef2dfefe9df779a9a5cda89503881e503c9c7d90", size = 4877541 }, + { url = "https://files.pythonhosted.org/packages/5d/52/c0b9857fa075da1b8806c5dc2d8342918a8cc2065fd14fbddb3303282693/fonttools-4.55.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da9da6d65cd7aa6b0f806556f4985bcbf603bf0c5c590e61b43aa3e5a0f822d0", size = 4906304 }, + { url = "https://files.pythonhosted.org/packages/0b/1b/55f85c7e962d295e456d5209581c919620ee3e877b95cd86245187a5050f/fonttools-4.55.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e894b5bd60d9f473bed7a8f506515549cc194de08064d829464088d23097331b", size = 4888087 }, + { url = "https://files.pythonhosted.org/packages/83/13/6f2809c612ea2ac51391f92468ff861c63473601530fca96458b453212bf/fonttools-4.55.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:aee3b57643827e237ff6ec6d28d9ff9766bd8b21e08cd13bff479e13d4b14765", size = 5056958 }, + { url = "https://files.pythonhosted.org/packages/c1/28/d0ea9e872fa4208b9dfca686e1dd9ca22f6c9ef33ecff2f0ebc2dbe7c29b/fonttools-4.55.3-cp311-cp311-win32.whl", hash = "sha256:eb6ca911c4c17eb51853143624d8dc87cdcdf12a711fc38bf5bd21521e79715f", size = 2173939 }, + { url = "https://files.pythonhosted.org/packages/be/36/d74ae1020bc41a1dff3e6f5a99f646563beecb97e386d27abdac3ba07650/fonttools-4.55.3-cp311-cp311-win_amd64.whl", hash = "sha256:6314bf82c54c53c71805318fcf6786d986461622dd926d92a465199ff54b1b72", size = 2220363 }, + { url = "https://files.pythonhosted.org/packages/89/58/fbcf5dff7e3ea844bb00c4d806ca1e339e1f2dce5529633bf4842c0c9a1f/fonttools-4.55.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f9e736f60f4911061235603a6119e72053073a12c6d7904011df2d8fad2c0e35", size = 2765380 }, + { url = "https://files.pythonhosted.org/packages/81/dd/da6e329e51919b4f421c8738f3497e2ab08c168e76aaef7b6d5351862bdf/fonttools-4.55.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7a8aa2c5e5b8b3bcb2e4538d929f6589a5c6bdb84fd16e2ed92649fb5454f11c", size = 2297940 }, + { url = "https://files.pythonhosted.org/packages/00/44/f5ee560858425c99ef07e04919e736db09d6416408e5a8d3bbfb4a6623fd/fonttools-4.55.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07f8288aacf0a38d174445fc78377a97fb0b83cfe352a90c9d9c1400571963c7", size = 4793327 }, + { url = "https://files.pythonhosted.org/packages/24/da/0a001926d791c55e29ac3c52964957a20dbc1963615446b568b7432891c3/fonttools-4.55.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8d5e8916c0970fbc0f6f1bece0063363bb5857a7f170121a4493e31c3db3314", size = 4865624 }, + { url = "https://files.pythonhosted.org/packages/3d/d8/1edd8b13a427a9fb6418373437caa586c0caa57f260af8e0548f4d11e340/fonttools-4.55.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ae3b6600565b2d80b7c05acb8e24d2b26ac407b27a3f2e078229721ba5698427", size = 4774166 }, + { url = "https://files.pythonhosted.org/packages/9c/ec/ade054097976c3d6debc9032e09a351505a0196aa5493edf021be376f75e/fonttools-4.55.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:54153c49913f45065c8d9e6d0c101396725c5621c8aee744719300f79771d75a", size = 5001832 }, + { url = "https://files.pythonhosted.org/packages/e2/cd/233f0e31ad799bb91fc78099c8b4e5ec43b85a131688519640d6bae46f6a/fonttools-4.55.3-cp312-cp312-win32.whl", hash = "sha256:827e95fdbbd3e51f8b459af5ea10ecb4e30af50221ca103bea68218e9615de07", size = 2162228 }, + { url = "https://files.pythonhosted.org/packages/46/45/a498b5291f6c0d91b2394b1ed7447442a57d1c9b9cf8f439aee3c316a56e/fonttools-4.55.3-cp312-cp312-win_amd64.whl", hash = "sha256:e6e8766eeeb2de759e862004aa11a9ea3d6f6d5ec710551a88b476192b64fd54", size = 2209118 }, + { url = "https://files.pythonhosted.org/packages/7c/2e/3481bd2f8a501146ceb60fa98e54ac91d3589d22dd0b3605d26d65280f3f/fonttools-4.55.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bdcc9f04b36c6c20978d3f060e5323a43f6222accc4e7fcbef3f428e216d96af", size = 2772598 }, + { url = "https://files.pythonhosted.org/packages/ea/29/532e428003bdd98f728aff4a48ae91d6ba44eb0b7b4f30e746ad18bf56b6/fonttools-4.55.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c3ca99e0d460eff46e033cd3992a969658c3169ffcd533e0a39c63a38beb6831", size = 2300985 }, + { url = "https://files.pythonhosted.org/packages/a4/51/738a32b3f4968b614657d945c5574897c773e5efeae48bdf965480fbd963/fonttools-4.55.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22f38464daa6cdb7b6aebd14ab06609328fe1e9705bb0fcc7d1e69de7109ee02", size = 4579674 }, + { url = "https://files.pythonhosted.org/packages/6b/77/9893aa413e7d839e292685bae0749a319eacba9470c85253529d35248075/fonttools-4.55.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed63959d00b61959b035c7d47f9313c2c1ece090ff63afea702fe86de00dbed4", size = 4627321 }, + { url = "https://files.pythonhosted.org/packages/5d/5b/1f5f813264e7b09f50fdc2ba4cd20ad7136d671c70f022be41e0ad6a3abb/fonttools-4.55.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5e8d657cd7326eeaba27de2740e847c6b39dde2f8d7cd7cc56f6aad404ddf0bd", size = 4573730 }, + { url = "https://files.pythonhosted.org/packages/a7/bf/a68606cbc37b04d604799e039614f145fbf267cb46c46353c647b6471d75/fonttools-4.55.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:fb594b5a99943042c702c550d5494bdd7577f6ef19b0bc73877c948a63184a32", size = 4742326 }, + { url = "https://files.pythonhosted.org/packages/bb/c0/2779d167e6b8def68f31cf70e077e91afba10a46e330f22cc3ee206086d1/fonttools-4.55.3-cp39-cp39-win32.whl", hash = "sha256:dc5294a3d5c84226e3dbba1b6f61d7ad813a8c0238fceea4e09aa04848c3d851", size = 2175672 }, + { url = "https://files.pythonhosted.org/packages/e3/05/b0ddd91049475035cc341d1e6fa88a04340f2ef6c38192ca45e4e2b3331f/fonttools-4.55.3-cp39-cp39-win_amd64.whl", hash = "sha256:aedbeb1db64496d098e6be92b2e63b5fac4e53b1b92032dfc6988e1ea9134a4d", size = 2219942 }, + { url = "https://files.pythonhosted.org/packages/99/3b/406d17b1f63e04a82aa621936e6e1c53a8c05458abd66300ac85ea7f9ae9/fonttools-4.55.3-py3-none-any.whl", hash = "sha256:f412604ccbeee81b091b420272841e5ec5ef68967a9790e80bffd0e30b8e2977", size = 1111638 }, +] + +[[package]] +name = "gitdb" +version = "4.0.11" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "smmap" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/0d/bbb5b5ee188dec84647a4664f3e11b06ade2bde568dbd489d9d64adef8ed/gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b", size = 394469 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/5b/8f0c4a5bb9fd491c277c21eff7ccae71b47d43c4446c9d0c6cff2fe8c2c4/gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4", size = 62721 }, +] + +[[package]] +name = "gitpython" +version = "3.1.43" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gitdb" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b6/a1/106fd9fa2dd989b6fb36e5893961f82992cf676381707253e0bf93eb1662/GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c", size = 214149 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/bd/cc3a402a6439c15c3d4294333e13042b915bbeab54edc457c723931fed3f/GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff", size = 207337 }, +] + +[[package]] +name = "h11" +version = "0.14.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, +] + +[[package]] +name = "htmlmin" +version = "0.1.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b3/e7/fcd59e12169de19f0131ff2812077f964c6b960e7c09804d30a7bf2ab461/htmlmin-0.1.12.tar.gz", hash = "sha256:50c1ef4630374a5d723900096a961cff426dff46b48f34d194a81bbe14eca178", size = 19940 } + +[[package]] +name = "httpcore" +version = "1.0.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, +] + +[[package]] +name = "httpx" +version = "0.28.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "certifi" }, + { name = "httpcore" }, + { name = "idna" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517 }, +] + +[[package]] +name = "identify" +version = "2.6.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/49/a5/7de3053524ee006b91099968d7ecb2e0b420f7ae728094394c33e8a2a2b9/identify-2.6.4.tar.gz", hash = "sha256:285a7d27e397652e8cafe537a6cc97dd470a970f48fb2e9d979aa38eae5513ac", size = 99209 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/9d/52f036403ae86474804f699c0d084b4b071e333a390b20269bb8accc65e0/identify-2.6.4-py2.py3-none-any.whl", hash = "sha256:993b0f01b97e0568c179bb9196391ff391bfb88a99099dbf5ce392b68f42d0af", size = 99072 }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, +] + +[[package]] +name = "imagehash" +version = "4.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "pillow" }, + { name = "pywavelets", version = "1.6.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "pywavelets", version = "1.8.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "scipy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6c/f4/9821fe373a4788bca43f00491b008f930de0b12a60ff631852d1f984b966/ImageHash-4.3.1.tar.gz", hash = "sha256:7038d1b7f9e0585beb3dd8c0a956f02b95a346c0b5f24a9e8cc03ebadaf0aa70", size = 296989 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2d/b4/19a746a986c6e38595fa5947c028b1b8e287773dcad766e648897ad2a4cf/ImageHash-4.3.1-py2.py3-none-any.whl", hash = "sha256:5ad9a5cde14fe255745a8245677293ac0d67f09c330986a351f34b614ba62fb5", size = 296543 }, +] + +[[package]] +name = "importlib-metadata" +version = "6.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ee/eb/58c2ab27ee628ad801f56d4017fe62afab0293116f6d0b08f1d5bd46e06f/importlib_metadata-6.11.0.tar.gz", hash = "sha256:1231cf92d825c9e03cfc4da076a16de6422c863558229ea0b22b675657463443", size = 54593 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/59/9b/ecce94952ab5ea74c31dcf9ccf78ccd484eebebef06019bf8cb579ab4519/importlib_metadata-6.11.0-py3-none-any.whl", hash = "sha256:f0afba6205ad8f8947c7d338b5342d5db2afbfd82f9cbef7879a9539cc12eb9b", size = 23427 }, +] + +[[package]] +name = "importlib-resources" +version = "6.4.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/be/f3e8c6081b684f176b761e6a2fef02a0be939740ed6f54109a2951d806f3/importlib_resources-6.4.5.tar.gz", hash = "sha256:980862a1d16c9e147a59603677fa2aa5fd82b87f223b6cb870695bcfce830065", size = 43372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e1/6a/4604f9ae2fa62ef47b9de2fa5ad599589d28c9fd1d335f32759813dfa91e/importlib_resources-6.4.5-py3-none-any.whl", hash = "sha256:ac29d5f956f01d5e4bb63102a5a19957f1b9175e45649977264a1416783bb717", size = 36115 }, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892 }, +] + +[[package]] +name = "isodate" +version = "0.6.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/db/7a/c0a56c7d56c7fa723988f122fa1f1ccf8c5c4ccc48efad0d214b49e5b1af/isodate-0.6.1.tar.gz", hash = "sha256:48c5881de7e8b0a0d648cb024c8062dc84e7b840ed81e864c7614fd3c127bde9", size = 28443 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/85/7882d311924cbcfc70b1890780763e36ff0b140c7e51c110fc59a532f087/isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96", size = 41722 }, +] + +[[package]] +name = "isort" +version = "5.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/f9/c1eb8635a24e87ade2efce21e3ce8cd6b8630bb685ddc9cdaca1349b2eb5/isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", size = 175303 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/b3/8def84f539e7d2289a02f0524b944b15d7c75dab7628bedf1c4f0992029c/isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6", size = 92310 }, +] + +[[package]] +name = "jinja2" +version = "3.1.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/af/92/b3130cbbf5591acf9ade8708c365f3238046ac7cb8ccba6e81abccb0ccff/jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb", size = 244674 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bd/0f/2ba5fbcd631e3e88689309dbe978c5769e883e4b84ebfe7da30b43275c5a/jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb", size = 134596 }, +] + +[[package]] +name = "jiter" +version = "0.8.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/70/90bc7bd3932e651486861df5c8ffea4ca7c77d28e8532ddefe2abc561a53/jiter-0.8.2.tar.gz", hash = "sha256:cd73d3e740666d0e639f678adb176fad25c1bcbdae88d8d7b857e1783bb4212d", size = 163007 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/f3/8c11e0e87bd5934c414f9b1cfae3cbfd4a938d4669d57cb427e1c4d11a7f/jiter-0.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ca8577f6a413abe29b079bc30f907894d7eb07a865c4df69475e868d73e71c7b", size = 303381 }, + { url = "https://files.pythonhosted.org/packages/ea/28/4cd3f0bcbf40e946bc6a62a82c951afc386a25673d3d8d5ee461f1559bbe/jiter-0.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b25bd626bde7fb51534190c7e3cb97cee89ee76b76d7585580e22f34f5e3f393", size = 311718 }, + { url = "https://files.pythonhosted.org/packages/0d/17/57acab00507e60bd954eaec0837d9d7b119b4117ff49b8a62f2b646f32ed/jiter-0.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5c826a221851a8dc028eb6d7d6429ba03184fa3c7e83ae01cd6d3bd1d4bd17d", size = 335465 }, + { url = "https://files.pythonhosted.org/packages/74/b9/1a3ddd2bc95ae17c815b021521020f40c60b32137730126bada962ef32b4/jiter-0.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d35c864c2dff13dfd79fb070fc4fc6235d7b9b359efe340e1261deb21b9fcb66", size = 355570 }, + { url = "https://files.pythonhosted.org/packages/78/69/6d29e2296a934199a7d0dde673ecccf98c9c8db44caf0248b3f2b65483cb/jiter-0.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f557c55bc2b7676e74d39d19bcb8775ca295c7a028246175d6a8b431e70835e5", size = 381383 }, + { url = "https://files.pythonhosted.org/packages/22/d7/fbc4c3fb1bf65f9be22a32759b539f88e897aeb13fe84ab0266e4423487a/jiter-0.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:580ccf358539153db147e40751a0b41688a5ceb275e6f3e93d91c9467f42b2e3", size = 390454 }, + { url = "https://files.pythonhosted.org/packages/4d/a0/3993cda2e267fe679b45d0bcc2cef0b4504b0aa810659cdae9737d6bace9/jiter-0.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af102d3372e917cffce49b521e4c32c497515119dc7bd8a75665e90a718bbf08", size = 345039 }, + { url = "https://files.pythonhosted.org/packages/b9/ef/69c18562b4c09ce88fab5df1dcaf643f6b1a8b970b65216e7221169b81c4/jiter-0.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cadcc978f82397d515bb2683fc0d50103acff2a180552654bb92d6045dec2c49", size = 376200 }, + { url = "https://files.pythonhosted.org/packages/4d/17/0b5a8de46a6ab4d836f70934036278b49b8530c292b29dde3483326d4555/jiter-0.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ba5bdf56969cad2019d4e8ffd3f879b5fdc792624129741d3d83fc832fef8c7d", size = 511158 }, + { url = "https://files.pythonhosted.org/packages/6c/b2/c401a0a2554b36c9e6d6e4876b43790d75139cf3936f0222e675cbc23451/jiter-0.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3b94a33a241bee9e34b8481cdcaa3d5c2116f575e0226e421bed3f7a6ea71cff", size = 503956 }, + { url = "https://files.pythonhosted.org/packages/d4/02/a0291ed7d72c0ac130f172354ee3cf0b2556b69584de391463a8ee534f40/jiter-0.8.2-cp310-cp310-win32.whl", hash = "sha256:6e5337bf454abddd91bd048ce0dca5134056fc99ca0205258766db35d0a2ea43", size = 202846 }, + { url = "https://files.pythonhosted.org/packages/ad/20/8c988831ae4bf437e29f1671e198fc99ba8fe49f2895f23789acad1d1811/jiter-0.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:4a9220497ca0cb1fe94e3f334f65b9b5102a0b8147646118f020d8ce1de70105", size = 204414 }, + { url = "https://files.pythonhosted.org/packages/cb/b0/c1a7caa7f9dc5f1f6cfa08722867790fe2d3645d6e7170ca280e6e52d163/jiter-0.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2dd61c5afc88a4fda7d8b2cf03ae5947c6ac7516d32b7a15bf4b49569a5c076b", size = 303666 }, + { url = "https://files.pythonhosted.org/packages/f5/97/0468bc9eeae43079aaa5feb9267964e496bf13133d469cfdc135498f8dd0/jiter-0.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a6c710d657c8d1d2adbbb5c0b0c6bfcec28fd35bd6b5f016395f9ac43e878a15", size = 311934 }, + { url = "https://files.pythonhosted.org/packages/e5/69/64058e18263d9a5f1e10f90c436853616d5f047d997c37c7b2df11b085ec/jiter-0.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9584de0cd306072635fe4b89742bf26feae858a0683b399ad0c2509011b9dc0", size = 335506 }, + { url = "https://files.pythonhosted.org/packages/9d/14/b747f9a77b8c0542141d77ca1e2a7523e854754af2c339ac89a8b66527d6/jiter-0.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5a90a923338531b7970abb063cfc087eebae6ef8ec8139762007188f6bc69a9f", size = 355849 }, + { url = "https://files.pythonhosted.org/packages/53/e2/98a08161db7cc9d0e39bc385415890928ff09709034982f48eccfca40733/jiter-0.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21974d246ed0181558087cd9f76e84e8321091ebfb3a93d4c341479a736f099", size = 381700 }, + { url = "https://files.pythonhosted.org/packages/7a/38/1674672954d35bce3b1c9af99d5849f9256ac8f5b672e020ac7821581206/jiter-0.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32475a42b2ea7b344069dc1e81445cfc00b9d0e3ca837f0523072432332e9f74", size = 389710 }, + { url = "https://files.pythonhosted.org/packages/f8/9b/92f9da9a9e107d019bcf883cd9125fa1690079f323f5a9d5c6986eeec3c0/jiter-0.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b9931fd36ee513c26b5bf08c940b0ac875de175341cbdd4fa3be109f0492586", size = 345553 }, + { url = "https://files.pythonhosted.org/packages/44/a6/6d030003394e9659cd0d7136bbeabd82e869849ceccddc34d40abbbbb269/jiter-0.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ce0820f4a3a59ddced7fce696d86a096d5cc48d32a4183483a17671a61edfddc", size = 376388 }, + { url = "https://files.pythonhosted.org/packages/ad/8d/87b09e648e4aca5f9af89e3ab3cfb93db2d1e633b2f2931ede8dabd9b19a/jiter-0.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ffc86ae5e3e6a93765d49d1ab47b6075a9c978a2b3b80f0f32628f39caa0c88", size = 511226 }, + { url = "https://files.pythonhosted.org/packages/77/95/8008ebe4cdc82eac1c97864a8042ca7e383ed67e0ec17bfd03797045c727/jiter-0.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5127dc1abd809431172bc3fbe8168d6b90556a30bb10acd5ded41c3cfd6f43b6", size = 504134 }, + { url = "https://files.pythonhosted.org/packages/26/0d/3056a74de13e8b2562e4d526de6dac2f65d91ace63a8234deb9284a1d24d/jiter-0.8.2-cp311-cp311-win32.whl", hash = "sha256:66227a2c7b575720c1871c8800d3a0122bb8ee94edb43a5685aa9aceb2782d44", size = 203103 }, + { url = "https://files.pythonhosted.org/packages/4e/1e/7f96b798f356e531ffc0f53dd2f37185fac60fae4d6c612bbbd4639b90aa/jiter-0.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:cde031d8413842a1e7501e9129b8e676e62a657f8ec8166e18a70d94d4682855", size = 206717 }, + { url = "https://files.pythonhosted.org/packages/a1/17/c8747af8ea4e045f57d6cfd6fc180752cab9bc3de0e8a0c9ca4e8af333b1/jiter-0.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e6ec2be506e7d6f9527dae9ff4b7f54e68ea44a0ef6b098256ddf895218a2f8f", size = 302027 }, + { url = "https://files.pythonhosted.org/packages/3c/c1/6da849640cd35a41e91085723b76acc818d4b7d92b0b6e5111736ce1dd10/jiter-0.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76e324da7b5da060287c54f2fabd3db5f76468006c811831f051942bf68c9d44", size = 310326 }, + { url = "https://files.pythonhosted.org/packages/06/99/a2bf660d8ccffee9ad7ed46b4f860d2108a148d0ea36043fd16f4dc37e94/jiter-0.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:180a8aea058f7535d1c84183c0362c710f4750bef66630c05f40c93c2b152a0f", size = 334242 }, + { url = "https://files.pythonhosted.org/packages/a7/5f/cea1c17864828731f11427b9d1ab7f24764dbd9aaf4648a7f851164d2718/jiter-0.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025337859077b41548bdcbabe38698bcd93cfe10b06ff66617a48ff92c9aec60", size = 356654 }, + { url = "https://files.pythonhosted.org/packages/e9/13/62774b7e5e7f5d5043efe1d0f94ead66e6d0f894ae010adb56b3f788de71/jiter-0.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ecff0dc14f409599bbcafa7e470c00b80f17abc14d1405d38ab02e4b42e55b57", size = 379967 }, + { url = "https://files.pythonhosted.org/packages/ec/fb/096b34c553bb0bd3f2289d5013dcad6074948b8d55212aa13a10d44c5326/jiter-0.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ffd9fee7d0775ebaba131f7ca2e2d83839a62ad65e8e02fe2bd8fc975cedeb9e", size = 389252 }, + { url = "https://files.pythonhosted.org/packages/17/61/beea645c0bf398ced8b199e377b61eb999d8e46e053bb285c91c3d3eaab0/jiter-0.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14601dcac4889e0a1c75ccf6a0e4baf70dbc75041e51bcf8d0e9274519df6887", size = 345490 }, + { url = "https://files.pythonhosted.org/packages/d5/df/834aa17ad5dcc3cf0118821da0a0cf1589ea7db9832589278553640366bc/jiter-0.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92249669925bc1c54fcd2ec73f70f2c1d6a817928480ee1c65af5f6b81cdf12d", size = 376991 }, + { url = "https://files.pythonhosted.org/packages/67/80/87d140399d382fb4ea5b3d56e7ecaa4efdca17cd7411ff904c1517855314/jiter-0.8.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:e725edd0929fa79f8349ab4ec7f81c714df51dc4e991539a578e5018fa4a7152", size = 510822 }, + { url = "https://files.pythonhosted.org/packages/5c/37/3394bb47bac1ad2cb0465601f86828a0518d07828a650722e55268cdb7e6/jiter-0.8.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bf55846c7b7a680eebaf9c3c48d630e1bf51bdf76c68a5f654b8524335b0ad29", size = 503730 }, + { url = "https://files.pythonhosted.org/packages/f9/e2/253fc1fa59103bb4e3aa0665d6ceb1818df1cd7bf3eb492c4dad229b1cd4/jiter-0.8.2-cp312-cp312-win32.whl", hash = "sha256:7efe4853ecd3d6110301665a5178b9856be7e2a9485f49d91aa4d737ad2ae49e", size = 203375 }, + { url = "https://files.pythonhosted.org/packages/41/69/6d4bbe66b3b3b4507e47aa1dd5d075919ad242b4b1115b3f80eecd443687/jiter-0.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:83c0efd80b29695058d0fd2fa8a556490dbce9804eac3e281f373bbc99045f6c", size = 204740 }, + { url = "https://files.pythonhosted.org/packages/c9/b2/ed7fbabd21c3cf556d6ea849cee35c74f13a509e668baad8323091e2867e/jiter-0.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:e41e75344acef3fc59ba4765df29f107f309ca9e8eace5baacabd9217e52a5ee", size = 304502 }, + { url = "https://files.pythonhosted.org/packages/75/6e/1386857ac9165c1e9c71031566e7884d8a4f63724ce29ad1ace5bfe1351c/jiter-0.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f22b16b35d5c1df9dfd58843ab2cd25e6bf15191f5a236bed177afade507bfc", size = 300982 }, + { url = "https://files.pythonhosted.org/packages/56/4c/b413977c20bbb359b4d6c91d04f7f36fc525af0b7778119815477fc97242/jiter-0.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f7200b8f7619d36aa51c803fd52020a2dfbea36ffec1b5e22cab11fd34d95a6d", size = 335344 }, + { url = "https://files.pythonhosted.org/packages/b0/59/51b080519938192edd33b4e8d48adb7e9bf9e0d699ec8b91119b9269fc75/jiter-0.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:70bf4c43652cc294040dbb62256c83c8718370c8b93dd93d934b9a7bf6c4f53c", size = 356298 }, + { url = "https://files.pythonhosted.org/packages/72/bb/828db5ea406916d7b2232be31393f782b0f71bcb0b128750c4a028157565/jiter-0.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9d471356dc16f84ed48768b8ee79f29514295c7295cb41e1133ec0b2b8d637d", size = 381703 }, + { url = "https://files.pythonhosted.org/packages/c0/88/45d33a8728733e161e9783c54d8ecca0fc4c1aa74b1cebea1d97917eddc3/jiter-0.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:859e8eb3507894093d01929e12e267f83b1d5f6221099d3ec976f0c995cb6bd9", size = 391281 }, + { url = "https://files.pythonhosted.org/packages/45/3e/142712e0f45c28ad8a678dc8732a78294ce5a36fc694141f772bb827a8f2/jiter-0.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaa58399c01db555346647a907b4ef6d4f584b123943be6ed5588c3f2359c9f4", size = 345553 }, + { url = "https://files.pythonhosted.org/packages/36/42/9b463b59fd22687b6da1afcad6c9adc870464a808208651de73f1dbeda09/jiter-0.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f2d5ed877f089862f4c7aacf3a542627c1496f972a34d0474ce85ee7d939c27", size = 377063 }, + { url = "https://files.pythonhosted.org/packages/83/b3/44b1f5cd2e4eb15757eec341b25399da4c90515bb881ef6636b50a8c08a5/jiter-0.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:03c9df035d4f8d647f8c210ddc2ae0728387275340668fb30d2421e17d9a0841", size = 512543 }, + { url = "https://files.pythonhosted.org/packages/46/4e/c695c803aa2b668c057b2dea1cdd7a884d1a819ce610cec0be9666210bfd/jiter-0.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8bd2a824d08d8977bb2794ea2682f898ad3d8837932e3a74937e93d62ecbb637", size = 505141 }, + { url = "https://files.pythonhosted.org/packages/8e/51/e805b837db056f872db0b7a7a3610b7d764392be696dbe47afa0bea05bf2/jiter-0.8.2-cp39-cp39-win32.whl", hash = "sha256:ca29b6371ebc40e496995c94b988a101b9fbbed48a51190a4461fcb0a68b4a36", size = 203529 }, + { url = "https://files.pythonhosted.org/packages/32/b7/a3cde72c644fd1caf9da07fb38cf2c130f43484d8f91011940b7c4f42c8f/jiter-0.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1c0dfbd1be3cbefc7510102370d86e35d1d53e5a93d48519688b1bf0f761160a", size = 207527 }, +] + +[[package]] +name = "joblib" +version = "1.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/64/33/60135848598c076ce4b231e1b1895170f45fbcaeaa2c9d5e38b04db70c35/joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e", size = 2116621 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/29/df4b9b42f2be0b623cbd5e2140cafcaa2bef0759a00b7b70104dcfe2fb51/joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6", size = 301817 }, +] + +[[package]] +name = "jsonschema" +version = "4.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "jsonschema-specifications" }, + { name = "referencing" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/2e/03362ee4034a4c917f697890ccd4aec0800ccf9ded7f511971c75451deec/jsonschema-4.23.0.tar.gz", hash = "sha256:d71497fef26351a33265337fa77ffeb82423f3ea21283cd9467bb03999266bc4", size = 325778 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/4a/4f9dbeb84e8850557c02365a0eee0649abe5eb1d84af92a25731c6c0f922/jsonschema-4.23.0-py3-none-any.whl", hash = "sha256:fbadb6f8b144a8f8cf9f0b89ba94501d143e50411a1278633f56a7acf7fd5566", size = 88462 }, +] + +[[package]] +name = "jsonschema-specifications" +version = "2024.10.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "referencing" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/10/db/58f950c996c793472e336ff3655b13fbcf1e3b359dcf52dcf3ed3b52c352/jsonschema_specifications-2024.10.1.tar.gz", hash = "sha256:0f38b83639958ce1152d02a7f062902c41c8fd20d558b0c34344292d417ae272", size = 15561 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/0f/8910b19ac0670a0f80ce1008e5e751c4a57e14d2c4c13a482aa6079fa9d6/jsonschema_specifications-2024.10.1-py3-none-any.whl", hash = "sha256:a09a0680616357d9a0ecf05c12ad234479f549239d0f5b55f3deea67475da9bf", size = 18459 }, +] + +[[package]] +name = "kiwisolver" +version = "1.4.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/85/4d/2255e1c76304cbd60b48cee302b66d1dde4468dc5b1160e4b7cb43778f2a/kiwisolver-1.4.7.tar.gz", hash = "sha256:9893ff81bd7107f7b685d3017cc6583daadb4fc26e4a888350df530e41980a60", size = 97286 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/14/fc943dd65268a96347472b4fbe5dcc2f6f55034516f80576cd0dd3a8930f/kiwisolver-1.4.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8a9c83f75223d5e48b0bc9cb1bf2776cf01563e00ade8775ffe13b0b6e1af3a6", size = 122440 }, + { url = "https://files.pythonhosted.org/packages/1e/46/e68fed66236b69dd02fcdb506218c05ac0e39745d696d22709498896875d/kiwisolver-1.4.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58370b1ffbd35407444d57057b57da5d6549d2d854fa30249771775c63b5fe17", size = 65758 }, + { url = "https://files.pythonhosted.org/packages/ef/fa/65de49c85838681fc9cb05de2a68067a683717321e01ddafb5b8024286f0/kiwisolver-1.4.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:aa0abdf853e09aff551db11fce173e2177d00786c688203f52c87ad7fcd91ef9", size = 64311 }, + { url = "https://files.pythonhosted.org/packages/42/9c/cc8d90f6ef550f65443bad5872ffa68f3dee36de4974768628bea7c14979/kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8d53103597a252fb3ab8b5845af04c7a26d5e7ea8122303dd7a021176a87e8b9", size = 1637109 }, + { url = "https://files.pythonhosted.org/packages/55/91/0a57ce324caf2ff5403edab71c508dd8f648094b18cfbb4c8cc0fde4a6ac/kiwisolver-1.4.7-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:88f17c5ffa8e9462fb79f62746428dd57b46eb931698e42e990ad63103f35e6c", size = 1617814 }, + { url = "https://files.pythonhosted.org/packages/12/5d/c36140313f2510e20207708adf36ae4919416d697ee0236b0ddfb6fd1050/kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88a9ca9c710d598fd75ee5de59d5bda2684d9db36a9f50b6125eaea3969c2599", size = 1400881 }, + { url = "https://files.pythonhosted.org/packages/56/d0/786e524f9ed648324a466ca8df86298780ef2b29c25313d9a4f16992d3cf/kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f4d742cb7af1c28303a51b7a27aaee540e71bb8e24f68c736f6f2ffc82f2bf05", size = 1512972 }, + { url = "https://files.pythonhosted.org/packages/67/5a/77851f2f201e6141d63c10a0708e996a1363efaf9e1609ad0441b343763b/kiwisolver-1.4.7-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e28c7fea2196bf4c2f8d46a0415c77a1c480cc0724722f23d7410ffe9842c407", size = 1444787 }, + { url = "https://files.pythonhosted.org/packages/06/5f/1f5eaab84355885e224a6fc8d73089e8713dc7e91c121f00b9a1c58a2195/kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e968b84db54f9d42046cf154e02911e39c0435c9801681e3fc9ce8a3c4130278", size = 2199212 }, + { url = "https://files.pythonhosted.org/packages/b5/28/9152a3bfe976a0ae21d445415defc9d1cd8614b2910b7614b30b27a47270/kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0c18ec74c0472de033e1bebb2911c3c310eef5649133dd0bedf2a169a1b269e5", size = 2346399 }, + { url = "https://files.pythonhosted.org/packages/26/f6/453d1904c52ac3b400f4d5e240ac5fec25263716723e44be65f4d7149d13/kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8f0ea6da6d393d8b2e187e6a5e3fb81f5862010a40c3945e2c6d12ae45cfb2ad", size = 2308688 }, + { url = "https://files.pythonhosted.org/packages/5a/9a/d4968499441b9ae187e81745e3277a8b4d7c60840a52dc9d535a7909fac3/kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:f106407dda69ae456dd1227966bf445b157ccc80ba0dff3802bb63f30b74e895", size = 2445493 }, + { url = "https://files.pythonhosted.org/packages/07/c9/032267192e7828520dacb64dfdb1d74f292765f179e467c1cba97687f17d/kiwisolver-1.4.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:84ec80df401cfee1457063732d90022f93951944b5b58975d34ab56bb150dfb3", size = 2262191 }, + { url = "https://files.pythonhosted.org/packages/6c/ad/db0aedb638a58b2951da46ddaeecf204be8b4f5454df020d850c7fa8dca8/kiwisolver-1.4.7-cp310-cp310-win32.whl", hash = "sha256:71bb308552200fb2c195e35ef05de12f0c878c07fc91c270eb3d6e41698c3bcc", size = 46644 }, + { url = "https://files.pythonhosted.org/packages/12/ca/d0f7b7ffbb0be1e7c2258b53554efec1fd652921f10d7d85045aff93ab61/kiwisolver-1.4.7-cp310-cp310-win_amd64.whl", hash = "sha256:44756f9fd339de0fb6ee4f8c1696cfd19b2422e0d70b4cefc1cc7f1f64045a8c", size = 55877 }, + { url = "https://files.pythonhosted.org/packages/97/6c/cfcc128672f47a3e3c0d918ecb67830600078b025bfc32d858f2e2d5c6a4/kiwisolver-1.4.7-cp310-cp310-win_arm64.whl", hash = "sha256:78a42513018c41c2ffd262eb676442315cbfe3c44eed82385c2ed043bc63210a", size = 48347 }, + { url = "https://files.pythonhosted.org/packages/e9/44/77429fa0a58f941d6e1c58da9efe08597d2e86bf2b2cce6626834f49d07b/kiwisolver-1.4.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d2b0e12a42fb4e72d509fc994713d099cbb15ebf1103545e8a45f14da2dfca54", size = 122442 }, + { url = "https://files.pythonhosted.org/packages/e5/20/8c75caed8f2462d63c7fd65e16c832b8f76cda331ac9e615e914ee80bac9/kiwisolver-1.4.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2a8781ac3edc42ea4b90bc23e7d37b665d89423818e26eb6df90698aa2287c95", size = 65762 }, + { url = "https://files.pythonhosted.org/packages/f4/98/fe010f15dc7230f45bc4cf367b012d651367fd203caaa992fd1f5963560e/kiwisolver-1.4.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:46707a10836894b559e04b0fd143e343945c97fd170d69a2d26d640b4e297935", size = 64319 }, + { url = "https://files.pythonhosted.org/packages/8b/1b/b5d618f4e58c0675654c1e5051bcf42c776703edb21c02b8c74135541f60/kiwisolver-1.4.7-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef97b8df011141c9b0f6caf23b29379f87dd13183c978a30a3c546d2c47314cb", size = 1334260 }, + { url = "https://files.pythonhosted.org/packages/b8/01/946852b13057a162a8c32c4c8d2e9ed79f0bb5d86569a40c0b5fb103e373/kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab58c12a2cd0fc769089e6d38466c46d7f76aced0a1f54c77652446733d2d02", size = 1426589 }, + { url = "https://files.pythonhosted.org/packages/70/d1/c9f96df26b459e15cf8a965304e6e6f4eb291e0f7a9460b4ad97b047561e/kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:803b8e1459341c1bb56d1c5c010406d5edec8a0713a0945851290a7930679b51", size = 1541080 }, + { url = "https://files.pythonhosted.org/packages/d3/73/2686990eb8b02d05f3de759d6a23a4ee7d491e659007dd4c075fede4b5d0/kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9a9e8a507420fe35992ee9ecb302dab68550dedc0da9e2880dd88071c5fb052", size = 1470049 }, + { url = "https://files.pythonhosted.org/packages/a7/4b/2db7af3ed3af7c35f388d5f53c28e155cd402a55432d800c543dc6deb731/kiwisolver-1.4.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18077b53dc3bb490e330669a99920c5e6a496889ae8c63b58fbc57c3d7f33a18", size = 1426376 }, + { url = "https://files.pythonhosted.org/packages/05/83/2857317d04ea46dc5d115f0df7e676997bbd968ced8e2bd6f7f19cfc8d7f/kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6af936f79086a89b3680a280c47ea90b4df7047b5bdf3aa5c524bbedddb9e545", size = 2222231 }, + { url = "https://files.pythonhosted.org/packages/0d/b5/866f86f5897cd4ab6d25d22e403404766a123f138bd6a02ecb2cdde52c18/kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:3abc5b19d24af4b77d1598a585b8a719beb8569a71568b66f4ebe1fb0449460b", size = 2368634 }, + { url = "https://files.pythonhosted.org/packages/c1/ee/73de8385403faba55f782a41260210528fe3273d0cddcf6d51648202d6d0/kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:933d4de052939d90afbe6e9d5273ae05fb836cc86c15b686edd4b3560cc0ee36", size = 2329024 }, + { url = "https://files.pythonhosted.org/packages/a1/e7/cd101d8cd2cdfaa42dc06c433df17c8303d31129c9fdd16c0ea37672af91/kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:65e720d2ab2b53f1f72fb5da5fb477455905ce2c88aaa671ff0a447c2c80e8e3", size = 2468484 }, + { url = "https://files.pythonhosted.org/packages/e1/72/84f09d45a10bc57a40bb58b81b99d8f22b58b2040c912b7eb97ebf625bf2/kiwisolver-1.4.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3bf1ed55088f214ba6427484c59553123fdd9b218a42bbc8c6496d6754b1e523", size = 2284078 }, + { url = "https://files.pythonhosted.org/packages/d2/d4/71828f32b956612dc36efd7be1788980cb1e66bfb3706e6dec9acad9b4f9/kiwisolver-1.4.7-cp311-cp311-win32.whl", hash = "sha256:4c00336b9dd5ad96d0a558fd18a8b6f711b7449acce4c157e7343ba92dd0cf3d", size = 46645 }, + { url = "https://files.pythonhosted.org/packages/a1/65/d43e9a20aabcf2e798ad1aff6c143ae3a42cf506754bcb6a7ed8259c8425/kiwisolver-1.4.7-cp311-cp311-win_amd64.whl", hash = "sha256:929e294c1ac1e9f615c62a4e4313ca1823ba37326c164ec720a803287c4c499b", size = 56022 }, + { url = "https://files.pythonhosted.org/packages/35/b3/9f75a2e06f1b4ca00b2b192bc2b739334127d27f1d0625627ff8479302ba/kiwisolver-1.4.7-cp311-cp311-win_arm64.whl", hash = "sha256:e33e8fbd440c917106b237ef1a2f1449dfbb9b6f6e1ce17c94cd6a1e0d438376", size = 48536 }, + { url = "https://files.pythonhosted.org/packages/97/9c/0a11c714cf8b6ef91001c8212c4ef207f772dd84540104952c45c1f0a249/kiwisolver-1.4.7-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:5360cc32706dab3931f738d3079652d20982511f7c0ac5711483e6eab08efff2", size = 121808 }, + { url = "https://files.pythonhosted.org/packages/f2/d8/0fe8c5f5d35878ddd135f44f2af0e4e1d379e1c7b0716f97cdcb88d4fd27/kiwisolver-1.4.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:942216596dc64ddb25adb215c3c783215b23626f8d84e8eff8d6d45c3f29f75a", size = 65531 }, + { url = "https://files.pythonhosted.org/packages/80/c5/57fa58276dfdfa612241d640a64ca2f76adc6ffcebdbd135b4ef60095098/kiwisolver-1.4.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:48b571ecd8bae15702e4f22d3ff6a0f13e54d3d00cd25216d5e7f658242065ee", size = 63894 }, + { url = "https://files.pythonhosted.org/packages/8b/e9/26d3edd4c4ad1c5b891d8747a4f81b1b0aba9fb9721de6600a4adc09773b/kiwisolver-1.4.7-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad42ba922c67c5f219097b28fae965e10045ddf145d2928bfac2eb2e17673640", size = 1369296 }, + { url = "https://files.pythonhosted.org/packages/b6/67/3f4850b5e6cffb75ec40577ddf54f7b82b15269cc5097ff2e968ee32ea7d/kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:612a10bdae23404a72941a0fc8fa2660c6ea1217c4ce0dbcab8a8f6543ea9e7f", size = 1461450 }, + { url = "https://files.pythonhosted.org/packages/52/be/86cbb9c9a315e98a8dc6b1d23c43cffd91d97d49318854f9c37b0e41cd68/kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e838bba3a3bac0fe06d849d29772eb1afb9745a59710762e4ba3f4cb8424483", size = 1579168 }, + { url = "https://files.pythonhosted.org/packages/0f/00/65061acf64bd5fd34c1f4ae53f20b43b0a017a541f242a60b135b9d1e301/kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:22f499f6157236c19f4bbbd472fa55b063db77a16cd74d49afe28992dff8c258", size = 1507308 }, + { url = "https://files.pythonhosted.org/packages/21/e4/c0b6746fd2eb62fe702118b3ca0cb384ce95e1261cfada58ff693aeec08a/kiwisolver-1.4.7-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:693902d433cf585133699972b6d7c42a8b9f8f826ebcaf0132ff55200afc599e", size = 1464186 }, + { url = "https://files.pythonhosted.org/packages/0a/0f/529d0a9fffb4d514f2782c829b0b4b371f7f441d61aa55f1de1c614c4ef3/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4e77f2126c3e0b0d055f44513ed349038ac180371ed9b52fe96a32aa071a5107", size = 2247877 }, + { url = "https://files.pythonhosted.org/packages/d1/e1/66603ad779258843036d45adcbe1af0d1a889a07af4635f8b4ec7dccda35/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:657a05857bda581c3656bfc3b20e353c232e9193eb167766ad2dc58b56504948", size = 2404204 }, + { url = "https://files.pythonhosted.org/packages/8d/61/de5fb1ca7ad1f9ab7970e340a5b833d735df24689047de6ae71ab9d8d0e7/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4bfa75a048c056a411f9705856abfc872558e33c055d80af6a380e3658766038", size = 2352461 }, + { url = "https://files.pythonhosted.org/packages/ba/d2/0edc00a852e369827f7e05fd008275f550353f1f9bcd55db9363d779fc63/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:34ea1de54beef1c104422d210c47c7d2a4999bdecf42c7b5718fbe59a4cac383", size = 2501358 }, + { url = "https://files.pythonhosted.org/packages/84/15/adc15a483506aec6986c01fb7f237c3aec4d9ed4ac10b756e98a76835933/kiwisolver-1.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:90da3b5f694b85231cf93586dad5e90e2d71b9428f9aad96952c99055582f520", size = 2314119 }, + { url = "https://files.pythonhosted.org/packages/36/08/3a5bb2c53c89660863a5aa1ee236912269f2af8762af04a2e11df851d7b2/kiwisolver-1.4.7-cp312-cp312-win32.whl", hash = "sha256:18e0cca3e008e17fe9b164b55735a325140a5a35faad8de92dd80265cd5eb80b", size = 46367 }, + { url = "https://files.pythonhosted.org/packages/19/93/c05f0a6d825c643779fc3c70876bff1ac221f0e31e6f701f0e9578690d70/kiwisolver-1.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:58cb20602b18f86f83a5c87d3ee1c766a79c0d452f8def86d925e6c60fbf7bfb", size = 55884 }, + { url = "https://files.pythonhosted.org/packages/d2/f9/3828d8f21b6de4279f0667fb50a9f5215e6fe57d5ec0d61905914f5b6099/kiwisolver-1.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:f5a8b53bdc0b3961f8b6125e198617c40aeed638b387913bf1ce78afb1b0be2a", size = 48528 }, + { url = "https://files.pythonhosted.org/packages/11/88/37ea0ea64512997b13d69772db8dcdc3bfca5442cda3a5e4bb943652ee3e/kiwisolver-1.4.7-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3f9362ecfca44c863569d3d3c033dbe8ba452ff8eed6f6b5806382741a1334bd", size = 122449 }, + { url = "https://files.pythonhosted.org/packages/4e/45/5a5c46078362cb3882dcacad687c503089263c017ca1241e0483857791eb/kiwisolver-1.4.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e8df2eb9b2bac43ef8b082e06f750350fbbaf2887534a5be97f6cf07b19d9583", size = 65757 }, + { url = "https://files.pythonhosted.org/packages/8a/be/a6ae58978772f685d48dd2e84460937761c53c4bbd84e42b0336473d9775/kiwisolver-1.4.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f32d6edbc638cde7652bd690c3e728b25332acbadd7cad670cc4a02558d9c417", size = 64312 }, + { url = "https://files.pythonhosted.org/packages/f4/04/18ef6f452d311e1e1eb180c9bf5589187fa1f042db877e6fe443ef10099c/kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e2e6c39bd7b9372b0be21456caab138e8e69cc0fc1190a9dfa92bd45a1e6e904", size = 1626966 }, + { url = "https://files.pythonhosted.org/packages/21/b1/40655f6c3fa11ce740e8a964fa8e4c0479c87d6a7944b95af799c7a55dfe/kiwisolver-1.4.7-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dda56c24d869b1193fcc763f1284b9126550eaf84b88bbc7256e15028f19188a", size = 1607044 }, + { url = "https://files.pythonhosted.org/packages/fd/93/af67dbcfb9b3323bbd2c2db1385a7139d8f77630e4a37bb945b57188eb2d/kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79849239c39b5e1fd906556c474d9b0439ea6792b637511f3fe3a41158d89ca8", size = 1391879 }, + { url = "https://files.pythonhosted.org/packages/40/6f/d60770ef98e77b365d96061d090c0cd9e23418121c55fff188fa4bdf0b54/kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e3bc157fed2a4c02ec468de4ecd12a6e22818d4f09cde2c31ee3226ffbefab2", size = 1504751 }, + { url = "https://files.pythonhosted.org/packages/fa/3a/5f38667d313e983c432f3fcd86932177519ed8790c724e07d77d1de0188a/kiwisolver-1.4.7-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3da53da805b71e41053dc670f9a820d1157aae77b6b944e08024d17bcd51ef88", size = 1436990 }, + { url = "https://files.pythonhosted.org/packages/cb/3b/1520301a47326e6a6043b502647e42892be33b3f051e9791cc8bb43f1a32/kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8705f17dfeb43139a692298cb6637ee2e59c0194538153e83e9ee0c75c2eddde", size = 2191122 }, + { url = "https://files.pythonhosted.org/packages/cf/c4/eb52da300c166239a2233f1f9c4a1b767dfab98fae27681bfb7ea4873cb6/kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:82a5c2f4b87c26bb1a0ef3d16b5c4753434633b83d365cc0ddf2770c93829e3c", size = 2338126 }, + { url = "https://files.pythonhosted.org/packages/1a/cb/42b92fd5eadd708dd9107c089e817945500685f3437ce1fd387efebc6d6e/kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ce8be0466f4c0d585cdb6c1e2ed07232221df101a4c6f28821d2aa754ca2d9e2", size = 2298313 }, + { url = "https://files.pythonhosted.org/packages/4f/eb/be25aa791fe5fc75a8b1e0c965e00f942496bc04635c9aae8035f6b76dcd/kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:409afdfe1e2e90e6ee7fc896f3df9a7fec8e793e58bfa0d052c8a82f99c37abb", size = 2437784 }, + { url = "https://files.pythonhosted.org/packages/c5/22/30a66be7f3368d76ff95689e1c2e28d382383952964ab15330a15d8bfd03/kiwisolver-1.4.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5b9c3f4ee0b9a439d2415012bd1b1cc2df59e4d6a9939f4d669241d30b414327", size = 2253988 }, + { url = "https://files.pythonhosted.org/packages/35/d3/5f2ecb94b5211c8a04f218a76133cc8d6d153b0f9cd0b45fad79907f0689/kiwisolver-1.4.7-cp39-cp39-win32.whl", hash = "sha256:a79ae34384df2b615eefca647a2873842ac3b596418032bef9a7283675962644", size = 46980 }, + { url = "https://files.pythonhosted.org/packages/ef/17/cd10d020578764ea91740204edc6b3236ed8106228a46f568d716b11feb2/kiwisolver-1.4.7-cp39-cp39-win_amd64.whl", hash = "sha256:cf0438b42121a66a3a667de17e779330fc0f20b0d97d59d2f2121e182b0505e4", size = 55847 }, + { url = "https://files.pythonhosted.org/packages/91/84/32232502020bd78d1d12be7afde15811c64a95ed1f606c10456db4e4c3ac/kiwisolver-1.4.7-cp39-cp39-win_arm64.whl", hash = "sha256:764202cc7e70f767dab49e8df52c7455e8de0df5d858fa801a11aa0d882ccf3f", size = 48494 }, + { url = "https://files.pythonhosted.org/packages/ac/59/741b79775d67ab67ced9bb38552da688c0305c16e7ee24bba7a2be253fb7/kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:94252291e3fe68001b1dd747b4c0b3be12582839b95ad4d1b641924d68fd4643", size = 59491 }, + { url = "https://files.pythonhosted.org/packages/58/cc/fb239294c29a5656e99e3527f7369b174dd9cc7c3ef2dea7cb3c54a8737b/kiwisolver-1.4.7-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5b7dfa3b546da08a9f622bb6becdb14b3e24aaa30adba66749d38f3cc7ea9706", size = 57648 }, + { url = "https://files.pythonhosted.org/packages/3b/ef/2f009ac1f7aab9f81efb2d837301d255279d618d27b6015780115ac64bdd/kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd3de6481f4ed8b734da5df134cd5a6a64fe32124fe83dde1e5b5f29fe30b1e6", size = 84257 }, + { url = "https://files.pythonhosted.org/packages/81/e1/c64f50987f85b68b1c52b464bb5bf73e71570c0f7782d626d1eb283ad620/kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a91b5f9f1205845d488c928e8570dcb62b893372f63b8b6e98b863ebd2368ff2", size = 80906 }, + { url = "https://files.pythonhosted.org/packages/fd/71/1687c5c0a0be2cee39a5c9c389e546f9c6e215e46b691d00d9f646892083/kiwisolver-1.4.7-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fa14dbd66b8b8f470d5fc79c089a66185619d31645f9b0773b88b19f7223c4", size = 79951 }, + { url = "https://files.pythonhosted.org/packages/ea/8b/d7497df4a1cae9367adf21665dd1f896c2a7aeb8769ad77b662c5e2bcce7/kiwisolver-1.4.7-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:eb542fe7933aa09d8d8f9d9097ef37532a7df6497819d16efe4359890a2f417a", size = 55715 }, + { url = "https://files.pythonhosted.org/packages/d5/df/ce37d9b26f07ab90880923c94d12a6ff4d27447096b4c849bfc4339ccfdf/kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8b01aac285f91ca889c800042c35ad3b239e704b150cfd3382adfc9dcc780e39", size = 58666 }, + { url = "https://files.pythonhosted.org/packages/b0/d3/e4b04f43bc629ac8e186b77b2b1a251cdfa5b7610fa189dc0db622672ce6/kiwisolver-1.4.7-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:48be928f59a1f5c8207154f935334d374e79f2b5d212826307d072595ad76a2e", size = 57088 }, + { url = "https://files.pythonhosted.org/packages/30/1c/752df58e2d339e670a535514d2db4fe8c842ce459776b8080fbe08ebb98e/kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f37cfe618a117e50d8c240555331160d73d0411422b59b5ee217843d7b693608", size = 84321 }, + { url = "https://files.pythonhosted.org/packages/f0/f8/fe6484e847bc6e238ec9f9828089fb2c0bb53f2f5f3a79351fde5b565e4f/kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:599b5c873c63a1f6ed7eead644a8a380cfbdf5db91dcb6f85707aaab213b1674", size = 80776 }, + { url = "https://files.pythonhosted.org/packages/9b/57/d7163c0379f250ef763aba85330a19feefb5ce6cb541ade853aaba881524/kiwisolver-1.4.7-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:801fa7802e5cfabe3ab0c81a34c323a319b097dfb5004be950482d882f3d7225", size = 79984 }, + { url = "https://files.pythonhosted.org/packages/8c/95/4a103776c265d13b3d2cd24fb0494d4e04ea435a8ef97e1b2c026d43250b/kiwisolver-1.4.7-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0c6c43471bc764fad4bc99c5c2d6d16a676b1abf844ca7c8702bdae92df01ee0", size = 55811 }, +] + +[[package]] +name = "kiwisolver" +version = "1.4.8" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/82/59/7c91426a8ac292e1cdd53a63b6d9439abd573c875c3f92c146767dd33faf/kiwisolver-1.4.8.tar.gz", hash = "sha256:23d5f023bdc8c7e54eb65f03ca5d5bb25b601eac4d7f1a042888a1f45237987e", size = 97538 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/5f/4d8e9e852d98ecd26cdf8eaf7ed8bc33174033bba5e07001b289f07308fd/kiwisolver-1.4.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88c6f252f6816a73b1f8c904f7bbe02fd67c09a69f7cb8a0eecdbf5ce78e63db", size = 124623 }, + { url = "https://files.pythonhosted.org/packages/1d/70/7f5af2a18a76fe92ea14675f8bd88ce53ee79e37900fa5f1a1d8e0b42998/kiwisolver-1.4.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c72941acb7b67138f35b879bbe85be0f6c6a70cab78fe3ef6db9c024d9223e5b", size = 66720 }, + { url = "https://files.pythonhosted.org/packages/c6/13/e15f804a142353aefd089fadc8f1d985561a15358c97aca27b0979cb0785/kiwisolver-1.4.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ce2cf1e5688edcb727fdf7cd1bbd0b6416758996826a8be1d958f91880d0809d", size = 65413 }, + { url = "https://files.pythonhosted.org/packages/ce/6d/67d36c4d2054e83fb875c6b59d0809d5c530de8148846b1370475eeeece9/kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c8bf637892dc6e6aad2bc6d4d69d08764166e5e3f69d469e55427b6ac001b19d", size = 1650826 }, + { url = "https://files.pythonhosted.org/packages/de/c6/7b9bb8044e150d4d1558423a1568e4f227193662a02231064e3824f37e0a/kiwisolver-1.4.8-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:034d2c891f76bd3edbdb3ea11140d8510dca675443da7304205a2eaa45d8334c", size = 1628231 }, + { url = "https://files.pythonhosted.org/packages/b6/38/ad10d437563063eaaedbe2c3540a71101fc7fb07a7e71f855e93ea4de605/kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d47b28d1dfe0793d5e96bce90835e17edf9a499b53969b03c6c47ea5985844c3", size = 1408938 }, + { url = "https://files.pythonhosted.org/packages/52/ce/c0106b3bd7f9e665c5f5bc1e07cc95b5dabd4e08e3dad42dbe2faad467e7/kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb158fe28ca0c29f2260cca8c43005329ad58452c36f0edf298204de32a9a3ed", size = 1422799 }, + { url = "https://files.pythonhosted.org/packages/d0/87/efb704b1d75dc9758087ba374c0f23d3254505edaedd09cf9d247f7878b9/kiwisolver-1.4.8-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5536185fce131780ebd809f8e623bf4030ce1b161353166c49a3c74c287897f", size = 1354362 }, + { url = "https://files.pythonhosted.org/packages/eb/b3/fd760dc214ec9a8f208b99e42e8f0130ff4b384eca8b29dd0efc62052176/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:369b75d40abedc1da2c1f4de13f3482cb99e3237b38726710f4a793432b1c5ff", size = 2222695 }, + { url = "https://files.pythonhosted.org/packages/a2/09/a27fb36cca3fc01700687cc45dae7a6a5f8eeb5f657b9f710f788748e10d/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:641f2ddf9358c80faa22e22eb4c9f54bd3f0e442e038728f500e3b978d00aa7d", size = 2370802 }, + { url = "https://files.pythonhosted.org/packages/3d/c3/ba0a0346db35fe4dc1f2f2cf8b99362fbb922d7562e5f911f7ce7a7b60fa/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d561d2d8883e0819445cfe58d7ddd673e4015c3c57261d7bdcd3710d0d14005c", size = 2334646 }, + { url = "https://files.pythonhosted.org/packages/41/52/942cf69e562f5ed253ac67d5c92a693745f0bed3c81f49fc0cbebe4d6b00/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:1732e065704b47c9afca7ffa272f845300a4eb959276bf6970dc07265e73b605", size = 2467260 }, + { url = "https://files.pythonhosted.org/packages/32/26/2d9668f30d8a494b0411d4d7d4ea1345ba12deb6a75274d58dd6ea01e951/kiwisolver-1.4.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bcb1ebc3547619c3b58a39e2448af089ea2ef44b37988caf432447374941574e", size = 2288633 }, + { url = "https://files.pythonhosted.org/packages/98/99/0dd05071654aa44fe5d5e350729961e7bb535372935a45ac89a8924316e6/kiwisolver-1.4.8-cp310-cp310-win_amd64.whl", hash = "sha256:89c107041f7b27844179ea9c85d6da275aa55ecf28413e87624d033cf1f6b751", size = 71885 }, + { url = "https://files.pythonhosted.org/packages/6c/fc/822e532262a97442989335394d441cd1d0448c2e46d26d3e04efca84df22/kiwisolver-1.4.8-cp310-cp310-win_arm64.whl", hash = "sha256:b5773efa2be9eb9fcf5415ea3ab70fc785d598729fd6057bea38d539ead28271", size = 65175 }, + { url = "https://files.pythonhosted.org/packages/da/ed/c913ee28936c371418cb167b128066ffb20bbf37771eecc2c97edf8a6e4c/kiwisolver-1.4.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a4d3601908c560bdf880f07d94f31d734afd1bb71e96585cace0e38ef44c6d84", size = 124635 }, + { url = "https://files.pythonhosted.org/packages/4c/45/4a7f896f7467aaf5f56ef093d1f329346f3b594e77c6a3c327b2d415f521/kiwisolver-1.4.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:856b269c4d28a5c0d5e6c1955ec36ebfd1651ac00e1ce0afa3e28da95293b561", size = 66717 }, + { url = "https://files.pythonhosted.org/packages/5f/b4/c12b3ac0852a3a68f94598d4c8d569f55361beef6159dce4e7b624160da2/kiwisolver-1.4.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c2b9a96e0f326205af81a15718a9073328df1173a2619a68553decb7097fd5d7", size = 65413 }, + { url = "https://files.pythonhosted.org/packages/a9/98/1df4089b1ed23d83d410adfdc5947245c753bddfbe06541c4aae330e9e70/kiwisolver-1.4.8-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c5020c83e8553f770cb3b5fc13faac40f17e0b205bd237aebd21d53d733adb03", size = 1343994 }, + { url = "https://files.pythonhosted.org/packages/8d/bf/b4b169b050c8421a7c53ea1ea74e4ef9c335ee9013216c558a047f162d20/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dace81d28c787956bfbfbbfd72fdcef014f37d9b48830829e488fdb32b49d954", size = 1434804 }, + { url = "https://files.pythonhosted.org/packages/66/5a/e13bd341fbcf73325ea60fdc8af752addf75c5079867af2e04cc41f34434/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11e1022b524bd48ae56c9b4f9296bce77e15a2e42a502cceba602f804b32bb79", size = 1450690 }, + { url = "https://files.pythonhosted.org/packages/9b/4f/5955dcb376ba4a830384cc6fab7d7547bd6759fe75a09564910e9e3bb8ea/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b9b4d2892fefc886f30301cdd80debd8bb01ecdf165a449eb6e78f79f0fabd6", size = 1376839 }, + { url = "https://files.pythonhosted.org/packages/3a/97/5edbed69a9d0caa2e4aa616ae7df8127e10f6586940aa683a496c2c280b9/kiwisolver-1.4.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a96c0e790ee875d65e340ab383700e2b4891677b7fcd30a699146f9384a2bb0", size = 1435109 }, + { url = "https://files.pythonhosted.org/packages/13/fc/e756382cb64e556af6c1809a1bbb22c141bbc2445049f2da06b420fe52bf/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:23454ff084b07ac54ca8be535f4174170c1094a4cff78fbae4f73a4bcc0d4dab", size = 2245269 }, + { url = "https://files.pythonhosted.org/packages/76/15/e59e45829d7f41c776d138245cabae6515cb4eb44b418f6d4109c478b481/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:87b287251ad6488e95b4f0b4a79a6d04d3ea35fde6340eb38fbd1ca9cd35bbbc", size = 2393468 }, + { url = "https://files.pythonhosted.org/packages/e9/39/483558c2a913ab8384d6e4b66a932406f87c95a6080112433da5ed668559/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:b21dbe165081142b1232a240fc6383fd32cdd877ca6cc89eab93e5f5883e1c25", size = 2355394 }, + { url = "https://files.pythonhosted.org/packages/01/aa/efad1fbca6570a161d29224f14b082960c7e08268a133fe5dc0f6906820e/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:768cade2c2df13db52475bd28d3a3fac8c9eff04b0e9e2fda0f3760f20b3f7fc", size = 2490901 }, + { url = "https://files.pythonhosted.org/packages/c9/4f/15988966ba46bcd5ab9d0c8296914436720dd67fca689ae1a75b4ec1c72f/kiwisolver-1.4.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d47cfb2650f0e103d4bf68b0b5804c68da97272c84bb12850d877a95c056bd67", size = 2312306 }, + { url = "https://files.pythonhosted.org/packages/2d/27/bdf1c769c83f74d98cbc34483a972f221440703054894a37d174fba8aa68/kiwisolver-1.4.8-cp311-cp311-win_amd64.whl", hash = "sha256:ed33ca2002a779a2e20eeb06aea7721b6e47f2d4b8a8ece979d8ba9e2a167e34", size = 71966 }, + { url = "https://files.pythonhosted.org/packages/4a/c9/9642ea855604aeb2968a8e145fc662edf61db7632ad2e4fb92424be6b6c0/kiwisolver-1.4.8-cp311-cp311-win_arm64.whl", hash = "sha256:16523b40aab60426ffdebe33ac374457cf62863e330a90a0383639ce14bf44b2", size = 65311 }, + { url = "https://files.pythonhosted.org/packages/fc/aa/cea685c4ab647f349c3bc92d2daf7ae34c8e8cf405a6dcd3a497f58a2ac3/kiwisolver-1.4.8-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:d6af5e8815fd02997cb6ad9bbed0ee1e60014438ee1a5c2444c96f87b8843502", size = 124152 }, + { url = "https://files.pythonhosted.org/packages/c5/0b/8db6d2e2452d60d5ebc4ce4b204feeb16176a851fd42462f66ade6808084/kiwisolver-1.4.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bade438f86e21d91e0cf5dd7c0ed00cda0f77c8c1616bd83f9fc157fa6760d31", size = 66555 }, + { url = "https://files.pythonhosted.org/packages/60/26/d6a0db6785dd35d3ba5bf2b2df0aedc5af089962c6eb2cbf67a15b81369e/kiwisolver-1.4.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b83dc6769ddbc57613280118fb4ce3cd08899cc3369f7d0e0fab518a7cf37fdb", size = 65067 }, + { url = "https://files.pythonhosted.org/packages/c9/ed/1d97f7e3561e09757a196231edccc1bcf59d55ddccefa2afc9c615abd8e0/kiwisolver-1.4.8-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:111793b232842991be367ed828076b03d96202c19221b5ebab421ce8bcad016f", size = 1378443 }, + { url = "https://files.pythonhosted.org/packages/29/61/39d30b99954e6b46f760e6289c12fede2ab96a254c443639052d1b573fbc/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:257af1622860e51b1a9d0ce387bf5c2c4f36a90594cb9514f55b074bcc787cfc", size = 1472728 }, + { url = "https://files.pythonhosted.org/packages/0c/3e/804163b932f7603ef256e4a715e5843a9600802bb23a68b4e08c8c0ff61d/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:69b5637c3f316cab1ec1c9a12b8c5f4750a4c4b71af9157645bf32830e39c03a", size = 1478388 }, + { url = "https://files.pythonhosted.org/packages/8a/9e/60eaa75169a154700be74f875a4d9961b11ba048bef315fbe89cb6999056/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:782bb86f245ec18009890e7cb8d13a5ef54dcf2ebe18ed65f795e635a96a1c6a", size = 1413849 }, + { url = "https://files.pythonhosted.org/packages/bc/b3/9458adb9472e61a998c8c4d95cfdfec91c73c53a375b30b1428310f923e4/kiwisolver-1.4.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc978a80a0db3a66d25767b03688f1147a69e6237175c0f4ffffaaedf744055a", size = 1475533 }, + { url = "https://files.pythonhosted.org/packages/e4/7a/0a42d9571e35798de80aef4bb43a9b672aa7f8e58643d7bd1950398ffb0a/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:36dbbfd34838500a31f52c9786990d00150860e46cd5041386f217101350f0d3", size = 2268898 }, + { url = "https://files.pythonhosted.org/packages/d9/07/1255dc8d80271400126ed8db35a1795b1a2c098ac3a72645075d06fe5c5d/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:eaa973f1e05131de5ff3569bbba7f5fd07ea0595d3870ed4a526d486fe57fa1b", size = 2425605 }, + { url = "https://files.pythonhosted.org/packages/84/df/5a3b4cf13780ef6f6942df67b138b03b7e79e9f1f08f57c49957d5867f6e/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a66f60f8d0c87ab7f59b6fb80e642ebb29fec354a4dfad687ca4092ae69d04f4", size = 2375801 }, + { url = "https://files.pythonhosted.org/packages/8f/10/2348d068e8b0f635c8c86892788dac7a6b5c0cb12356620ab575775aad89/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:858416b7fb777a53f0c59ca08190ce24e9abbd3cffa18886a5781b8e3e26f65d", size = 2520077 }, + { url = "https://files.pythonhosted.org/packages/32/d8/014b89fee5d4dce157d814303b0fce4d31385a2af4c41fed194b173b81ac/kiwisolver-1.4.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:085940635c62697391baafaaeabdf3dd7a6c3643577dde337f4d66eba021b2b8", size = 2338410 }, + { url = "https://files.pythonhosted.org/packages/bd/72/dfff0cc97f2a0776e1c9eb5bef1ddfd45f46246c6533b0191887a427bca5/kiwisolver-1.4.8-cp312-cp312-win_amd64.whl", hash = "sha256:01c3d31902c7db5fb6182832713d3b4122ad9317c2c5877d0539227d96bb2e50", size = 71853 }, + { url = "https://files.pythonhosted.org/packages/dc/85/220d13d914485c0948a00f0b9eb419efaf6da81b7d72e88ce2391f7aed8d/kiwisolver-1.4.8-cp312-cp312-win_arm64.whl", hash = "sha256:a3c44cb68861de93f0c4a8175fbaa691f0aa22550c331fefef02b618a9dcb476", size = 65424 }, + { url = "https://files.pythonhosted.org/packages/1f/f9/ae81c47a43e33b93b0a9819cac6723257f5da2a5a60daf46aa5c7226ea85/kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e7a019419b7b510f0f7c9dceff8c5eae2392037eae483a7f9162625233802b0a", size = 60403 }, + { url = "https://files.pythonhosted.org/packages/58/ca/f92b5cb6f4ce0c1ebfcfe3e2e42b96917e16f7090e45b21102941924f18f/kiwisolver-1.4.8-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:286b18e86682fd2217a48fc6be6b0f20c1d0ed10958d8dc53453ad58d7be0bf8", size = 58657 }, + { url = "https://files.pythonhosted.org/packages/80/28/ae0240f732f0484d3a4dc885d055653c47144bdf59b670aae0ec3c65a7c8/kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4191ee8dfd0be1c3666ccbac178c5a05d5f8d689bbe3fc92f3c4abec817f8fe0", size = 84948 }, + { url = "https://files.pythonhosted.org/packages/5d/eb/78d50346c51db22c7203c1611f9b513075f35c4e0e4877c5dde378d66043/kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7cd2785b9391f2873ad46088ed7599a6a71e762e1ea33e87514b1a441ed1da1c", size = 81186 }, + { url = "https://files.pythonhosted.org/packages/43/f8/7259f18c77adca88d5f64f9a522792e178b2691f3748817a8750c2d216ef/kiwisolver-1.4.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c07b29089b7ba090b6f1a669f1411f27221c3662b3a1b7010e67b59bb5a6f10b", size = 80279 }, + { url = "https://files.pythonhosted.org/packages/3a/1d/50ad811d1c5dae091e4cf046beba925bcae0a610e79ae4c538f996f63ed5/kiwisolver-1.4.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:65ea09a5a3faadd59c2ce96dc7bf0f364986a315949dc6374f04396b0d60e09b", size = 71762 }, +] + +[[package]] +name = "leather" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/6e/48a05e2f7f62a616d675cfee182643f2dd8023bf7429aa326f4bebd629c8/leather-0.4.0.tar.gz", hash = "sha256:f964bec2086f3153a6c16e707f20cb718f811f57af116075f4c0f4805c608b95", size = 43877 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a1/30/9ec597c962c5249ebd5c580386e4b5f2884cd943af42634291ee3b406415/leather-0.4.0-py2.py3-none-any.whl", hash = "sha256:18290bc93749ae39039af5e31e871fcfad74d26c4c3ea28ea4f681f4571b3a2b", size = 30256 }, +] + +[[package]] +name = "llvmlite" +version = "0.43.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9f/3d/f513755f285db51ab363a53e898b85562e950f79a2e6767a364530c2f645/llvmlite-0.43.0.tar.gz", hash = "sha256:ae2b5b5c3ef67354824fb75517c8db5fbe93bc02cd9671f3c62271626bc041d5", size = 157069 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/23/ff/6ca7e98998b573b4bd6566f15c35e5c8bea829663a6df0c7aa55ab559da9/llvmlite-0.43.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a289af9a1687c6cf463478f0fa8e8aa3b6fb813317b0d70bf1ed0759eab6f761", size = 31064408 }, + { url = "https://files.pythonhosted.org/packages/ca/5c/a27f9257f86f0cda3f764ff21d9f4217b9f6a0d45e7a39ecfa7905f524ce/llvmlite-0.43.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d4fd101f571a31acb1559ae1af30f30b1dc4b3186669f92ad780e17c81e91bc", size = 28793153 }, + { url = "https://files.pythonhosted.org/packages/7e/3c/4410f670ad0a911227ea2ecfcba9f672a77cf1924df5280c4562032ec32d/llvmlite-0.43.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d434ec7e2ce3cc8f452d1cd9a28591745de022f931d67be688a737320dfcead", size = 42857276 }, + { url = "https://files.pythonhosted.org/packages/c6/21/2ffbab5714e72f2483207b4a1de79b2eecd9debbf666ff4e7067bcc5c134/llvmlite-0.43.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6912a87782acdff6eb8bf01675ed01d60ca1f2551f8176a300a886f09e836a6a", size = 43871781 }, + { url = "https://files.pythonhosted.org/packages/f2/26/b5478037c453554a61625ef1125f7e12bb1429ae11c6376f47beba9b0179/llvmlite-0.43.0-cp310-cp310-win_amd64.whl", hash = "sha256:14f0e4bf2fd2d9a75a3534111e8ebeb08eda2f33e9bdd6dfa13282afacdde0ed", size = 28123487 }, + { url = "https://files.pythonhosted.org/packages/95/8c/de3276d773ab6ce3ad676df5fab5aac19696b2956319d65d7dd88fb10f19/llvmlite-0.43.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3e8d0618cb9bfe40ac38a9633f2493d4d4e9fcc2f438d39a4e854f39cc0f5f98", size = 31064409 }, + { url = "https://files.pythonhosted.org/packages/ee/e1/38deed89ced4cf378c61e232265cfe933ccde56ae83c901aa68b477d14b1/llvmlite-0.43.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e0a9a1a39d4bf3517f2af9d23d479b4175ead205c592ceeb8b89af48a327ea57", size = 28793149 }, + { url = "https://files.pythonhosted.org/packages/2f/b2/4429433eb2dc8379e2cb582502dca074c23837f8fd009907f78a24de4c25/llvmlite-0.43.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1da416ab53e4f7f3bc8d4eeba36d801cc1894b9fbfbf2022b29b6bad34a7df2", size = 42857277 }, + { url = "https://files.pythonhosted.org/packages/6b/99/5d00a7d671b1ba1751fc9f19d3b36f3300774c6eebe2bcdb5f6191763eb4/llvmlite-0.43.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977525a1e5f4059316b183fb4fd34fa858c9eade31f165427a3977c95e3ee749", size = 43871781 }, + { url = "https://files.pythonhosted.org/packages/20/ab/ed5ed3688c6ba4f0b8d789da19fd8e30a9cf7fc5852effe311bc5aefe73e/llvmlite-0.43.0-cp311-cp311-win_amd64.whl", hash = "sha256:d5bd550001d26450bd90777736c69d68c487d17bf371438f975229b2b8241a91", size = 28107433 }, + { url = "https://files.pythonhosted.org/packages/0b/67/9443509e5d2b6d8587bae3ede5598fa8bd586b1c7701696663ea8af15b5b/llvmlite-0.43.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f99b600aa7f65235a5a05d0b9a9f31150c390f31261f2a0ba678e26823ec38f7", size = 31064409 }, + { url = "https://files.pythonhosted.org/packages/a2/9c/24139d3712d2d352e300c39c0e00d167472c08b3bd350c3c33d72c88ff8d/llvmlite-0.43.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:35d80d61d0cda2d767f72de99450766250560399edc309da16937b93d3b676e7", size = 28793145 }, + { url = "https://files.pythonhosted.org/packages/bf/f1/4c205a48488e574ee9f6505d50e84370a978c90f08dab41a42d8f2c576b6/llvmlite-0.43.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eccce86bba940bae0d8d48ed925f21dbb813519169246e2ab292b5092aba121f", size = 42857276 }, + { url = "https://files.pythonhosted.org/packages/00/5f/323c4d56e8401c50185fd0e875fcf06b71bf825a863699be1eb10aa2a9cb/llvmlite-0.43.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:df6509e1507ca0760787a199d19439cc887bfd82226f5af746d6977bd9f66844", size = 43871781 }, + { url = "https://files.pythonhosted.org/packages/c6/94/dea10e263655ce78d777e78d904903faae39d1fc440762be4a9dc46bed49/llvmlite-0.43.0-cp312-cp312-win_amd64.whl", hash = "sha256:7a2872ee80dcf6b5dbdc838763d26554c2a18aa833d31a2635bff16aafefb9c9", size = 28107442 }, + { url = "https://files.pythonhosted.org/packages/2a/73/12925b1bbb3c2beb6d96f892ef5b4d742c34f00ddb9f4a125e9e87b22f52/llvmlite-0.43.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9cd2a7376f7b3367019b664c21f0c61766219faa3b03731113ead75107f3b66c", size = 31064410 }, + { url = "https://files.pythonhosted.org/packages/cc/61/58c70aa0808a8cba825a7d98cc65bef4801b99328fba80837bfcb5fc767f/llvmlite-0.43.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18e9953c748b105668487b7c81a3e97b046d8abf95c4ddc0cd3c94f4e4651ae8", size = 28793145 }, + { url = "https://files.pythonhosted.org/packages/c8/c6/9324eb5de2ba9d99cbed853d85ba7a318652a48e077797bec27cf40f911d/llvmlite-0.43.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74937acd22dc11b33946b67dca7680e6d103d6e90eeaaaf932603bec6fe7b03a", size = 42857276 }, + { url = "https://files.pythonhosted.org/packages/e0/d0/889e9705107db7b1ec0767b03f15d7b95b4c4f9fdf91928ab1c7e9ffacf6/llvmlite-0.43.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9efc739cc6ed760f795806f67889923f7274276f0eb45092a1473e40d9b867", size = 43871777 }, + { url = "https://files.pythonhosted.org/packages/df/41/73cc26a2634b538cfe813f618c91e7e9960b8c163f8f0c94a2b0f008b9da/llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4", size = 28123489 }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528 }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357 }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393 }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732 }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866 }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964 }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977 }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366 }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091 }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065 }, + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514 }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353 }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392 }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984 }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120 }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032 }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057 }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359 }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306 }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094 }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521 }, + { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274 }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348 }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149 }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118 }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993 }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178 }, + { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319 }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352 }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097 }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601 }, + { url = "https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344 }, + { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389 }, + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607 }, + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728 }, + { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826 }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843 }, + { url = "https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219 }, + { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946 }, + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063 }, + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506 }, +] + +[[package]] +name = "mashumaro" +version = "3.14" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/47/0a450b281bef2d7e97ec02c8e1168d821e283f58e02e6c403b2bb4d73c1c/mashumaro-3.14.tar.gz", hash = "sha256:5ef6f2b963892cbe9a4ceb3441dfbea37f8c3412523f25d42e9b3a7186555f1d", size = 166160 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/35/8d63733a2c12149d0c7663c29bf626bdbeea5f0ff963afe58a42b4810981/mashumaro-3.14-py3-none-any.whl", hash = "sha256:c12a649599a8f7b1a0b35d18f12e678423c3066189f7bc7bd8dd431c5c8132c3", size = 92183 }, +] + +[package.optional-dependencies] +msgpack = [ + { name = "msgpack" }, +] + +[[package]] +name = "matplotlib" +version = "3.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "contourpy", version = "1.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "contourpy", version = "1.3.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "cycler" }, + { name = "fonttools" }, + { name = "importlib-resources", marker = "python_full_version < '3.10'" }, + { name = "kiwisolver", version = "1.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "kiwisolver", version = "1.4.8", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pillow" }, + { name = "pyparsing" }, + { name = "python-dateutil" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/17/1747b4154034befd0ed33b52538f5eb7752d05bb51c5e2a31470c3bc7d52/matplotlib-3.9.4.tar.gz", hash = "sha256:1e00e8be7393cbdc6fedfa8a6fba02cf3e83814b285db1c60b906a023ba41bc3", size = 36106529 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/94/27d2e2c30d54b56c7b764acc1874a909e34d1965a427fc7092bb6a588b63/matplotlib-3.9.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c5fdd7abfb706dfa8d307af64a87f1a862879ec3cd8d0ec8637458f0885b9c50", size = 7885089 }, + { url = "https://files.pythonhosted.org/packages/c6/25/828273307e40a68eb8e9df832b6b2aaad075864fdc1de4b1b81e40b09e48/matplotlib-3.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d89bc4e85e40a71d1477780366c27fb7c6494d293e1617788986f74e2a03d7ff", size = 7770600 }, + { url = "https://files.pythonhosted.org/packages/f2/65/f841a422ec994da5123368d76b126acf4fc02ea7459b6e37c4891b555b83/matplotlib-3.9.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddf9f3c26aae695c5daafbf6b94e4c1a30d6cd617ba594bbbded3b33a1fcfa26", size = 8200138 }, + { url = "https://files.pythonhosted.org/packages/07/06/272aca07a38804d93b6050813de41ca7ab0e29ba7a9dd098e12037c919a9/matplotlib-3.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18ebcf248030173b59a868fda1fe42397253f6698995b55e81e1f57431d85e50", size = 8312711 }, + { url = "https://files.pythonhosted.org/packages/98/37/f13e23b233c526b7e27ad61be0a771894a079e0f7494a10d8d81557e0e9a/matplotlib-3.9.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:974896ec43c672ec23f3f8c648981e8bc880ee163146e0312a9b8def2fac66f5", size = 9090622 }, + { url = "https://files.pythonhosted.org/packages/4f/8c/b1f5bd2bd70e60f93b1b54c4d5ba7a992312021d0ddddf572f9a1a6d9348/matplotlib-3.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:4598c394ae9711cec135639374e70871fa36b56afae17bdf032a345be552a88d", size = 7828211 }, + { url = "https://files.pythonhosted.org/packages/74/4b/65be7959a8fa118a3929b49a842de5b78bb55475236fcf64f3e308ff74a0/matplotlib-3.9.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d4dd29641d9fb8bc4492420c5480398dd40a09afd73aebe4eb9d0071a05fbe0c", size = 7894430 }, + { url = "https://files.pythonhosted.org/packages/e9/18/80f70d91896e0a517b4a051c3fd540daa131630fd75e02e250365353b253/matplotlib-3.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30e5b22e8bcfb95442bf7d48b0d7f3bdf4a450cbf68986ea45fca3d11ae9d099", size = 7780045 }, + { url = "https://files.pythonhosted.org/packages/a2/73/ccb381026e3238c5c25c3609ba4157b2d1a617ec98d65a8b4ee4e1e74d02/matplotlib-3.9.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bb0030d1d447fd56dcc23b4c64a26e44e898f0416276cac1ebc25522e0ac249", size = 8209906 }, + { url = "https://files.pythonhosted.org/packages/ab/33/1648da77b74741c89f5ea95cbf42a291b4b364f2660b316318811404ed97/matplotlib-3.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aca90ed222ac3565d2752b83dbb27627480d27662671e4d39da72e97f657a423", size = 8322873 }, + { url = "https://files.pythonhosted.org/packages/57/d3/8447ba78bc6593c9044c372d1609f8ea10fb1e071e7a9e0747bea74fc16c/matplotlib-3.9.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a181b2aa2906c608fcae72f977a4a2d76e385578939891b91c2550c39ecf361e", size = 9099566 }, + { url = "https://files.pythonhosted.org/packages/23/e1/4f0e237bf349c02ff9d1b6e7109f1a17f745263809b9714a8576dc17752b/matplotlib-3.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:1f6882828231eca17f501c4dcd98a05abb3f03d157fbc0769c6911fe08b6cfd3", size = 7838065 }, + { url = "https://files.pythonhosted.org/packages/1a/2b/c918bf6c19d6445d1cefe3d2e42cb740fb997e14ab19d4daeb6a7ab8a157/matplotlib-3.9.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:dfc48d67e6661378a21c2983200a654b72b5c5cdbd5d2cf6e5e1ece860f0cc70", size = 7891131 }, + { url = "https://files.pythonhosted.org/packages/c1/e5/b4e8fc601ca302afeeabf45f30e706a445c7979a180e3a978b78b2b681a4/matplotlib-3.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:47aef0fab8332d02d68e786eba8113ffd6f862182ea2999379dec9e237b7e483", size = 7776365 }, + { url = "https://files.pythonhosted.org/packages/99/06/b991886c506506476e5d83625c5970c656a491b9f80161458fed94597808/matplotlib-3.9.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fba1f52c6b7dc764097f52fd9ab627b90db452c9feb653a59945de16752e965f", size = 8200707 }, + { url = "https://files.pythonhosted.org/packages/c3/e2/556b627498cb27e61026f2d1ba86a78ad1b836fef0996bef5440e8bc9559/matplotlib-3.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:173ac3748acaac21afcc3fa1633924609ba1b87749006bc25051c52c422a5d00", size = 8313761 }, + { url = "https://files.pythonhosted.org/packages/58/ff/165af33ec766ff818306ea88e91f9f60d2a6ed543be1eb122a98acbf3b0d/matplotlib-3.9.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320edea0cadc07007765e33f878b13b3738ffa9745c5f707705692df70ffe0e0", size = 9095284 }, + { url = "https://files.pythonhosted.org/packages/9f/8b/3d0c7a002db3b1ed702731c2a9a06d78d035f1f2fb0fb936a8e43cc1e9f4/matplotlib-3.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:a4a4cfc82330b27042a7169533da7991e8789d180dd5b3daeaee57d75cd5a03b", size = 7841160 }, + { url = "https://files.pythonhosted.org/packages/56/eb/501b465c9fef28f158e414ea3a417913dc2ac748564c7ed41535f23445b4/matplotlib-3.9.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3c3724d89a387ddf78ff88d2a30ca78ac2b4c89cf37f2db4bd453c34799e933c", size = 7885919 }, + { url = "https://files.pythonhosted.org/packages/da/36/236fbd868b6c91309a5206bd90c3f881f4f44b2d997cd1d6239ef652f878/matplotlib-3.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d5f0a8430ffe23d7e32cfd86445864ccad141797f7d25b7c41759a5b5d17cfd7", size = 7771486 }, + { url = "https://files.pythonhosted.org/packages/e0/4b/105caf2d54d5ed11d9f4335398f5103001a03515f2126c936a752ccf1461/matplotlib-3.9.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6bb0141a21aef3b64b633dc4d16cbd5fc538b727e4958be82a0e1c92a234160e", size = 8201838 }, + { url = "https://files.pythonhosted.org/packages/5d/a7/bb01188fb4013d34d274caf44a2f8091255b0497438e8b6c0a7c1710c692/matplotlib-3.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57aa235109e9eed52e2c2949db17da185383fa71083c00c6c143a60e07e0888c", size = 8314492 }, + { url = "https://files.pythonhosted.org/packages/33/19/02e1a37f7141fc605b193e927d0a9cdf9dc124a20b9e68793f4ffea19695/matplotlib-3.9.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:b18c600061477ccfdd1e6fd050c33d8be82431700f3452b297a56d9ed7037abb", size = 9092500 }, + { url = "https://files.pythonhosted.org/packages/57/68/c2feb4667adbf882ffa4b3e0ac9967f848980d9f8b5bebd86644aa67ce6a/matplotlib-3.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:ef5f2d1b67d2d2145ff75e10f8c008bfbf71d45137c4b648c87193e7dd053eac", size = 7822962 }, + { url = "https://files.pythonhosted.org/packages/0c/22/2ef6a364cd3f565442b0b055e0599744f1e4314ec7326cdaaa48a4d864d7/matplotlib-3.9.4-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:44e0ed786d769d85bc787b0606a53f2d8d2d1d3c8a2608237365e9121c1a338c", size = 7877995 }, + { url = "https://files.pythonhosted.org/packages/87/b8/2737456e566e9f4d94ae76b8aa0d953d9acb847714f9a7ad80184474f5be/matplotlib-3.9.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:09debb9ce941eb23ecdbe7eab972b1c3e0276dcf01688073faff7b0f61d6c6ca", size = 7769300 }, + { url = "https://files.pythonhosted.org/packages/b2/1f/e709c6ec7b5321e6568769baa288c7178e60a93a9da9e682b39450da0e29/matplotlib-3.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc53cf157a657bfd03afab14774d54ba73aa84d42cfe2480c91bd94873952db", size = 8313423 }, + { url = "https://files.pythonhosted.org/packages/5e/b6/5a1f868782cd13f053a679984e222007ecff654a9bfbac6b27a65f4eeb05/matplotlib-3.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ad45da51be7ad02387801fd154ef74d942f49fe3fcd26a64c94842ba7ec0d865", size = 7854624 }, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350 }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, +] + +[[package]] +name = "more-itertools" +version = "10.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/78/65922308c4248e0eb08ebcbe67c95d48615cc6f27854b6f2e57143e9178f/more-itertools-10.5.0.tar.gz", hash = "sha256:5482bfef7849c25dc3c6dd53a6173ae4795da2a41a80faea6700d9f5846c5da6", size = 121020 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/48/7e/3a64597054a70f7c86eb0a7d4fc315b8c1ab932f64883a297bdffeb5f967/more_itertools-10.5.0-py3-none-any.whl", hash = "sha256:037b0d3203ce90cca8ab1defbbdac29d5f993fc20131f3664dc8d6acfa872aef", size = 60952 }, +] + +[[package]] +name = "msgpack" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/d0/7555686ae7ff5731205df1012ede15dd9d927f6227ea151e901c7406af4f/msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e", size = 167260 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/f9/a892a6038c861fa849b11a2bb0502c07bc698ab6ea53359e5771397d883b/msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd", size = 150428 }, + { url = "https://files.pythonhosted.org/packages/df/7a/d174cc6a3b6bb85556e6a046d3193294a92f9a8e583cdbd46dc8a1d7e7f4/msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d", size = 84131 }, + { url = "https://files.pythonhosted.org/packages/08/52/bf4fbf72f897a23a56b822997a72c16de07d8d56d7bf273242f884055682/msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5", size = 81215 }, + { url = "https://files.pythonhosted.org/packages/02/95/dc0044b439b518236aaf012da4677c1b8183ce388411ad1b1e63c32d8979/msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5", size = 371229 }, + { url = "https://files.pythonhosted.org/packages/ff/75/09081792db60470bef19d9c2be89f024d366b1e1973c197bb59e6aabc647/msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e", size = 378034 }, + { url = "https://files.pythonhosted.org/packages/32/d3/c152e0c55fead87dd948d4b29879b0f14feeeec92ef1fd2ec21b107c3f49/msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b", size = 363070 }, + { url = "https://files.pythonhosted.org/packages/d9/2c/82e73506dd55f9e43ac8aa007c9dd088c6f0de2aa19e8f7330e6a65879fc/msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f", size = 359863 }, + { url = "https://files.pythonhosted.org/packages/cb/a0/3d093b248837094220e1edc9ec4337de3443b1cfeeb6e0896af8ccc4cc7a/msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68", size = 368166 }, + { url = "https://files.pythonhosted.org/packages/e4/13/7646f14f06838b406cf5a6ddbb7e8dc78b4996d891ab3b93c33d1ccc8678/msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b", size = 370105 }, + { url = "https://files.pythonhosted.org/packages/67/fa/dbbd2443e4578e165192dabbc6a22c0812cda2649261b1264ff515f19f15/msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044", size = 68513 }, + { url = "https://files.pythonhosted.org/packages/24/ce/c2c8fbf0ded750cb63cbcbb61bc1f2dfd69e16dca30a8af8ba80ec182dcd/msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f", size = 74687 }, + { url = "https://files.pythonhosted.org/packages/b7/5e/a4c7154ba65d93be91f2f1e55f90e76c5f91ccadc7efc4341e6f04c8647f/msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7", size = 150803 }, + { url = "https://files.pythonhosted.org/packages/60/c2/687684164698f1d51c41778c838d854965dd284a4b9d3a44beba9265c931/msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa", size = 84343 }, + { url = "https://files.pythonhosted.org/packages/42/ae/d3adea9bb4a1342763556078b5765e666f8fdf242e00f3f6657380920972/msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701", size = 81408 }, + { url = "https://files.pythonhosted.org/packages/dc/17/6313325a6ff40ce9c3207293aee3ba50104aed6c2c1559d20d09e5c1ff54/msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6", size = 396096 }, + { url = "https://files.pythonhosted.org/packages/a8/a1/ad7b84b91ab5a324e707f4c9761633e357820b011a01e34ce658c1dda7cc/msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59", size = 403671 }, + { url = "https://files.pythonhosted.org/packages/bb/0b/fd5b7c0b308bbf1831df0ca04ec76fe2f5bf6319833646b0a4bd5e9dc76d/msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0", size = 387414 }, + { url = "https://files.pythonhosted.org/packages/f0/03/ff8233b7c6e9929a1f5da3c7860eccd847e2523ca2de0d8ef4878d354cfa/msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e", size = 383759 }, + { url = "https://files.pythonhosted.org/packages/1f/1b/eb82e1fed5a16dddd9bc75f0854b6e2fe86c0259c4353666d7fab37d39f4/msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6", size = 394405 }, + { url = "https://files.pythonhosted.org/packages/90/2e/962c6004e373d54ecf33d695fb1402f99b51832631e37c49273cc564ffc5/msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5", size = 396041 }, + { url = "https://files.pythonhosted.org/packages/f8/20/6e03342f629474414860c48aeffcc2f7f50ddaf351d95f20c3f1c67399a8/msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88", size = 68538 }, + { url = "https://files.pythonhosted.org/packages/aa/c4/5a582fc9a87991a3e6f6800e9bb2f3c82972912235eb9539954f3e9997c7/msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788", size = 74871 }, + { url = "https://files.pythonhosted.org/packages/e1/d6/716b7ca1dbde63290d2973d22bbef1b5032ca634c3ff4384a958ec3f093a/msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d", size = 152421 }, + { url = "https://files.pythonhosted.org/packages/70/da/5312b067f6773429cec2f8f08b021c06af416bba340c912c2ec778539ed6/msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2", size = 85277 }, + { url = "https://files.pythonhosted.org/packages/28/51/da7f3ae4462e8bb98af0d5bdf2707f1b8c65a0d4f496e46b6afb06cbc286/msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420", size = 82222 }, + { url = "https://files.pythonhosted.org/packages/33/af/dc95c4b2a49cff17ce47611ca9ba218198806cad7796c0b01d1e332c86bb/msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2", size = 392971 }, + { url = "https://files.pythonhosted.org/packages/f1/54/65af8de681fa8255402c80eda2a501ba467921d5a7a028c9c22a2c2eedb5/msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39", size = 401403 }, + { url = "https://files.pythonhosted.org/packages/97/8c/e333690777bd33919ab7024269dc3c41c76ef5137b211d776fbb404bfead/msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f", size = 385356 }, + { url = "https://files.pythonhosted.org/packages/57/52/406795ba478dc1c890559dd4e89280fa86506608a28ccf3a72fbf45df9f5/msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247", size = 383028 }, + { url = "https://files.pythonhosted.org/packages/e7/69/053b6549bf90a3acadcd8232eae03e2fefc87f066a5b9fbb37e2e608859f/msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c", size = 391100 }, + { url = "https://files.pythonhosted.org/packages/23/f0/d4101d4da054f04274995ddc4086c2715d9b93111eb9ed49686c0f7ccc8a/msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b", size = 394254 }, + { url = "https://files.pythonhosted.org/packages/1c/12/cf07458f35d0d775ff3a2dc5559fa2e1fcd06c46f1ef510e594ebefdca01/msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b", size = 69085 }, + { url = "https://files.pythonhosted.org/packages/73/80/2708a4641f7d553a63bc934a3eb7214806b5b39d200133ca7f7afb0a53e8/msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f", size = 75347 }, + { url = "https://files.pythonhosted.org/packages/f7/3b/544a5c5886042b80e1f4847a4757af3430f60d106d8d43bb7be72c9e9650/msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1", size = 150713 }, + { url = "https://files.pythonhosted.org/packages/93/af/d63f25bcccd3d6f06fd518ba4a321f34a4370c67b579ca5c70b4a37721b4/msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48", size = 84277 }, + { url = "https://files.pythonhosted.org/packages/92/9b/5c0dfb0009b9f96328664fecb9f8e4e9c8a1ae919e6d53986c1b813cb493/msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c", size = 81357 }, + { url = "https://files.pythonhosted.org/packages/d1/7c/3a9ee6ec9fc3e47681ad39b4d344ee04ff20a776b594fba92d88d8b68356/msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468", size = 371256 }, + { url = "https://files.pythonhosted.org/packages/f7/0a/8a213cecea7b731c540f25212ba5f9a818f358237ac51a44d448bd753690/msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74", size = 377868 }, + { url = "https://files.pythonhosted.org/packages/1b/94/a82b0db0981e9586ed5af77d6cfb343da05d7437dceaae3b35d346498110/msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846", size = 363370 }, + { url = "https://files.pythonhosted.org/packages/93/fc/6c7f0dcc1c913e14861e16eaf494c07fc1dde454ec726ff8cebcf348ae53/msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346", size = 358970 }, + { url = "https://files.pythonhosted.org/packages/1f/c6/e4a04c0089deace870dabcdef5c9f12798f958e2e81d5012501edaff342f/msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b", size = 366358 }, + { url = "https://files.pythonhosted.org/packages/b6/54/7d8317dac590cf16b3e08e3fb74d2081e5af44eb396f0effa13f17777f30/msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8", size = 370336 }, + { url = "https://files.pythonhosted.org/packages/dc/6f/a5a1f43b6566831e9630e5bc5d86034a8884386297302be128402555dde1/msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd", size = 68683 }, + { url = "https://files.pythonhosted.org/packages/5f/e8/2162621e18dbc36e2bc8492fd0e97b3975f5d89fe0472ae6d5f7fbdd8cf7/msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325", size = 74787 }, +] + +[[package]] +name = "multimethod" +version = "1.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ed/f3/930a6dc1d35b2ab65faffa2a75bbcc67f12d8227857188273783df4e5134/multimethod-1.12.tar.gz", hash = "sha256:8db8ef2a8d2a247e3570cc23317680892fdf903d84c8c1053667c8e8f7671a67", size = 17423 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/98/cff14d53a2f2f67d7fe8a4e235a383ee71aba6a1da12aeea24b325d0c72a/multimethod-1.12-py3-none-any.whl", hash = "sha256:fd0c473c43558908d97cc06e4d68e8f69202f167db46f7b4e4058893e7dbdf60", size = 10646 }, +] + +[[package]] +name = "mypy" +version = "1.14.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mypy-extensions" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", size = 3216051 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/7a/87ae2adb31d68402da6da1e5f30c07ea6063e9f09b5e7cfc9dfa44075e74/mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb", size = 11211002 }, + { url = "https://files.pythonhosted.org/packages/e1/23/eada4c38608b444618a132be0d199b280049ded278b24cbb9d3fc59658e4/mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0", size = 10358400 }, + { url = "https://files.pythonhosted.org/packages/43/c9/d6785c6f66241c62fd2992b05057f404237deaad1566545e9f144ced07f5/mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d", size = 12095172 }, + { url = "https://files.pythonhosted.org/packages/c3/62/daa7e787770c83c52ce2aaf1a111eae5893de9e004743f51bfcad9e487ec/mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b", size = 12828732 }, + { url = "https://files.pythonhosted.org/packages/1b/a2/5fb18318a3637f29f16f4e41340b795da14f4751ef4f51c99ff39ab62e52/mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427", size = 13012197 }, + { url = "https://files.pythonhosted.org/packages/28/99/e153ce39105d164b5f02c06c35c7ba958aaff50a2babba7d080988b03fe7/mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f", size = 9780836 }, + { url = "https://files.pythonhosted.org/packages/da/11/a9422850fd506edbcdc7f6090682ecceaf1f87b9dd847f9df79942da8506/mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c", size = 11120432 }, + { url = "https://files.pythonhosted.org/packages/b6/9e/47e450fd39078d9c02d620545b2cb37993a8a8bdf7db3652ace2f80521ca/mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1", size = 10279515 }, + { url = "https://files.pythonhosted.org/packages/01/b5/6c8d33bd0f851a7692a8bfe4ee75eb82b6983a3cf39e5e32a5d2a723f0c1/mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8", size = 12025791 }, + { url = "https://files.pythonhosted.org/packages/f0/4c/e10e2c46ea37cab5c471d0ddaaa9a434dc1d28650078ac1b56c2d7b9b2e4/mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f", size = 12749203 }, + { url = "https://files.pythonhosted.org/packages/88/55/beacb0c69beab2153a0f57671ec07861d27d735a0faff135a494cd4f5020/mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1", size = 12885900 }, + { url = "https://files.pythonhosted.org/packages/a2/75/8c93ff7f315c4d086a2dfcde02f713004357d70a163eddb6c56a6a5eff40/mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae", size = 9777869 }, + { url = "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", size = 11266668 }, + { url = "https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", size = 10254060 }, + { url = "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", size = 11933167 }, + { url = "https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", size = 12864341 }, + { url = "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", size = 12972991 }, + { url = "https://files.pythonhosted.org/packages/6f/17/07815114b903b49b0f2cf7499f1c130e5aa459411596668267535fe9243c/mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", size = 9879016 }, + { url = "https://files.pythonhosted.org/packages/ca/1f/186d133ae2514633f8558e78cd658070ba686c0e9275c5a5c24a1e1f0d67/mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35", size = 11200493 }, + { url = "https://files.pythonhosted.org/packages/af/fc/4842485d034e38a4646cccd1369f6b1ccd7bc86989c52770d75d719a9941/mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc", size = 10357702 }, + { url = "https://files.pythonhosted.org/packages/b4/e6/457b83f2d701e23869cfec013a48a12638f75b9d37612a9ddf99072c1051/mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9", size = 12091104 }, + { url = "https://files.pythonhosted.org/packages/f1/bf/76a569158db678fee59f4fd30b8e7a0d75bcbaeef49edd882a0d63af6d66/mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb", size = 12830167 }, + { url = "https://files.pythonhosted.org/packages/43/bc/0bc6b694b3103de9fed61867f1c8bd33336b913d16831431e7cb48ef1c92/mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60", size = 13013834 }, + { url = "https://files.pythonhosted.org/packages/b0/79/5f5ec47849b6df1e6943d5fd8e6632fbfc04b4fd4acfa5a5a9535d11b4e2/mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c", size = 9781231 }, + { url = "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", size = 2752905 }, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, +] + +[[package]] +name = "narwhals" +version = "1.20.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/f0/3179615405104a90dc31a56fea27c9135f646cf4476e2904fcde125f5287/narwhals-1.20.1.tar.gz", hash = "sha256:ffc6a44c1bc651531198c5f7fc38d349dff898ecfe51c1ef96aaaf429ec4dc19", size = 224429 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3d/a2/c91fedeb24e622b30d240e89e5ecf40cb3c2a8e50f61b5b28f0eb1fbb458/narwhals-1.20.1-py3-none-any.whl", hash = "sha256:77fc10fed31534a4ecf0c5e1e091c91c454cb2fa73937f36be3fcb0c2dfdabc6", size = 262391 }, +] + +[[package]] +name = "networkx" +version = "3.2.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +sdist = { url = "https://files.pythonhosted.org/packages/c4/80/a84676339aaae2f1cfdf9f418701dd634aef9cc76f708ef55c36ff39c3ca/networkx-3.2.1.tar.gz", hash = "sha256:9f1bb5cf3409bf324e0a722c20bdb4c20ee39bf1c30ce8ae499c8502b0b5e0c6", size = 2073928 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/f0/8fbc882ca80cf077f1b246c0e3c3465f7f415439bdea6b899f6b19f61f70/networkx-3.2.1-py3-none-any.whl", hash = "sha256:f18c69adc97877c42332c170849c96cefa91881c99a7cb3e95b7c659ebdc1ec2", size = 1647772 }, +] + +[[package]] +name = "networkx" +version = "3.4.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +sdist = { url = "https://files.pythonhosted.org/packages/fd/1d/06475e1cd5264c0b870ea2cc6fdb3e37177c1e565c43f56ff17a10e3937f/networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1", size = 2151368 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b9/54/dd730b32ea14ea797530a4479b2ed46a6fb250f682a9cfb997e968bf0261/networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f", size = 1723263 }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314 }, +] + +[[package]] +name = "numba" +version = "0.60.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "llvmlite" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3c/93/2849300a9184775ba274aba6f82f303343669b0592b7bb0849ea713dabb0/numba-0.60.0.tar.gz", hash = "sha256:5df6158e5584eece5fc83294b949fd30b9f1125df7708862205217e068aabf16", size = 2702171 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/cf/baa13a7e3556d73d9e38021e6d6aa4aeb30d8b94545aa8b70d0f24a1ccc4/numba-0.60.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5d761de835cd38fb400d2c26bb103a2726f548dc30368853121d66201672e651", size = 2647627 }, + { url = "https://files.pythonhosted.org/packages/ac/ba/4b57fa498564457c3cc9fc9e570a6b08e6086c74220f24baaf04e54b995f/numba-0.60.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:159e618ef213fba758837f9837fb402bbe65326e60ba0633dbe6c7f274d42c1b", size = 2650322 }, + { url = "https://files.pythonhosted.org/packages/28/98/7ea97ee75870a54f938a8c70f7e0be4495ba5349c5f9db09d467c4a5d5b7/numba-0.60.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1527dc578b95c7c4ff248792ec33d097ba6bef9eda466c948b68dfc995c25781", size = 3407390 }, + { url = "https://files.pythonhosted.org/packages/79/58/cb4ac5b8f7ec64200460aef1fed88258fb872ceef504ab1f989d2ff0f684/numba-0.60.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe0b28abb8d70f8160798f4de9d486143200f34458d34c4a214114e445d7124e", size = 3699694 }, + { url = "https://files.pythonhosted.org/packages/1c/b0/c61a93ca947d12233ff45de506ddbf52af3f752066a0b8be4d27426e16da/numba-0.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:19407ced081d7e2e4b8d8c36aa57b7452e0283871c296e12d798852bc7d7f198", size = 2687030 }, + { url = "https://files.pythonhosted.org/packages/98/ad/df18d492a8f00d29a30db307904b9b296e37507034eedb523876f3a2e13e/numba-0.60.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a17b70fc9e380ee29c42717e8cc0bfaa5556c416d94f9aa96ba13acb41bdece8", size = 2647254 }, + { url = "https://files.pythonhosted.org/packages/9a/51/a4dc2c01ce7a850b8e56ff6d5381d047a5daea83d12bad08aa071d34b2ee/numba-0.60.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fb02b344a2a80efa6f677aa5c40cd5dd452e1b35f8d1c2af0dfd9ada9978e4b", size = 2649970 }, + { url = "https://files.pythonhosted.org/packages/f9/4c/8889ac94c0b33dca80bed11564b8c6d9ea14d7f094e674c58e5c5b05859b/numba-0.60.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5f4fde652ea604ea3c86508a3fb31556a6157b2c76c8b51b1d45eb40c8598703", size = 3412492 }, + { url = "https://files.pythonhosted.org/packages/57/03/2b4245b05b71c0cee667e6a0b51606dfa7f4157c9093d71c6b208385a611/numba-0.60.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4142d7ac0210cc86432b818338a2bc368dc773a2f5cf1e32ff7c5b378bd63ee8", size = 3705018 }, + { url = "https://files.pythonhosted.org/packages/79/89/2d924ca60dbf949f18a6fec223a2445f5f428d9a5f97a6b29c2122319015/numba-0.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:cac02c041e9b5bc8cf8f2034ff6f0dbafccd1ae9590dc146b3a02a45e53af4e2", size = 2686920 }, + { url = "https://files.pythonhosted.org/packages/eb/5c/b5ec752c475e78a6c3676b67c514220dbde2725896bbb0b6ec6ea54b2738/numba-0.60.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7da4098db31182fc5ffe4bc42c6f24cd7d1cb8a14b59fd755bfee32e34b8404", size = 2647866 }, + { url = "https://files.pythonhosted.org/packages/65/42/39559664b2e7c15689a638c2a38b3b74c6e69a04e2b3019b9f7742479188/numba-0.60.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:38d6ea4c1f56417076ecf8fc327c831ae793282e0ff51080c5094cb726507b1c", size = 2650208 }, + { url = "https://files.pythonhosted.org/packages/67/88/c4459ccc05674ef02119abf2888ccd3e2fed12a323f52255f4982fc95876/numba-0.60.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:62908d29fb6a3229c242e981ca27e32a6e606cc253fc9e8faeb0e48760de241e", size = 3466946 }, + { url = "https://files.pythonhosted.org/packages/8b/41/ac11cf33524def12aa5bd698226ae196a1185831c05ed29dc0c56eaa308b/numba-0.60.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0ebaa91538e996f708f1ab30ef4d3ddc344b64b5227b67a57aa74f401bb68b9d", size = 3761463 }, + { url = "https://files.pythonhosted.org/packages/ca/bd/0fe29fcd1b6a8de479a4ed25c6e56470e467e3611c079d55869ceef2b6d1/numba-0.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:f75262e8fe7fa96db1dca93d53a194a38c46da28b112b8a4aca168f0df860347", size = 2707588 }, + { url = "https://files.pythonhosted.org/packages/68/1a/87c53f836cdf557083248c3f47212271f220280ff766538795e77c8c6bbf/numba-0.60.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:01ef4cd7d83abe087d644eaa3d95831b777aa21d441a23703d649e06b8e06b74", size = 2647186 }, + { url = "https://files.pythonhosted.org/packages/28/14/a5baa1f2edea7b49afa4dc1bb1b126645198cf1075186853b5b497be826e/numba-0.60.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:819a3dfd4630d95fd574036f99e47212a1af41cbcb019bf8afac63ff56834449", size = 2650038 }, + { url = "https://files.pythonhosted.org/packages/3b/bd/f1985719ff34e37e07bb18f9d3acd17e5a21da255f550c8eae031e2ddf5f/numba-0.60.0-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0b983bd6ad82fe868493012487f34eae8bf7dd94654951404114f23c3466d34b", size = 3403010 }, + { url = "https://files.pythonhosted.org/packages/54/9b/cd73d3f6617ddc8398a63ef97d8dc9139a9879b9ca8a7ca4b8789056ea46/numba-0.60.0-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c151748cd269ddeab66334bd754817ffc0cabd9433acb0f551697e5151917d25", size = 3695086 }, + { url = "https://files.pythonhosted.org/packages/01/01/8b7b670c77c5ea0e47e283d82332969bf672ab6410d0b2610cac5b7a3ded/numba-0.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:3031547a015710140e8c87226b4cfe927cac199835e5bf7d4fe5cb64e814e3ab", size = 2686978 }, +] + +[[package]] +name = "numpy" +version = "2.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a9/75/10dd1f8116a8b796cb2c737b674e02d02e80454bda953fa7e65d8c12b016/numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78", size = 18902015 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/21/91/3495b3237510f79f5d81f2508f9f13fea78ebfdf07538fc7444badda173d/numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece", size = 21165245 }, + { url = "https://files.pythonhosted.org/packages/05/33/26178c7d437a87082d11019292dce6d3fe6f0e9026b7b2309cbf3e489b1d/numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04", size = 13738540 }, + { url = "https://files.pythonhosted.org/packages/ec/31/cc46e13bf07644efc7a4bf68df2df5fb2a1a88d0cd0da9ddc84dc0033e51/numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66", size = 5300623 }, + { url = "https://files.pythonhosted.org/packages/6e/16/7bfcebf27bb4f9d7ec67332ffebee4d1bf085c84246552d52dbb548600e7/numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b", size = 6901774 }, + { url = "https://files.pythonhosted.org/packages/f9/a3/561c531c0e8bf082c5bef509d00d56f82e0ea7e1e3e3a7fc8fa78742a6e5/numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd", size = 13907081 }, + { url = "https://files.pythonhosted.org/packages/fa/66/f7177ab331876200ac7563a580140643d1179c8b4b6a6b0fc9838de2a9b8/numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318", size = 19523451 }, + { url = "https://files.pythonhosted.org/packages/25/7f/0b209498009ad6453e4efc2c65bcdf0ae08a182b2b7877d7ab38a92dc542/numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8", size = 19927572 }, + { url = "https://files.pythonhosted.org/packages/3e/df/2619393b1e1b565cd2d4c4403bdd979621e2c4dea1f8532754b2598ed63b/numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326", size = 14400722 }, + { url = "https://files.pythonhosted.org/packages/22/ad/77e921b9f256d5da36424ffb711ae79ca3f451ff8489eeca544d0701d74a/numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97", size = 6472170 }, + { url = "https://files.pythonhosted.org/packages/10/05/3442317535028bc29cf0c0dd4c191a4481e8376e9f0db6bcf29703cadae6/numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131", size = 15905558 }, + { url = "https://files.pythonhosted.org/packages/8b/cf/034500fb83041aa0286e0fb16e7c76e5c8b67c0711bb6e9e9737a717d5fe/numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448", size = 21169137 }, + { url = "https://files.pythonhosted.org/packages/4a/d9/32de45561811a4b87fbdee23b5797394e3d1504b4a7cf40c10199848893e/numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195", size = 13703552 }, + { url = "https://files.pythonhosted.org/packages/c1/ca/2f384720020c7b244d22508cb7ab23d95f179fcfff33c31a6eeba8d6c512/numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57", size = 5298957 }, + { url = "https://files.pythonhosted.org/packages/0e/78/a3e4f9fb6aa4e6fdca0c5428e8ba039408514388cf62d89651aade838269/numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a", size = 6905573 }, + { url = "https://files.pythonhosted.org/packages/a0/72/cfc3a1beb2caf4efc9d0b38a15fe34025230da27e1c08cc2eb9bfb1c7231/numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669", size = 13914330 }, + { url = "https://files.pythonhosted.org/packages/ba/a8/c17acf65a931ce551fee11b72e8de63bf7e8a6f0e21add4c937c83563538/numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951", size = 19534895 }, + { url = "https://files.pythonhosted.org/packages/ba/86/8767f3d54f6ae0165749f84648da9dcc8cd78ab65d415494962c86fac80f/numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9", size = 19937253 }, + { url = "https://files.pythonhosted.org/packages/df/87/f76450e6e1c14e5bb1eae6836478b1028e096fd02e85c1c37674606ab752/numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15", size = 14414074 }, + { url = "https://files.pythonhosted.org/packages/5c/ca/0f0f328e1e59f73754f06e1adfb909de43726d4f24c6a3f8805f34f2b0fa/numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4", size = 6470640 }, + { url = "https://files.pythonhosted.org/packages/eb/57/3a3f14d3a759dcf9bf6e9eda905794726b758819df4663f217d658a58695/numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc", size = 15910230 }, + { url = "https://files.pythonhosted.org/packages/45/40/2e117be60ec50d98fa08c2f8c48e09b3edea93cfcabd5a9ff6925d54b1c2/numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b", size = 20895803 }, + { url = "https://files.pythonhosted.org/packages/46/92/1b8b8dee833f53cef3e0a3f69b2374467789e0bb7399689582314df02651/numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e", size = 13471835 }, + { url = "https://files.pythonhosted.org/packages/7f/19/e2793bde475f1edaea6945be141aef6c8b4c669b90c90a300a8954d08f0a/numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c", size = 5038499 }, + { url = "https://files.pythonhosted.org/packages/e3/ff/ddf6dac2ff0dd50a7327bcdba45cb0264d0e96bb44d33324853f781a8f3c/numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c", size = 6633497 }, + { url = "https://files.pythonhosted.org/packages/72/21/67f36eac8e2d2cd652a2e69595a54128297cdcb1ff3931cfc87838874bd4/numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692", size = 13621158 }, + { url = "https://files.pythonhosted.org/packages/39/68/e9f1126d757653496dbc096cb429014347a36b228f5a991dae2c6b6cfd40/numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a", size = 19236173 }, + { url = "https://files.pythonhosted.org/packages/d1/e9/1f5333281e4ebf483ba1c888b1d61ba7e78d7e910fdd8e6499667041cc35/numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c", size = 19634174 }, + { url = "https://files.pythonhosted.org/packages/71/af/a469674070c8d8408384e3012e064299f7a2de540738a8e414dcfd639996/numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded", size = 14099701 }, + { url = "https://files.pythonhosted.org/packages/d0/3d/08ea9f239d0e0e939b6ca52ad403c84a2bce1bde301a8eb4888c1c1543f1/numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5", size = 6174313 }, + { url = "https://files.pythonhosted.org/packages/b2/b5/4ac39baebf1fdb2e72585c8352c56d063b6126be9fc95bd2bb5ef5770c20/numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a", size = 15606179 }, + { url = "https://files.pythonhosted.org/packages/43/c1/41c8f6df3162b0c6ffd4437d729115704bd43363de0090c7f913cfbc2d89/numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c", size = 21169942 }, + { url = "https://files.pythonhosted.org/packages/39/bc/fd298f308dcd232b56a4031fd6ddf11c43f9917fbc937e53762f7b5a3bb1/numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd", size = 13711512 }, + { url = "https://files.pythonhosted.org/packages/96/ff/06d1aa3eeb1c614eda245c1ba4fb88c483bee6520d361641331872ac4b82/numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b", size = 5306976 }, + { url = "https://files.pythonhosted.org/packages/2d/98/121996dcfb10a6087a05e54453e28e58694a7db62c5a5a29cee14c6e047b/numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729", size = 6906494 }, + { url = "https://files.pythonhosted.org/packages/15/31/9dffc70da6b9bbf7968f6551967fc21156207366272c2a40b4ed6008dc9b/numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1", size = 13912596 }, + { url = "https://files.pythonhosted.org/packages/b9/14/78635daab4b07c0930c919d451b8bf8c164774e6a3413aed04a6d95758ce/numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd", size = 19526099 }, + { url = "https://files.pythonhosted.org/packages/26/4c/0eeca4614003077f68bfe7aac8b7496f04221865b3a5e7cb230c9d055afd/numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d", size = 19932823 }, + { url = "https://files.pythonhosted.org/packages/f1/46/ea25b98b13dccaebddf1a803f8c748680d972e00507cd9bc6dcdb5aa2ac1/numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d", size = 14404424 }, + { url = "https://files.pythonhosted.org/packages/c8/a6/177dd88d95ecf07e722d21008b1b40e681a929eb9e329684d449c36586b2/numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa", size = 6476809 }, + { url = "https://files.pythonhosted.org/packages/ea/2b/7fc9f4e7ae5b507c1a3a21f0f15ed03e794c1242ea8a242ac158beb56034/numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73", size = 15911314 }, + { url = "https://files.pythonhosted.org/packages/8f/3b/df5a870ac6a3be3a86856ce195ef42eec7ae50d2a202be1f5a4b3b340e14/numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8", size = 21025288 }, + { url = "https://files.pythonhosted.org/packages/2c/97/51af92f18d6f6f2d9ad8b482a99fb74e142d71372da5d834b3a2747a446e/numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4", size = 6762793 }, + { url = "https://files.pythonhosted.org/packages/12/46/de1fbd0c1b5ccaa7f9a005b66761533e2f6a3e560096682683a223631fe9/numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c", size = 19334885 }, + { url = "https://files.pythonhosted.org/packages/cc/dc/d330a6faefd92b446ec0f0dfea4c3207bb1fef3c4771d19cf4543efd2c78/numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385", size = 15828784 }, +] + +[[package]] +name = "openai" +version = "1.58.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" }, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/3c/b1ecce430ed56fa3ac1b0676966d3250aab9c70a408232b71e419ea62148/openai-1.58.1.tar.gz", hash = "sha256:f5a035fd01e141fc743f4b0e02c41ca49be8fab0866d3b67f5f29b4f4d3c0973", size = 343411 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/5a/d22cd07f1a99b9e8b3c92ee0c1959188db4318828a3d88c9daac120bdd69/openai-1.58.1-py3-none-any.whl", hash = "sha256:e2910b1170a6b7f88ef491ac3a42c387f08bd3db533411f7ee391d166571d63c", size = 454279 }, +] + +[[package]] +name = "ordered-set" +version = "4.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4c/ca/bfac8bc689799bcca4157e0e0ced07e70ce125193fc2e166d2e685b7e2fe/ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8", size = 12826 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/55/af02708f230eb77084a299d7b08175cff006dea4f2721074b92cdb0296c0/ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562", size = 7634 }, +] + +[[package]] +name = "packaging" +version = "24.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, +] + +[[package]] +name = "pandas" +version = "2.2.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "python-dateutil" }, + { name = "pytz" }, + { name = "tzdata" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/d6/9f8431bacc2e19dca897724cd097b1bb224a6ad5433784a44b587c7c13af/pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667", size = 4399213 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/70/c853aec59839bceed032d52010ff5f1b8d87dc3114b762e4ba2727661a3b/pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5", size = 12580827 }, + { url = "https://files.pythonhosted.org/packages/99/f2/c4527768739ffa4469b2b4fff05aa3768a478aed89a2f271a79a40eee984/pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348", size = 11303897 }, + { url = "https://files.pythonhosted.org/packages/ed/12/86c1747ea27989d7a4064f806ce2bae2c6d575b950be087837bdfcabacc9/pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed", size = 66480908 }, + { url = "https://files.pythonhosted.org/packages/44/50/7db2cd5e6373ae796f0ddad3675268c8d59fb6076e66f0c339d61cea886b/pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57", size = 13064210 }, + { url = "https://files.pythonhosted.org/packages/61/61/a89015a6d5536cb0d6c3ba02cebed51a95538cf83472975275e28ebf7d0c/pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42", size = 16754292 }, + { url = "https://files.pythonhosted.org/packages/ce/0d/4cc7b69ce37fac07645a94e1d4b0880b15999494372c1523508511b09e40/pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f", size = 14416379 }, + { url = "https://files.pythonhosted.org/packages/31/9e/6ebb433de864a6cd45716af52a4d7a8c3c9aaf3a98368e61db9e69e69a9c/pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645", size = 11598471 }, + { url = "https://files.pythonhosted.org/packages/a8/44/d9502bf0ed197ba9bf1103c9867d5904ddcaf869e52329787fc54ed70cc8/pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039", size = 12602222 }, + { url = "https://files.pythonhosted.org/packages/52/11/9eac327a38834f162b8250aab32a6781339c69afe7574368fffe46387edf/pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd", size = 11321274 }, + { url = "https://files.pythonhosted.org/packages/45/fb/c4beeb084718598ba19aa9f5abbc8aed8b42f90930da861fcb1acdb54c3a/pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698", size = 15579836 }, + { url = "https://files.pythonhosted.org/packages/cd/5f/4dba1d39bb9c38d574a9a22548c540177f78ea47b32f99c0ff2ec499fac5/pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc", size = 13058505 }, + { url = "https://files.pythonhosted.org/packages/b9/57/708135b90391995361636634df1f1130d03ba456e95bcf576fada459115a/pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3", size = 16744420 }, + { url = "https://files.pythonhosted.org/packages/86/4a/03ed6b7ee323cf30404265c284cee9c65c56a212e0a08d9ee06984ba2240/pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32", size = 14440457 }, + { url = "https://files.pythonhosted.org/packages/ed/8c/87ddf1fcb55d11f9f847e3c69bb1c6f8e46e2f40ab1a2d2abadb2401b007/pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5", size = 11617166 }, + { url = "https://files.pythonhosted.org/packages/17/a3/fb2734118db0af37ea7433f57f722c0a56687e14b14690edff0cdb4b7e58/pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9", size = 12529893 }, + { url = "https://files.pythonhosted.org/packages/e1/0c/ad295fd74bfac85358fd579e271cded3ac969de81f62dd0142c426b9da91/pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4", size = 11363475 }, + { url = "https://files.pythonhosted.org/packages/c6/2a/4bba3f03f7d07207481fed47f5b35f556c7441acddc368ec43d6643c5777/pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3", size = 15188645 }, + { url = "https://files.pythonhosted.org/packages/38/f8/d8fddee9ed0d0c0f4a2132c1dfcf0e3e53265055da8df952a53e7eaf178c/pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319", size = 12739445 }, + { url = "https://files.pythonhosted.org/packages/20/e8/45a05d9c39d2cea61ab175dbe6a2de1d05b679e8de2011da4ee190d7e748/pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8", size = 16359235 }, + { url = "https://files.pythonhosted.org/packages/1d/99/617d07a6a5e429ff90c90da64d428516605a1ec7d7bea494235e1c3882de/pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a", size = 14056756 }, + { url = "https://files.pythonhosted.org/packages/29/d4/1244ab8edf173a10fd601f7e13b9566c1b525c4f365d6bee918e68381889/pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13", size = 11504248 }, + { url = "https://files.pythonhosted.org/packages/ca/8c/8848a4c9b8fdf5a534fe2077af948bf53cd713d77ffbcd7bd15710348fd7/pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39", size = 12595535 }, + { url = "https://files.pythonhosted.org/packages/9c/b9/5cead4f63b6d31bdefeb21a679bc5a7f4aaf262ca7e07e2bc1c341b68470/pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30", size = 11319822 }, + { url = "https://files.pythonhosted.org/packages/31/af/89e35619fb573366fa68dc26dad6ad2c08c17b8004aad6d98f1a31ce4bb3/pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c", size = 15625439 }, + { url = "https://files.pythonhosted.org/packages/3d/dd/bed19c2974296661493d7acc4407b1d2db4e2a482197df100f8f965b6225/pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c", size = 13068928 }, + { url = "https://files.pythonhosted.org/packages/31/a3/18508e10a31ea108d746c848b5a05c0711e0278fa0d6f1c52a8ec52b80a5/pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea", size = 16783266 }, + { url = "https://files.pythonhosted.org/packages/c4/a5/3429bd13d82bebc78f4d78c3945efedef63a7cd0c15c17b2eeb838d1121f/pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761", size = 14450871 }, + { url = "https://files.pythonhosted.org/packages/2f/49/5c30646e96c684570925b772eac4eb0a8cb0ca590fa978f56c5d3ae73ea1/pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e", size = 11618011 }, +] + +[[package]] +name = "parsedatetime" +version = "2.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/20/cb587f6672dbe585d101f590c3871d16e7aec5a576a1694997a3777312ac/parsedatetime-2.6.tar.gz", hash = "sha256:4cb368fbb18a0b7231f4d76119165451c8d2e35951455dfee97c62a87b04d455", size = 60114 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9d/a4/3dd804926a42537bf69fb3ebb9fd72a50ba84f807d95df5ae016606c976c/parsedatetime-2.6-py3-none-any.whl", hash = "sha256:cb96edd7016872f58479e35879294258c71437195760746faffedb692aef000b", size = 42548 }, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191 }, +] + +[[package]] +name = "patsy" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/81/74f6a65b848ffd16c18f920620ce999fe45fe27f01ab3911260ce4ed85e4/patsy-1.0.1.tar.gz", hash = "sha256:e786a9391eec818c054e359b737bbce692f051aee4c661f4141cc88fb459c0c4", size = 396010 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/2b/b50d3d08ea0fc419c183a84210571eba005328efa62b6b98bc28e9ead32a/patsy-1.0.1-py2.py3-none-any.whl", hash = "sha256:751fb38f9e97e62312e921a1954b81e1bb2bcda4f5eeabaf94db251ee791509c", size = 232923 }, +] + +[[package]] +name = "phik" +version = "0.12.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "joblib" }, + { name = "matplotlib" }, + { name = "numpy" }, + { name = "pandas" }, + { name = "scipy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cb/8c/1bf452e93dd23523f18660ffdc24092038a5781964085d766bd792e0495b/phik-0.12.4.tar.gz", hash = "sha256:d4d53274685e56fb08088505b4eec70be07f2f8044e7961ca02b399e42c37025", size = 621326 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8e/d3/6cbfcd06de7aa26df397bcc404991e68b03112bb14a403f14d711a165cf8/phik-0.12.4-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:778d00e33762c1e85681f65ef011933faabdc80ab53262f221cccf75eea535d5", size = 659209 }, + { url = "https://files.pythonhosted.org/packages/40/9f/f7cb4513c8a31c7c887b838e2ee4c2ab968983cf905c69e79ada1971b1f7/phik-0.12.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d92cc961ee60b317896589bab087901440b2bc749dbd5e266bc3dfe25dbff19a", size = 655899 }, + { url = "https://files.pythonhosted.org/packages/ee/3e/d9bea3b996c60858b396cdb8ccc2ab35d6562db8fc3a7f1fb6e3a2d7b13d/phik-0.12.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f48d0dd94323401ed069bbaa673a879f3f002e5ef6fabda19eb3d0a5f8e3947f", size = 686110 }, + { url = "https://files.pythonhosted.org/packages/49/e6/b78302b9efb85ef2df2ac52073d1be1f6bebdf2cdfe572ed8646f6257109/phik-0.12.4-cp310-cp310-win_amd64.whl", hash = "sha256:ea5030640fda8380d7db9ea28fbde37a1565c0b1699bcb7152d6772a6ad278af", size = 666210 }, + { url = "https://files.pythonhosted.org/packages/c1/c0/1c18b27b8fd73c72a7a9b4d883afc61c6a3ecf347fe2ec34fece300d89b1/phik-0.12.4-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2b2f518310c6f3144a5e3d1bc3489c8be17ebe4da6b8520f4e01fa3e544b0fed", size = 660622 }, + { url = "https://files.pythonhosted.org/packages/b9/69/a42d9320b68b0d43d78db46787669b2287cba5e8db2e83fe0d63c1fd6945/phik-0.12.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f7a6614184eac1b55100c4a7c9899f370ae97599b41b2982f59f7e1da9511cd", size = 657274 }, + { url = "https://files.pythonhosted.org/packages/3d/66/799d5cebe086c9f1f02cbdf8a72128aaf4c5b413f3a618a023f91a5e6cfc/phik-0.12.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ea158b31d51e34153241bd3cac24c9a9a463af575c063abb8ca8d30352b4b12", size = 687781 }, + { url = "https://files.pythonhosted.org/packages/95/e3/250ce99ed761389028543fcb876176ba619f6bf387d6cce3bab736bd92cc/phik-0.12.4-cp311-cp311-win_amd64.whl", hash = "sha256:f315699c695e5646b29911b577d584ae76d0fcc1dee539634e512518fcd4108d", size = 667137 }, + { url = "https://files.pythonhosted.org/packages/a6/72/a2cbf1b19becffb4f675de4be9a939b904a014844de15cf1fb18f80db1f2/phik-0.12.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:951b06ed32fa0fe6ee73f98407e4d435f90a1750ecb0f250df46eb75741a33bf", size = 659042 }, + { url = "https://files.pythonhosted.org/packages/f8/c2/c89371bb805f9801929ceb70730604fc35a4f4a26f0a4a55f85af7075d12/phik-0.12.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b6ba2fa65c4b2a3c36aded0f47333c3069c0520bb426c3f937656a58a5041957", size = 655652 }, + { url = "https://files.pythonhosted.org/packages/9c/50/bb6a9f9cff78f88c10608bd0b7dd77b335c2388746132c3b2abb910f2021/phik-0.12.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3868a8f9277ab338eacb634bb06dd83278344dc19154f77e06c9cb8712959404", size = 687247 }, + { url = "https://files.pythonhosted.org/packages/eb/12/720f5c721df98c718b22bc15da93efb32b1ed5371da75874827a3bb8af7d/phik-0.12.4-cp312-cp312-win_amd64.whl", hash = "sha256:247ea90b2d067bb360e798e5645dbcea7753b3bf78436287d92247285c4aa58a", size = 666426 }, + { url = "https://files.pythonhosted.org/packages/76/7c/8cf317aca8477994318ddfcffd222e8be01b5bd9065aecf6a191da848518/phik-0.12.4-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:c2c7482e8ca1e9f688eacd69baccf838fc535b9d3c13523b2d3b53b4aff04c5d", size = 659307 }, + { url = "https://files.pythonhosted.org/packages/36/f4/81b2272c66bf5f4390650d2efbf1acfa4f1a3f261a9f9e58aaf2fbb73431/phik-0.12.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7eb9c0a22d01007a4c51d48489c4f3ebe738461e092061c90da7c1ccf8d51e60", size = 655988 }, + { url = "https://files.pythonhosted.org/packages/3c/c6/73d4815925b5e01843f6148bf84e49fd7dd52977cf9948175baf0c21bb28/phik-0.12.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd26c71de023852aa452897e41a55176d6d87c268323d0814514cd32a9fadc1", size = 686314 }, + { url = "https://files.pythonhosted.org/packages/53/e2/d1f5e9ce10f2cb9a8b3866f17cf7b47db95f086987565ecda6df542fe30b/phik-0.12.4-cp39-cp39-win_amd64.whl", hash = "sha256:c15e987d90d34990fee0ef157fb00c9c69befdf520689ac5f320ff0ab74fa399", size = 666288 }, +] + +[[package]] +name = "pillow" +version = "11.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/26/0d95c04c868f6bdb0c447e3ee2de5564411845e36a858cfd63766bc7b563/pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739", size = 46737780 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/fb/a6ce6836bd7fd93fbf9144bf54789e02babc27403b50a9e1583ee877d6da/pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947", size = 3154708 }, + { url = "https://files.pythonhosted.org/packages/6a/1d/1f51e6e912d8ff316bb3935a8cda617c801783e0b998bf7a894e91d3bd4c/pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba", size = 2979223 }, + { url = "https://files.pythonhosted.org/packages/90/83/e2077b0192ca8a9ef794dbb74700c7e48384706467067976c2a95a0f40a1/pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086", size = 4183167 }, + { url = "https://files.pythonhosted.org/packages/0e/74/467af0146970a98349cdf39e9b79a6cc8a2e7558f2c01c28a7b6b85c5bda/pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9", size = 4283912 }, + { url = "https://files.pythonhosted.org/packages/85/b1/d95d4f7ca3a6c1ae120959605875a31a3c209c4e50f0029dc1a87566cf46/pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488", size = 4195815 }, + { url = "https://files.pythonhosted.org/packages/41/c3/94f33af0762ed76b5a237c5797e088aa57f2b7fa8ee7932d399087be66a8/pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f", size = 4366117 }, + { url = "https://files.pythonhosted.org/packages/ba/3c/443e7ef01f597497268899e1cca95c0de947c9bbf77a8f18b3c126681e5d/pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb", size = 4278607 }, + { url = "https://files.pythonhosted.org/packages/26/95/1495304448b0081e60c0c5d63f928ef48bb290acee7385804426fa395a21/pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97", size = 4410685 }, + { url = "https://files.pythonhosted.org/packages/45/da/861e1df971ef0de9870720cb309ca4d553b26a9483ec9be3a7bf1de4a095/pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50", size = 2249185 }, + { url = "https://files.pythonhosted.org/packages/d5/4e/78f7c5202ea2a772a5ab05069c1b82503e6353cd79c7e474d4945f4b82c3/pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c", size = 2566726 }, + { url = "https://files.pythonhosted.org/packages/77/e4/6e84eada35cbcc646fc1870f72ccfd4afacb0fae0c37ffbffe7f5dc24bf1/pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1", size = 2254585 }, + { url = "https://files.pythonhosted.org/packages/f0/eb/f7e21b113dd48a9c97d364e0915b3988c6a0b6207652f5a92372871b7aa4/pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc", size = 3154705 }, + { url = "https://files.pythonhosted.org/packages/25/b3/2b54a1d541accebe6bd8b1358b34ceb2c509f51cb7dcda8687362490da5b/pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a", size = 2979222 }, + { url = "https://files.pythonhosted.org/packages/20/12/1a41eddad8265c5c19dda8fb6c269ce15ee25e0b9f8f26286e6202df6693/pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3", size = 4190220 }, + { url = "https://files.pythonhosted.org/packages/a9/9b/8a8c4d07d77447b7457164b861d18f5a31ae6418ef5c07f6f878fa09039a/pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5", size = 4291399 }, + { url = "https://files.pythonhosted.org/packages/fc/e4/130c5fab4a54d3991129800dd2801feeb4b118d7630148cd67f0e6269d4c/pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b", size = 4202709 }, + { url = "https://files.pythonhosted.org/packages/39/63/b3fc299528d7df1f678b0666002b37affe6b8751225c3d9c12cf530e73ed/pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa", size = 4372556 }, + { url = "https://files.pythonhosted.org/packages/c6/a6/694122c55b855b586c26c694937d36bb8d3b09c735ff41b2f315c6e66a10/pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306", size = 4287187 }, + { url = "https://files.pythonhosted.org/packages/ba/a9/f9d763e2671a8acd53d29b1e284ca298bc10a595527f6be30233cdb9659d/pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9", size = 4418468 }, + { url = "https://files.pythonhosted.org/packages/6e/0e/b5cbad2621377f11313a94aeb44ca55a9639adabcaaa073597a1925f8c26/pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5", size = 2249249 }, + { url = "https://files.pythonhosted.org/packages/dc/83/1470c220a4ff06cd75fc609068f6605e567ea51df70557555c2ab6516b2c/pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291", size = 2566769 }, + { url = "https://files.pythonhosted.org/packages/52/98/def78c3a23acee2bcdb2e52005fb2810ed54305602ec1bfcfab2bda6f49f/pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9", size = 2254611 }, + { url = "https://files.pythonhosted.org/packages/1c/a3/26e606ff0b2daaf120543e537311fa3ae2eb6bf061490e4fea51771540be/pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923", size = 3147642 }, + { url = "https://files.pythonhosted.org/packages/4f/d5/1caabedd8863526a6cfa44ee7a833bd97f945dc1d56824d6d76e11731939/pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903", size = 2978999 }, + { url = "https://files.pythonhosted.org/packages/d9/ff/5a45000826a1aa1ac6874b3ec5a856474821a1b59d838c4f6ce2ee518fe9/pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4", size = 4196794 }, + { url = "https://files.pythonhosted.org/packages/9d/21/84c9f287d17180f26263b5f5c8fb201de0f88b1afddf8a2597a5c9fe787f/pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f", size = 4300762 }, + { url = "https://files.pythonhosted.org/packages/84/39/63fb87cd07cc541438b448b1fed467c4d687ad18aa786a7f8e67b255d1aa/pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9", size = 4210468 }, + { url = "https://files.pythonhosted.org/packages/7f/42/6e0f2c2d5c60f499aa29be14f860dd4539de322cd8fb84ee01553493fb4d/pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7", size = 4381824 }, + { url = "https://files.pythonhosted.org/packages/31/69/1ef0fb9d2f8d2d114db982b78ca4eeb9db9a29f7477821e160b8c1253f67/pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6", size = 4296436 }, + { url = "https://files.pythonhosted.org/packages/44/ea/dad2818c675c44f6012289a7c4f46068c548768bc6c7f4e8c4ae5bbbc811/pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc", size = 4429714 }, + { url = "https://files.pythonhosted.org/packages/af/3a/da80224a6eb15bba7a0dcb2346e2b686bb9bf98378c0b4353cd88e62b171/pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6", size = 2249631 }, + { url = "https://files.pythonhosted.org/packages/57/97/73f756c338c1d86bb802ee88c3cab015ad7ce4b838f8a24f16b676b1ac7c/pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47", size = 2567533 }, + { url = "https://files.pythonhosted.org/packages/0b/30/2b61876e2722374558b871dfbfcbe4e406626d63f4f6ed92e9c8e24cac37/pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25", size = 2254890 }, + { url = "https://files.pythonhosted.org/packages/f3/8b/01849a820686bf309b7d79a935d57bcafbfd016f1d78fc3d37ed2ba00f96/pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba", size = 3154738 }, + { url = "https://files.pythonhosted.org/packages/35/e8/ff71a40ca8e24cfd6bb333cc4ca8cc24ebecb6942bb4ad1e5ec61f33d1b8/pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a", size = 2979272 }, + { url = "https://files.pythonhosted.org/packages/09/4f/2280ad43f5639174a0227920a59664fb78c5096a0b3fd865fee5184d4526/pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916", size = 4179756 }, + { url = "https://files.pythonhosted.org/packages/14/b1/c8f428bae932a27ce9c87e7b21aba8ea3e820aa11413c5a795868c37e039/pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d", size = 4280488 }, + { url = "https://files.pythonhosted.org/packages/78/66/7c5e44ab2c0123710a5d4692a4ee5931ac438efd7730ac395e305902346e/pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7", size = 4192772 }, + { url = "https://files.pythonhosted.org/packages/36/5d/a9a00f8251ce93144f0250c0f0aece31b83ff33ffc243cdf987a8d584818/pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e", size = 4363533 }, + { url = "https://files.pythonhosted.org/packages/fd/21/d8182fc1f3233078eb744f9f2950992f537655174febb8b3f7bdc61847b1/pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f", size = 4275415 }, + { url = "https://files.pythonhosted.org/packages/c9/ee/93e02e8c29210ba7383843405b8b39bd19a164770f14d8569096dd123781/pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae", size = 4407081 }, + { url = "https://files.pythonhosted.org/packages/6e/77/8cda03af2b5177a18d645ad4a7446cda6c1292d1a2fb6e772a06fa9fc86b/pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4", size = 2249213 }, + { url = "https://files.pythonhosted.org/packages/9f/e4/c90bf7889489f3a14803bd00d3645945dd476020ab67579985af8233ab30/pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd", size = 2566862 }, + { url = "https://files.pythonhosted.org/packages/27/a6/77d2ed085055237581d6276ac1e85f562f1b1848614647d8427e49d83c03/pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd", size = 2254605 }, + { url = "https://files.pythonhosted.org/packages/36/57/42a4dd825eab762ba9e690d696d894ba366e06791936056e26e099398cda/pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2", size = 3119239 }, + { url = "https://files.pythonhosted.org/packages/98/f7/25f9f9e368226a1d6cf3507081a1a7944eddd3ca7821023377043f5a83c8/pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2", size = 2950803 }, + { url = "https://files.pythonhosted.org/packages/59/01/98ead48a6c2e31e6185d4c16c978a67fe3ccb5da5c2ff2ba8475379bb693/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b", size = 3281098 }, + { url = "https://files.pythonhosted.org/packages/51/c0/570255b2866a0e4d500a14f950803a2ec273bac7badc43320120b9262450/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2", size = 3323665 }, + { url = "https://files.pythonhosted.org/packages/0e/75/689b4ec0483c42bfc7d1aacd32ade7a226db4f4fac57c6fdcdf90c0731e3/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830", size = 3310533 }, + { url = "https://files.pythonhosted.org/packages/3d/30/38bd6149cf53da1db4bad304c543ade775d225961c4310f30425995cb9ec/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734", size = 3414886 }, + { url = "https://files.pythonhosted.org/packages/ec/3d/c32a51d848401bd94cabb8767a39621496491ee7cd5199856b77da9b18ad/pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316", size = 2567508 }, + { url = "https://files.pythonhosted.org/packages/67/21/fbb4222399f72d6e9c828818ff4ef8391c1e8e71623368295c8dbc789bd1/pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06", size = 2950706 }, + { url = "https://files.pythonhosted.org/packages/a2/b6/6aeb6e018b705ea4076db50aac078c9db8715a901f4c65698edc31375d0f/pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273", size = 3323524 }, + { url = "https://files.pythonhosted.org/packages/48/26/36cc90e9932c5fe7c8876c32d6091ef5a09e8137e8e0633045bd35085fdd/pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790", size = 3414787 }, + { url = "https://files.pythonhosted.org/packages/44/5c/089154029fcca7729ae142ac820057f74ca4b0b59617734276c31281af15/pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944", size = 2567664 }, +] + +[[package]] +name = "platformdirs" +version = "4.3.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/13/fc/128cc9cb8f03208bdbf93d3aa862e16d376844a14f9a0ce5cf4507372de4/platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907", size = 21302 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/a6/bc1012356d8ece4d66dd75c4b9fc6c1f6650ddd5991e421177d9f8f671be/platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb", size = 18439 }, +] + +[[package]] +name = "pluggy" +version = "1.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1", size = 67955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669", size = 20556 }, +] + +[[package]] +name = "pre-commit" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cfgv" }, + { name = "identify" }, + { name = "nodeenv" }, + { name = "pyyaml" }, + { name = "virtualenv" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/c8/e22c292035f1bac8b9f5237a2622305bc0304e776080b246f3df57c4ff9f/pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2", size = 191678 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/16/8f/496e10d51edd6671ebe0432e33ff800aa86775d2d147ce7d43389324a525/pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878", size = 218713 }, +] + +[[package]] +name = "protobuf" +version = "5.29.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a5/73/4e6295c1420a9d20c9c351db3a36109b4c9aa601916cb7c6871e3196a1ca/protobuf-5.29.2.tar.gz", hash = "sha256:b2cc8e8bb7c9326996f0e160137b0861f1a82162502658df2951209d0cb0309e", size = 424901 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/42/6db5387124708d619ffb990a846fb123bee546f52868039f8fa964c5bc54/protobuf-5.29.2-cp310-abi3-win32.whl", hash = "sha256:c12ba8249f5624300cf51c3d0bfe5be71a60c63e4dcf51ffe9a68771d958c851", size = 422697 }, + { url = "https://files.pythonhosted.org/packages/6c/38/2fcc968b377b531882d6ab2ac99b10ca6d00108394f6ff57c2395fb7baff/protobuf-5.29.2-cp310-abi3-win_amd64.whl", hash = "sha256:842de6d9241134a973aab719ab42b008a18a90f9f07f06ba480df268f86432f9", size = 434495 }, + { url = "https://files.pythonhosted.org/packages/cb/26/41debe0f6615fcb7e97672057524687ed86fcd85e3da3f031c30af8f0c51/protobuf-5.29.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a0c53d78383c851bfa97eb42e3703aefdc96d2036a41482ffd55dc5f529466eb", size = 417812 }, + { url = "https://files.pythonhosted.org/packages/e4/20/38fc33b60dcfb380507b99494aebe8c34b68b8ac7d32808c4cebda3f6f6b/protobuf-5.29.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:494229ecd8c9009dd71eda5fd57528395d1eacdf307dbece6c12ad0dd09e912e", size = 319562 }, + { url = "https://files.pythonhosted.org/packages/90/4d/c3d61e698e0e41d926dbff6aa4e57428ab1a6fc3b5e1deaa6c9ec0fd45cf/protobuf-5.29.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b6b0d416bbbb9d4fbf9d0561dbfc4e324fd522f61f7af0fe0f282ab67b22477e", size = 319662 }, + { url = "https://files.pythonhosted.org/packages/5e/d0/76d086c744c8252b35c2bc9c49c3be7c815b806191e58ad82c6d228c07a8/protobuf-5.29.2-cp39-cp39-win32.whl", hash = "sha256:36000f97ea1e76e8398a3f02936aac2a5d2b111aae9920ec1b769fc4a222c4d9", size = 422665 }, + { url = "https://files.pythonhosted.org/packages/84/08/be8223de1967ae8a100aaa1f7076f65c42ed1ff5ed413ff5dd718cff9fa8/protobuf-5.29.2-cp39-cp39-win_amd64.whl", hash = "sha256:2d2e674c58a06311c8e99e74be43e7f3a8d1e2b2fdf845eaa347fbd866f23355", size = 434584 }, + { url = "https://files.pythonhosted.org/packages/f3/fd/c7924b4c2a1c61b8f4b64edd7a31ffacf63432135a2606f03a2f0d75a750/protobuf-5.29.2-py3-none-any.whl", hash = "sha256:fde4554c0e578a5a0bcc9a276339594848d1e89f9ea47b4427c80e5d72f90181", size = 172539 }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/81/331257dbf2801cdb82105306042f7a1637cc752f65f2bb688188e0de5f0b/psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f", size = 3043397 }, + { url = "https://files.pythonhosted.org/packages/e7/9a/7f4f2f031010bbfe6a02b4a15c01e12eb6b9b7b358ab33229f28baadbfc1/psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906", size = 3274806 }, + { url = "https://files.pythonhosted.org/packages/e5/57/8ddd4b374fa811a0b0a0f49b6abad1cde9cb34df73ea3348cc283fcd70b4/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92", size = 2851361 }, + { url = "https://files.pythonhosted.org/packages/f9/66/d1e52c20d283f1f3a8e7e5c1e06851d432f123ef57b13043b4f9b21ffa1f/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007", size = 3080836 }, + { url = "https://files.pythonhosted.org/packages/a0/cb/592d44a9546aba78f8a1249021fe7c59d3afb8a0ba51434d6610cc3462b6/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0", size = 3264552 }, + { url = "https://files.pythonhosted.org/packages/64/33/c8548560b94b7617f203d7236d6cdf36fe1a5a3645600ada6efd79da946f/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4", size = 3019789 }, + { url = "https://files.pythonhosted.org/packages/b0/0e/c2da0db5bea88a3be52307f88b75eec72c4de62814cbe9ee600c29c06334/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1", size = 2871776 }, + { url = "https://files.pythonhosted.org/packages/15/d7/774afa1eadb787ddf41aab52d4c62785563e29949613c958955031408ae6/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5", size = 2820959 }, + { url = "https://files.pythonhosted.org/packages/5e/ed/440dc3f5991a8c6172a1cde44850ead0e483a375277a1aef7cfcec00af07/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5", size = 2919329 }, + { url = "https://files.pythonhosted.org/packages/03/be/2cc8f4282898306732d2ae7b7378ae14e8df3c1231b53579efa056aae887/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53", size = 2957659 }, + { url = "https://files.pythonhosted.org/packages/d0/12/fb8e4f485d98c570e00dad5800e9a2349cfe0f71a767c856857160d343a5/psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b", size = 1024605 }, + { url = "https://files.pythonhosted.org/packages/22/4f/217cd2471ecf45d82905dd09085e049af8de6cfdc008b6663c3226dc1c98/psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1", size = 1163817 }, + { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397 }, + { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806 }, + { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", size = 2851370 }, + { url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780 }, + { url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583 }, + { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831 }, + { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822 }, + { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975 }, + { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", size = 2919320 }, + { url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617 }, + { url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618 }, + { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816 }, + { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771 }, + { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336 }, + { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637 }, + { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097 }, + { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776 }, + { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968 }, + { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334 }, + { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722 }, + { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132 }, + { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312 }, + { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191 }, + { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031 }, + { url = "https://files.pythonhosted.org/packages/a2/bc/e77648009b6e61af327c607543f65fdf25bcfb4100f5a6f3bdb62ddac03c/psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b", size = 3043437 }, + { url = "https://files.pythonhosted.org/packages/e0/e8/5a12211a1f5b959f3e3ccd342eace60c1f26422f53e06d687821dc268780/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc", size = 2851340 }, + { url = "https://files.pythonhosted.org/packages/47/ed/5932b0458a7fc61237b653df050513c8d18a6f4083cc7f90dcef967f7bce/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697", size = 3080905 }, + { url = "https://files.pythonhosted.org/packages/71/df/8047d85c3d23864aca4613c3be1ea0fe61dbe4e050a89ac189f9dce4403e/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481", size = 3264640 }, + { url = "https://files.pythonhosted.org/packages/f3/de/6157e4ef242920e8f2749f7708d5cc8815414bdd4a27a91996e7cd5c80df/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648", size = 3019812 }, + { url = "https://files.pythonhosted.org/packages/25/f9/0fc49efd2d4d6db3a8d0a3f5749b33a0d3fdd872cad49fbf5bfce1c50027/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d", size = 2871933 }, + { url = "https://files.pythonhosted.org/packages/57/bc/2ed1bd182219065692ed458d218d311b0b220b20662d25d913bc4e8d3549/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30", size = 2820990 }, + { url = "https://files.pythonhosted.org/packages/71/2a/43f77a9b8ee0b10e2de784d97ddc099d9fe0d9eec462a006e4d2cc74756d/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c", size = 2919352 }, + { url = "https://files.pythonhosted.org/packages/57/86/d2943df70469e6afab3b5b8e1367fccc61891f46de436b24ddee6f2c8404/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287", size = 2957614 }, + { url = "https://files.pythonhosted.org/packages/85/21/195d69371330983aa16139e60ba855d0a18164c9295f3a3696be41bbcd54/psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8", size = 1025341 }, + { url = "https://files.pythonhosted.org/packages/ad/53/73196ebc19d6fbfc22427b982fbc98698b7b9c361e5e7707e3a3247cf06d/psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5", size = 1163958 }, +] + +[[package]] +name = "pyarrow" +version = "18.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/7b/640785a9062bb00314caa8a387abce547d2a420cf09bd6c715fe659ccffb/pyarrow-18.1.0.tar.gz", hash = "sha256:9386d3ca9c145b5539a1cfc75df07757dff870168c959b473a0bccbc3abc8c73", size = 1118671 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/bb/8d4a1573f66e0684f190dd2b55fd0b97a7214de8882d58a3867e777bf640/pyarrow-18.1.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e21488d5cfd3d8b500b3238a6c4b075efabc18f0f6d80b29239737ebd69caa6c", size = 29531620 }, + { url = "https://files.pythonhosted.org/packages/30/90/893acfad917533b624a97b9e498c0e8393908508a0a72d624fe935e632bf/pyarrow-18.1.0-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:b516dad76f258a702f7ca0250885fc93d1fa5ac13ad51258e39d402bd9e2e1e4", size = 30836521 }, + { url = "https://files.pythonhosted.org/packages/a3/2a/526545a7464b5fb2fa6e2c4bad16ca90e59e1843025c534fd907b7f73e5a/pyarrow-18.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f443122c8e31f4c9199cb23dca29ab9427cef990f283f80fe15b8e124bcc49b", size = 39213905 }, + { url = "https://files.pythonhosted.org/packages/8a/77/4b3fab91a30e19e233e738d0c5eca5a8f6dd05758bc349a2ca262c65de79/pyarrow-18.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0a03da7f2758645d17b7b4f83c8bffeae5bbb7f974523fe901f36288d2eab71", size = 40128881 }, + { url = "https://files.pythonhosted.org/packages/aa/e2/a88e16c5e45e562449c52305bd3bc2f9d704295322d3434656e7ccac1444/pyarrow-18.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ba17845efe3aa358ec266cf9cc2800fa73038211fb27968bfa88acd09261a470", size = 38627517 }, + { url = "https://files.pythonhosted.org/packages/6d/84/8037c20005ccc7b869726465be0957bd9c29cfc88612962030f08292ad06/pyarrow-18.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:3c35813c11a059056a22a3bef520461310f2f7eea5c8a11ef9de7062a23f8d56", size = 40060187 }, + { url = "https://files.pythonhosted.org/packages/2a/38/d6435c723ff73df8ae74626ea778262fbcc2b9b0d1a4f3db915b61711b05/pyarrow-18.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9736ba3c85129d72aefa21b4f3bd715bc4190fe4426715abfff90481e7d00812", size = 25118314 }, + { url = "https://files.pythonhosted.org/packages/9e/4d/a4988e7d82f4fbc797715db4185939a658eeffb07a25bab7262bed1ea076/pyarrow-18.1.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:eaeabf638408de2772ce3d7793b2668d4bb93807deed1725413b70e3156a7854", size = 29554860 }, + { url = "https://files.pythonhosted.org/packages/59/03/3a42c5c1e4bd4c900ab62aa1ff6b472bdb159ba8f1c3e5deadab7222244f/pyarrow-18.1.0-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:3b2e2239339c538f3464308fd345113f886ad031ef8266c6f004d49769bb074c", size = 30867076 }, + { url = "https://files.pythonhosted.org/packages/75/7e/332055ac913373e89256dce9d14b7708f55f7bd5be631456c897f0237738/pyarrow-18.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f39a2e0ed32a0970e4e46c262753417a60c43a3246972cfc2d3eb85aedd01b21", size = 39212135 }, + { url = "https://files.pythonhosted.org/packages/8c/64/5099cdb325828722ef7ffeba9a4696f238eb0cdeae227f831c2d77fcf1bd/pyarrow-18.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31e9417ba9c42627574bdbfeada7217ad8a4cbbe45b9d6bdd4b62abbca4c6f6", size = 40125195 }, + { url = "https://files.pythonhosted.org/packages/83/88/1938d783727db1b178ff71bc6a6143d7939e406db83a9ec23cad3dad325c/pyarrow-18.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:01c034b576ce0eef554f7c3d8c341714954be9b3f5d5bc7117006b85fcf302fe", size = 38641884 }, + { url = "https://files.pythonhosted.org/packages/5e/b5/9e14e9f7590e0eaa435ecea84dabb137284a4dbba7b3c337b58b65b76d95/pyarrow-18.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:f266a2c0fc31995a06ebd30bcfdb7f615d7278035ec5b1cd71c48d56daaf30b0", size = 40076877 }, + { url = "https://files.pythonhosted.org/packages/4d/a3/817ac7fe0891a2d66e247e223080f3a6a262d8aefd77e11e8c27e6acf4e1/pyarrow-18.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:d4f13eee18433f99adefaeb7e01d83b59f73360c231d4782d9ddfaf1c3fbde0a", size = 25119811 }, + { url = "https://files.pythonhosted.org/packages/6a/50/12829e7111b932581e51dda51d5cb39207a056c30fe31ef43f14c63c4d7e/pyarrow-18.1.0-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:9f3a76670b263dc41d0ae877f09124ab96ce10e4e48f3e3e4257273cee61ad0d", size = 29514620 }, + { url = "https://files.pythonhosted.org/packages/d1/41/468c944eab157702e96abab3d07b48b8424927d4933541ab43788bb6964d/pyarrow-18.1.0-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:da31fbca07c435be88a0c321402c4e31a2ba61593ec7473630769de8346b54ee", size = 30856494 }, + { url = "https://files.pythonhosted.org/packages/68/f9/29fb659b390312a7345aeb858a9d9c157552a8852522f2c8bad437c29c0a/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:543ad8459bc438efc46d29a759e1079436290bd583141384c6f7a1068ed6f992", size = 39203624 }, + { url = "https://files.pythonhosted.org/packages/6e/f6/19360dae44200e35753c5c2889dc478154cd78e61b1f738514c9f131734d/pyarrow-18.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0743e503c55be0fdb5c08e7d44853da27f19dc854531c0570f9f394ec9671d54", size = 40139341 }, + { url = "https://files.pythonhosted.org/packages/bb/e6/9b3afbbcf10cc724312e824af94a2e993d8ace22994d823f5c35324cebf5/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:d4b3d2a34780645bed6414e22dda55a92e0fcd1b8a637fba86800ad737057e33", size = 38618629 }, + { url = "https://files.pythonhosted.org/packages/3a/2e/3b99f8a3d9e0ccae0e961978a0d0089b25fb46ebbcfb5ebae3cca179a5b3/pyarrow-18.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c52f81aa6f6575058d8e2c782bf79d4f9fdc89887f16825ec3a66607a5dd8e30", size = 40078661 }, + { url = "https://files.pythonhosted.org/packages/76/52/f8da04195000099d394012b8d42c503d7041b79f778d854f410e5f05049a/pyarrow-18.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ad4892617e1a6c7a551cfc827e072a633eaff758fa09f21c4ee548c30bcaf99", size = 25092330 }, + { url = "https://files.pythonhosted.org/packages/fd/9b/60516e3876ec6f25b0909afa70f90a15de83b48c7c0d8042fac4e64c4411/pyarrow-18.1.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:0b331e477e40f07238adc7ba7469c36b908f07c89b95dd4bd3a0ec84a3d1e21e", size = 29543752 }, + { url = "https://files.pythonhosted.org/packages/14/a7/bd08b6f1a2bd2e71dc6bb0451fc1872607e44c83daf1ee63c82764a2d233/pyarrow-18.1.0-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:2c4dd0c9010a25ba03e198fe743b1cc03cd33c08190afff371749c52ccbbaf76", size = 30850753 }, + { url = "https://files.pythonhosted.org/packages/84/c9/62ef9c6281c0e5b4ee1afa9d7bd556e72e06da6706b7906c32c15e69b3d6/pyarrow-18.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f97b31b4c4e21ff58c6f330235ff893cc81e23da081b1a4b1c982075e0ed4e9", size = 39226870 }, + { url = "https://files.pythonhosted.org/packages/b2/99/a6e89e71655a38475e76b060777c8bf69c078b772bec3b7daf7361440f05/pyarrow-18.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a4813cb8ecf1809871fd2d64a8eff740a1bd3691bbe55f01a3cf6c5ec869754", size = 40139114 }, + { url = "https://files.pythonhosted.org/packages/64/a9/06d79923890682e4fe7a16524abee307407008a413115354aaf3226b8410/pyarrow-18.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:05a5636ec3eb5cc2a36c6edb534a38ef57b2ab127292a716d00eabb887835f1e", size = 38639231 }, + { url = "https://files.pythonhosted.org/packages/3b/8c/4c3ed19026a00740b81fe1c87f3ff235b2763a0a1ddf5711a9d026b775ce/pyarrow-18.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:73eeed32e724ea3568bb06161cad5fa7751e45bc2228e33dcb10c614044165c7", size = 40070949 }, + { url = "https://files.pythonhosted.org/packages/87/d8/94161a7ca5c55199484e926165e9e33f318ea1d1b0d7cdbcbc3652b933ec/pyarrow-18.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:a1880dd6772b685e803011a6b43a230c23b566859a6e0c9a276c1e0faf4f4052", size = 25301373 }, +] + +[[package]] +name = "pydantic" +version = "2.10.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/70/7e/fb60e6fee04d0ef8f15e4e01ff187a196fa976eb0f0ab524af4599e5754c/pydantic-2.10.4.tar.gz", hash = "sha256:82f12e9723da6de4fe2ba888b5971157b3be7ad914267dea8f05f82b28254f06", size = 762094 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f3/26/3e1bbe954fde7ee22a6e7d31582c642aad9e84ffe4b5fb61e63b87cd326f/pydantic-2.10.4-py3-none-any.whl", hash = "sha256:597e135ea68be3a37552fb524bc7d0d66dcf93d395acd93a00682f1efcb8ee3d", size = 431765 }, +] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/01/f3e5ac5e7c25833db5eb555f7b7ab24cd6f8c322d3a3ad2d67a952dc0abc/pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39", size = 413443 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/bc/fed5f74b5d802cf9a03e83f60f18864e90e3aed7223adaca5ffb7a8d8d64/pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa", size = 1895938 }, + { url = "https://files.pythonhosted.org/packages/71/2a/185aff24ce844e39abb8dd680f4e959f0006944f4a8a0ea372d9f9ae2e53/pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c", size = 1815684 }, + { url = "https://files.pythonhosted.org/packages/c3/43/fafabd3d94d159d4f1ed62e383e264f146a17dd4d48453319fd782e7979e/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a", size = 1829169 }, + { url = "https://files.pythonhosted.org/packages/a2/d1/f2dfe1a2a637ce6800b799aa086d079998959f6f1215eb4497966efd2274/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5", size = 1867227 }, + { url = "https://files.pythonhosted.org/packages/7d/39/e06fcbcc1c785daa3160ccf6c1c38fea31f5754b756e34b65f74e99780b5/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c", size = 2037695 }, + { url = "https://files.pythonhosted.org/packages/7a/67/61291ee98e07f0650eb756d44998214231f50751ba7e13f4f325d95249ab/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7", size = 2741662 }, + { url = "https://files.pythonhosted.org/packages/32/90/3b15e31b88ca39e9e626630b4c4a1f5a0dfd09076366f4219429e6786076/pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a", size = 1993370 }, + { url = "https://files.pythonhosted.org/packages/ff/83/c06d333ee3a67e2e13e07794995c1535565132940715931c1c43bfc85b11/pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236", size = 1996813 }, + { url = "https://files.pythonhosted.org/packages/7c/f7/89be1c8deb6e22618a74f0ca0d933fdcb8baa254753b26b25ad3acff8f74/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962", size = 2005287 }, + { url = "https://files.pythonhosted.org/packages/b7/7d/8eb3e23206c00ef7feee17b83a4ffa0a623eb1a9d382e56e4aa46fd15ff2/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9", size = 2128414 }, + { url = "https://files.pythonhosted.org/packages/4e/99/fe80f3ff8dd71a3ea15763878d464476e6cb0a2db95ff1c5c554133b6b83/pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af", size = 2155301 }, + { url = "https://files.pythonhosted.org/packages/2b/a3/e50460b9a5789ca1451b70d4f52546fa9e2b420ba3bfa6100105c0559238/pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4", size = 1816685 }, + { url = "https://files.pythonhosted.org/packages/57/4c/a8838731cb0f2c2a39d3535376466de6049034d7b239c0202a64aaa05533/pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31", size = 1982876 }, + { url = "https://files.pythonhosted.org/packages/c2/89/f3450af9d09d44eea1f2c369f49e8f181d742f28220f88cc4dfaae91ea6e/pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc", size = 1893421 }, + { url = "https://files.pythonhosted.org/packages/9e/e3/71fe85af2021f3f386da42d291412e5baf6ce7716bd7101ea49c810eda90/pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7", size = 1814998 }, + { url = "https://files.pythonhosted.org/packages/a6/3c/724039e0d848fd69dbf5806894e26479577316c6f0f112bacaf67aa889ac/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15", size = 1826167 }, + { url = "https://files.pythonhosted.org/packages/2b/5b/1b29e8c1fb5f3199a9a57c1452004ff39f494bbe9bdbe9a81e18172e40d3/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306", size = 1865071 }, + { url = "https://files.pythonhosted.org/packages/89/6c/3985203863d76bb7d7266e36970d7e3b6385148c18a68cc8915fd8c84d57/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99", size = 2036244 }, + { url = "https://files.pythonhosted.org/packages/0e/41/f15316858a246b5d723f7d7f599f79e37493b2e84bfc789e58d88c209f8a/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459", size = 2737470 }, + { url = "https://files.pythonhosted.org/packages/a8/7c/b860618c25678bbd6d1d99dbdfdf0510ccb50790099b963ff78a124b754f/pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048", size = 1992291 }, + { url = "https://files.pythonhosted.org/packages/bf/73/42c3742a391eccbeab39f15213ecda3104ae8682ba3c0c28069fbcb8c10d/pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d", size = 1994613 }, + { url = "https://files.pythonhosted.org/packages/94/7a/941e89096d1175d56f59340f3a8ebaf20762fef222c298ea96d36a6328c5/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b", size = 2002355 }, + { url = "https://files.pythonhosted.org/packages/6e/95/2359937a73d49e336a5a19848713555605d4d8d6940c3ec6c6c0ca4dcf25/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474", size = 2126661 }, + { url = "https://files.pythonhosted.org/packages/2b/4c/ca02b7bdb6012a1adef21a50625b14f43ed4d11f1fc237f9d7490aa5078c/pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6", size = 2153261 }, + { url = "https://files.pythonhosted.org/packages/72/9d/a241db83f973049a1092a079272ffe2e3e82e98561ef6214ab53fe53b1c7/pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c", size = 1812361 }, + { url = "https://files.pythonhosted.org/packages/e8/ef/013f07248041b74abd48a385e2110aa3a9bbfef0fbd97d4e6d07d2f5b89a/pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc", size = 1982484 }, + { url = "https://files.pythonhosted.org/packages/10/1c/16b3a3e3398fd29dca77cea0a1d998d6bde3902fa2706985191e2313cc76/pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4", size = 1867102 }, + { url = "https://files.pythonhosted.org/packages/d6/74/51c8a5482ca447871c93e142d9d4a92ead74de6c8dc5e66733e22c9bba89/pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0", size = 1893127 }, + { url = "https://files.pythonhosted.org/packages/d3/f3/c97e80721735868313c58b89d2de85fa80fe8dfeeed84dc51598b92a135e/pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef", size = 1811340 }, + { url = "https://files.pythonhosted.org/packages/9e/91/840ec1375e686dbae1bd80a9e46c26a1e0083e1186abc610efa3d9a36180/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7", size = 1822900 }, + { url = "https://files.pythonhosted.org/packages/f6/31/4240bc96025035500c18adc149aa6ffdf1a0062a4b525c932065ceb4d868/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934", size = 1869177 }, + { url = "https://files.pythonhosted.org/packages/fa/20/02fbaadb7808be578317015c462655c317a77a7c8f0ef274bc016a784c54/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6", size = 2038046 }, + { url = "https://files.pythonhosted.org/packages/06/86/7f306b904e6c9eccf0668248b3f272090e49c275bc488a7b88b0823444a4/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c", size = 2685386 }, + { url = "https://files.pythonhosted.org/packages/8d/f0/49129b27c43396581a635d8710dae54a791b17dfc50c70164866bbf865e3/pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2", size = 1997060 }, + { url = "https://files.pythonhosted.org/packages/0d/0f/943b4af7cd416c477fd40b187036c4f89b416a33d3cc0ab7b82708a667aa/pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4", size = 2004870 }, + { url = "https://files.pythonhosted.org/packages/35/40/aea70b5b1a63911c53a4c8117c0a828d6790483f858041f47bab0b779f44/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3", size = 1999822 }, + { url = "https://files.pythonhosted.org/packages/f2/b3/807b94fd337d58effc5498fd1a7a4d9d59af4133e83e32ae39a96fddec9d/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4", size = 2130364 }, + { url = "https://files.pythonhosted.org/packages/fc/df/791c827cd4ee6efd59248dca9369fb35e80a9484462c33c6649a8d02b565/pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57", size = 2158303 }, + { url = "https://files.pythonhosted.org/packages/9b/67/4e197c300976af185b7cef4c02203e175fb127e414125916bf1128b639a9/pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc", size = 1834064 }, + { url = "https://files.pythonhosted.org/packages/1f/ea/cd7209a889163b8dcca139fe32b9687dd05249161a3edda62860430457a5/pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9", size = 1989046 }, + { url = "https://files.pythonhosted.org/packages/bc/49/c54baab2f4658c26ac633d798dab66b4c3a9bbf47cff5284e9c182f4137a/pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b", size = 1885092 }, + { url = "https://files.pythonhosted.org/packages/27/97/3aef1ddb65c5ccd6eda9050036c956ff6ecbfe66cb7eb40f280f121a5bb0/pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993", size = 1896475 }, + { url = "https://files.pythonhosted.org/packages/ad/d3/5668da70e373c9904ed2f372cb52c0b996426f302e0dee2e65634c92007d/pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308", size = 1772279 }, + { url = "https://files.pythonhosted.org/packages/8a/9e/e44b8cb0edf04a2f0a1f6425a65ee089c1d6f9c4c2dcab0209127b6fdfc2/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4", size = 1829112 }, + { url = "https://files.pythonhosted.org/packages/1c/90/1160d7ac700102effe11616e8119e268770f2a2aa5afb935f3ee6832987d/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf", size = 1866780 }, + { url = "https://files.pythonhosted.org/packages/ee/33/13983426df09a36d22c15980008f8d9c77674fc319351813b5a2739b70f3/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76", size = 2037943 }, + { url = "https://files.pythonhosted.org/packages/01/d7/ced164e376f6747e9158c89988c293cd524ab8d215ae4e185e9929655d5c/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118", size = 2740492 }, + { url = "https://files.pythonhosted.org/packages/8b/1f/3dc6e769d5b7461040778816aab2b00422427bcaa4b56cc89e9c653b2605/pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630", size = 1995714 }, + { url = "https://files.pythonhosted.org/packages/07/d7/a0bd09bc39283530b3f7c27033a814ef254ba3bd0b5cfd040b7abf1fe5da/pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54", size = 1997163 }, + { url = "https://files.pythonhosted.org/packages/2d/bb/2db4ad1762e1c5699d9b857eeb41959191980de6feb054e70f93085e1bcd/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f", size = 2005217 }, + { url = "https://files.pythonhosted.org/packages/53/5f/23a5a3e7b8403f8dd8fc8a6f8b49f6b55c7d715b77dcf1f8ae919eeb5628/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362", size = 2127899 }, + { url = "https://files.pythonhosted.org/packages/c2/ae/aa38bb8dd3d89c2f1d8362dd890ee8f3b967330821d03bbe08fa01ce3766/pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96", size = 2155726 }, + { url = "https://files.pythonhosted.org/packages/98/61/4f784608cc9e98f70839187117ce840480f768fed5d386f924074bf6213c/pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e", size = 1817219 }, + { url = "https://files.pythonhosted.org/packages/57/82/bb16a68e4a1a858bb3768c2c8f1ff8d8978014e16598f001ea29a25bf1d1/pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67", size = 1985382 }, + { url = "https://files.pythonhosted.org/packages/46/72/af70981a341500419e67d5cb45abe552a7c74b66326ac8877588488da1ac/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e", size = 1891159 }, + { url = "https://files.pythonhosted.org/packages/ad/3d/c5913cccdef93e0a6a95c2d057d2c2cba347815c845cda79ddd3c0f5e17d/pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8", size = 1768331 }, + { url = "https://files.pythonhosted.org/packages/f6/f0/a3ae8fbee269e4934f14e2e0e00928f9346c5943174f2811193113e58252/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3", size = 1822467 }, + { url = "https://files.pythonhosted.org/packages/d7/7a/7bbf241a04e9f9ea24cd5874354a83526d639b02674648af3f350554276c/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f", size = 1979797 }, + { url = "https://files.pythonhosted.org/packages/4f/5f/4784c6107731f89e0005a92ecb8a2efeafdb55eb992b8e9d0a2be5199335/pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133", size = 1987839 }, + { url = "https://files.pythonhosted.org/packages/6d/a7/61246562b651dff00de86a5f01b6e4befb518df314c54dec187a78d81c84/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc", size = 1998861 }, + { url = "https://files.pythonhosted.org/packages/86/aa/837821ecf0c022bbb74ca132e117c358321e72e7f9702d1b6a03758545e2/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50", size = 2116582 }, + { url = "https://files.pythonhosted.org/packages/81/b0/5e74656e95623cbaa0a6278d16cf15e10a51f6002e3ec126541e95c29ea3/pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9", size = 2151985 }, + { url = "https://files.pythonhosted.org/packages/63/37/3e32eeb2a451fddaa3898e2163746b0cffbbdbb4740d38372db0490d67f3/pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151", size = 2004715 }, + { url = "https://files.pythonhosted.org/packages/29/0e/dcaea00c9dbd0348b723cae82b0e0c122e0fa2b43fa933e1622fd237a3ee/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656", size = 1891733 }, + { url = "https://files.pythonhosted.org/packages/86/d3/e797bba8860ce650272bda6383a9d8cad1d1c9a75a640c9d0e848076f85e/pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278", size = 1768375 }, + { url = "https://files.pythonhosted.org/packages/41/f7/f847b15fb14978ca2b30262548f5fc4872b2724e90f116393eb69008299d/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb", size = 1822307 }, + { url = "https://files.pythonhosted.org/packages/9c/63/ed80ec8255b587b2f108e514dc03eed1546cd00f0af281e699797f373f38/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd", size = 1979971 }, + { url = "https://files.pythonhosted.org/packages/a9/6d/6d18308a45454a0de0e975d70171cadaf454bc7a0bf86b9c7688e313f0bb/pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc", size = 1987616 }, + { url = "https://files.pythonhosted.org/packages/82/8a/05f8780f2c1081b800a7ca54c1971e291c2d07d1a50fb23c7e4aef4ed403/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b", size = 1998943 }, + { url = "https://files.pythonhosted.org/packages/5e/3e/fe5b6613d9e4c0038434396b46c5303f5ade871166900b357ada4766c5b7/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b", size = 2116654 }, + { url = "https://files.pythonhosted.org/packages/db/ad/28869f58938fad8cc84739c4e592989730bfb69b7c90a8fff138dff18e1e/pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2", size = 2152292 }, + { url = "https://files.pythonhosted.org/packages/a1/0c/c5c5cd3689c32ed1fe8c5d234b079c12c281c051759770c05b8bed6412b5/pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35", size = 2004961 }, +] + +[[package]] +name = "pydeck" +version = "0.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "jinja2" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/ca/40e14e196864a0f61a92abb14d09b3d3da98f94ccb03b49cf51688140dab/pydeck-0.9.1.tar.gz", hash = "sha256:f74475ae637951d63f2ee58326757f8d4f9cd9f2a457cf42950715003e2cb605", size = 3832240 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ab/4c/b888e6cf58bd9db9c93f40d1c6be8283ff49d88919231afe93a6bcf61626/pydeck-0.9.1-py2.py3-none-any.whl", hash = "sha256:b3f75ba0d273fc917094fa61224f3f6076ca8752b93d46faf3bcfd9f9d59b038", size = 6900403 }, +] + +[[package]] +name = "pygments" +version = "2.18.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/62/8336eff65bcbc8e4cb5d05b55faf041285951b6e80f33e2bff2024788f31/pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199", size = 4891905 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, +] + +[[package]] +name = "pylint" +version = "3.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "astroid" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "dill" }, + { name = "isort" }, + { name = "mccabe" }, + { name = "platformdirs" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, + { name = "tomlkit" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/17/fd/e9a739afac274a39596bbe562e9d966db6f3917fdb2bd7322ffc56da0ba2/pylint-3.3.3.tar.gz", hash = "sha256:07c607523b17e6d16e2ae0d7ef59602e332caa762af64203c24b41c27139f36a", size = 1516550 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/91/e1/26d55acea92b1ea4d33672e48f09ceeb274e84d7d542a4fb9a32a556db46/pylint-3.3.3-py3-none-any.whl", hash = "sha256:26e271a2bc8bce0fc23833805a9076dd9b4d5194e2a02164942cb3cdc37b4183", size = 521918 }, +] + +[[package]] +name = "pyparsing" +version = "3.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8c/d5/e5aeee5387091148a19e1145f63606619cb5f20b83fccb63efae6474e7b2/pyparsing-3.2.0.tar.gz", hash = "sha256:cbf74e27246d595d9a74b186b810f6fbb86726dbf3b9532efb343f6d7294fe9c", size = 920984 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/ec/2eb3cd785efd67806c46c13a17339708ddc346cbb684eade7a6e6f79536a/pyparsing-3.2.0-py3-none-any.whl", hash = "sha256:93d9577b88da0bbea8cc8334ee8b918ed014968fd2ec383e868fb8afb1ccef84", size = 106921 }, +] + +[[package]] +name = "pytest" +version = "7.4.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/1f/9d8e98e4133ffb16c90f3b405c43e38d3abb715bb5d7a63a5a684f7e46a3/pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280", size = 1357116 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8", size = 325287 }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 }, +] + +[[package]] +name = "python-slugify" +version = "8.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "text-unidecode" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/87/c7/5e1547c44e31da50a460df93af11a535ace568ef89d7a811069ead340c4a/python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856", size = 10921 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a4/62/02da182e544a51a5c3ccf4b03ab79df279f9c60c5e82d5e8bec7ca26ac11/python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8", size = 10051 }, +] + +[[package]] +name = "pytimeparse" +version = "1.1.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/5d/231f5f33c81e09682708fb323f9e4041408d8223e2f0fb9742843328778f/pytimeparse-1.1.8.tar.gz", hash = "sha256:e86136477be924d7e670646a98561957e8ca7308d44841e21f5ddea757556a0a", size = 9403 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1b/b4/afd75551a3b910abd1d922dbd45e49e5deeb4d47dc50209ce489ba9844dd/pytimeparse-1.1.8-py2.py3-none-any.whl", hash = "sha256:04b7be6cc8bd9f5647a6325444926c3ac34ee6bc7e69da4367ba282f076036bd", size = 9969 }, +] + +[[package]] +name = "pytz" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/31/3c70bf7603cc2dca0f19bdc53b4537a797747a58875b552c8c413d963a3f/pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a", size = 319692 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/c3/005fcca25ce078d2cc29fd559379817424e94885510568bc1bc53d7d5846/pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725", size = 508002 }, +] + +[[package]] +name = "pywavelets" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version < '3.10'", +] +dependencies = [ + { name = "numpy", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/96/9b/f124c8a8d1a69258a4d4d5cbb30a58dc059f836494cd3f067f419705bc72/pywavelets-1.6.0.tar.gz", hash = "sha256:ea027c70977122c5fc27b2510f0a0d9528f9c3df6ea3e4c577ca55fd00325a5b", size = 3939946 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/6b/034e7b6b60cbe9df91f6542cf8e9a30d10bd25e23f1a1bc19145556eb1ac/pywavelets-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ddc1ff5ad706313d930f857f9656f565dfb81b85bbe58a9db16ad8fa7d1537c5", size = 4361518 }, + { url = "https://files.pythonhosted.org/packages/9e/00/a6905c12ccbd1e7775fefe1fb8483e762b3ff1c30d90443166119bc3ca6e/pywavelets-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:78feab4e0c25fa32034b6b64cb854c6ce15663b4f0ffb25d8f0ee58915300f9b", size = 4325177 }, + { url = "https://files.pythonhosted.org/packages/08/53/aff7f7932b61c61a77a6432ed25717eb5aaf94143dabe7d2bba208ebfcd6/pywavelets-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be36f08efe9bc3abf40cf40cd2ee0aa0db26e4894e13ce5ac178442864161e8c", size = 4446509 }, + { url = "https://files.pythonhosted.org/packages/6e/01/a7157eec994747d3825df3e327365e072e5c80408000a358f2431cf91eb2/pywavelets-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0595c51472c9c5724fe087cb73e2797053fd25c788d6553fdad6ff61abc60e91", size = 4526165 }, + { url = "https://files.pythonhosted.org/packages/4b/9c/288d7318cd91062869f1d494c10a57f1732293768afc75fa0ecb7a60b9f7/pywavelets-1.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:058a750477dde633ac53b8806f835af3559d52db6532fb2b93c1f4b5441365b8", size = 4466055 }, + { url = "https://files.pythonhosted.org/packages/7c/8b/551363c22c4f8d3ceddbe54b346290a9af33a4566b0c4f1e263a1747508f/pywavelets-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:538795d9c4181152b414285b5a7f72ac52581ecdcdce74b6cca3fa0b8a5ab0aa", size = 4545637 }, + { url = "https://files.pythonhosted.org/packages/05/35/b1cbea9e0aaf1759240be7e936f3cc1522b5fed8289c48863250eafb0cf5/pywavelets-1.6.0-cp310-cp310-win32.whl", hash = "sha256:47de024ba4f9df97e98b5f540340e1a9edd82d2c477450bef8c9b5381487128e", size = 4178228 }, + { url = "https://files.pythonhosted.org/packages/f6/f9/359035b5d6ce2bc63d419c5920a2f2ec56192288e83c5fd0cb12e5abb3e6/pywavelets-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:e2c44760c0906ddf2176920a2613287f6eea947f166ce7eee9546081b06a6835", size = 4251413 }, + { url = "https://files.pythonhosted.org/packages/fa/45/985b5f9b76d38ef8cf71bc89ad9e6b985931a3c68228eab77a59954375e3/pywavelets-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d91aaaf6de53b758bcdc96c81cdb5a8607758602be49f691188c0e108cf1e738", size = 4362791 }, + { url = "https://files.pythonhosted.org/packages/59/fe/b22161950b56ce83309dfc342c4060957ce4c8a2f4b33b767068050cfd88/pywavelets-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3b5302edb6d1d1ff6636d37c9ff29c4892f2a3648d736cc1df01f3f36e25c8cf", size = 4325322 }, + { url = "https://files.pythonhosted.org/packages/ef/f6/9e2734482abe865abf2cf57b1e76c2180cf268195e63ceb94699ed686f53/pywavelets-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5e655446e37a3c87213d5c6386b86f65c4d61736b4432d720171e7dd6523d6a", size = 4447295 }, + { url = "https://files.pythonhosted.org/packages/a7/8e/b5f46eefc199592b48c4edea6117748e674b603de151e83d1e8896049293/pywavelets-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ec7d69b746a0eaa327b829a3252a63619f2345e263177be5dd9bf30d7933c8d", size = 4521305 }, + { url = "https://files.pythonhosted.org/packages/02/63/af53bd581bbe17083218727561fae9db3f92a5e6e0fa141018b30da087d4/pywavelets-1.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:97ea9613bd6b7108ebb44b709060adc7e2d5fac73be7152342bdd5513d75f84e", size = 4473243 }, + { url = "https://files.pythonhosted.org/packages/e4/00/a19c4f9284b9a2b57e1e7ae195052aa267361beafa86d8f69816c830ed52/pywavelets-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:48b3813c6d1a7a8194f37dbb5dbbdf2fe1112152c91445ea2e54f64ff6350c36", size = 4544912 }, + { url = "https://files.pythonhosted.org/packages/fa/d4/89c211ca6150a373002303c9d9ef77e5bed8ccc58bf41acb25a5fff84f5f/pywavelets-1.6.0-cp311-cp311-win32.whl", hash = "sha256:4ffb484d096a5eb10af7121e0203546a03e1369328df321a33ef91f67bac40cf", size = 4175349 }, + { url = "https://files.pythonhosted.org/packages/21/97/a4ed461234e9c9fa0a06f1618401e0660148819910ddd6501611bed133fe/pywavelets-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:274bc47b289585383aa65519b3fcae5b4dee5e31db3d4198d4fad701a70e59f7", size = 4252219 }, + { url = "https://files.pythonhosted.org/packages/68/5f/12fc75debf47855f180c266c82fce66f83a738c684bb845f63a85ca42e6e/pywavelets-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d6ec113386a432e04103f95e351d2657b42145bd1e1ed26513423391bcb5f011", size = 4366209 }, + { url = "https://files.pythonhosted.org/packages/be/93/53db0d37cd5993be59409e9b7eac183400af3e9209b47999a6877b756987/pywavelets-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab652112d3932d21f020e281e06926a751354c2b5629fb716f5eb9d0104b84e5", size = 4327939 }, + { url = "https://files.pythonhosted.org/packages/f4/62/9a1bacca3846e2b65d43103bad73a1540a210e3c4ad013b84b9c833acea9/pywavelets-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47b0314a22616c5f3f08760f0e00b4a15b7c7dadca5e39bb701cf7869a4207c5", size = 4414147 }, + { url = "https://files.pythonhosted.org/packages/3c/c7/aa29a95d3197404c1f2502f9154fc0d8ba5d8b5becd9f0c5f5ce285e1e0b/pywavelets-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:138471513bc0a4cd2ddc4e50c7ec04e3468c268e101a0d02f698f6aedd1d5e79", size = 4493560 }, + { url = "https://files.pythonhosted.org/packages/15/db/e79ad980f56481ae9d41c3fe1a744dc8d90ebd775860276db85d85f3870f/pywavelets-1.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:67936491ae3e5f957c428e34fdaed21f131535b8d60c7c729a1b539ce8864837", size = 4440372 }, + { url = "https://files.pythonhosted.org/packages/77/6e/331f42142e5e4783e64286f31d31667017fb474cd2b263000a0204c79c22/pywavelets-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dd798cee3d28fb3d32a26a00d9831a20bf316c36d685e4ced01b4e4a8f36f5ce", size = 4504579 }, + { url = "https://files.pythonhosted.org/packages/a6/2e/3a43ac2cfdb122614a0a1c68869f8497831bbacb012b2739c784a197b8fb/pywavelets-1.6.0-cp312-cp312-win32.whl", hash = "sha256:e772f7f0c16bfc3be8ac3cd10d29a9920bb7a39781358856223c491b899e6e79", size = 4169388 }, + { url = "https://files.pythonhosted.org/packages/7a/11/7cebd91be700652f786305754ef13cacfde4598c320b59cbe2c340e8c646/pywavelets-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:4ef15a63a72afa67ae9f4f3b06c95c5382730fb3075e668d49a880e65f2f089c", size = 4244482 }, + { url = "https://files.pythonhosted.org/packages/26/68/e1453bd31c7c942e6ffcc0c71e7df488f36dfc16f8effef1f331a2e3c373/pywavelets-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:627df378e63e9c789b6f2e7060cb4264ebae6f6b0efc1da287a2c060de454a1f", size = 4363690 }, + { url = "https://files.pythonhosted.org/packages/02/c5/a225bfdf5532dfe4fbc823f1bb038949a4e7cd9d43a3ad23fe84cd365b98/pywavelets-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a413b51dc19e05243fe0b0864a8e8a16b5ca9bf2e4713da00a95b1b5747a5367", size = 4327278 }, + { url = "https://files.pythonhosted.org/packages/0a/51/981d04e4c9b66565e0a2a1f394572f67bedeadaff9bf9282ef9b711d31b1/pywavelets-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be615c6c1873e189c265d4a76d1751ec49b17e29725e6dd2e9c74f1868f590b7", size = 4449088 }, + { url = "https://files.pythonhosted.org/packages/82/ac/2430589380ab236891481c2261b2743010927db10437cffe0329de342ffd/pywavelets-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4021ef69ec9f3862f66580fc4417be728bd78722914394594b48212fd1fcaf21", size = 4528220 }, + { url = "https://files.pythonhosted.org/packages/70/5a/32ecb4a8b43745fbda380aafbcb0da51640aacf96c7f3cf4ab890d22f193/pywavelets-1.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8fbf7b61b28b5457693c034e58a01622756d1fd60a80ae13ac5888b1d3e57e80", size = 4469082 }, + { url = "https://files.pythonhosted.org/packages/7f/ce/9964a6f611aca4d87af395d8bf1991ce209277ca5da12242b02296882710/pywavelets-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f58ddbb0a6cd243928876edfc463b990763a24fb94498607d6fea690e32cca4c", size = 4547619 }, + { url = "https://files.pythonhosted.org/packages/c3/46/ce31fab7866c41984a0ed76452c239ecaa3e70d9b32911167dd8e5ac72b4/pywavelets-1.6.0-cp39-cp39-win32.whl", hash = "sha256:42a22e68e345b6de7d387ef752111ab4530c98048d2b4bdac8ceefb078b4ead6", size = 4179812 }, + { url = "https://files.pythonhosted.org/packages/5c/17/5da7f90673319b664025ade3e55d68ea2eb53bead6a8aece042a804977f2/pywavelets-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:32198de321892743c1a3d1957fe1cd8a8ecc078bfbba6b8f3982518e897271d7", size = 4253407 }, +] + +[[package]] +name = "pywavelets" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.12'", + "python_full_version == '3.11.*'", + "python_full_version == '3.10.*'", +] +dependencies = [ + { name = "numpy", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/45/bfaaab38545a33a9f06c61211fc3bea2e23e8a8e00fedeb8e57feda722ff/pywavelets-1.8.0.tar.gz", hash = "sha256:f3800245754840adc143cbc29534a1b8fc4b8cff6e9d403326bd52b7bb5c35aa", size = 3935274 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/65/7e/c5e398f25c70558ca195dd4144ee004666401f6167084c1e76059d7e68d8/pywavelets-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f5c86fcb203c8e61d1f3d4afbfc08d626c64e4e3708207315577264c724632bf", size = 4323291 }, + { url = "https://files.pythonhosted.org/packages/d0/d7/2fc8067c3520ce25f7632b0f47b89d1b75653cab804a42700e95126f2679/pywavelets-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fafb5fa126277e1690c3d6329287122fc08e4d25a262ce126e3d81b1f5709308", size = 4291864 }, + { url = "https://files.pythonhosted.org/packages/2f/17/a868aa26e45c104613d9069f9d8ec0123687cb6945062d274f20a3992436/pywavelets-1.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dec23dfe6d5a3f4312b12456b8c546aa90a11c1138e425a885987505f0658ae0", size = 4447532 }, + { url = "https://files.pythonhosted.org/packages/53/7a/7f5889a57177e2b1182080fc2c52236d1e03a0fad5e0b3d7c5312070c0be/pywavelets-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:880a0197e9fa108939af50a95e97c1bf9b7d3e148e0fad92ea60a9ed8c8947c0", size = 4487695 }, + { url = "https://files.pythonhosted.org/packages/f9/e6/04d76d93c158919ef0d8e1d40d1d453168305031eca6733fdc844f7cbb07/pywavelets-1.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8bfa833d08b60d0bf53a7939fbbf3d98015dd34efe89cbe4e53ced880d085fc1", size = 4473752 }, + { url = "https://files.pythonhosted.org/packages/3b/a7/42ea5bbb6055abd312e45b27d931200fd6eed5414a87ec5d62020a4c651b/pywavelets-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e10c3fc7f4a796e94da4bca9871be2186a7bb7a3b3536a0ca9376d84263140f0", size = 4504191 }, + { url = "https://files.pythonhosted.org/packages/8c/7e/52df87a9e77adfb12c1b8be79a2053f2eb4c2507dec96ebfd2333b15ff03/pywavelets-1.8.0-cp310-cp310-win32.whl", hash = "sha256:31baf4be6940fde72cc85663154360857ac1b93c251822deaf72bb804da95031", size = 4143794 }, + { url = "https://files.pythonhosted.org/packages/01/e2/06e08230c26049740b2773952fbb12cc7186e5df655a73b1c30ba493e864/pywavelets-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:560c39f1ff8cb37f8b8ea4b7b6eb8a14f6926c11f5cf8c09f013a58f895ed5bc", size = 4214262 }, + { url = "https://files.pythonhosted.org/packages/6c/8a/9f8e794120b55caa1c4ae8d72696111bc408251615f351a8e54a5d8c4d4e/pywavelets-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e8dd5be4faed994581a8a4b3c0169be20567a9346e523f0b57f903c8f6722bce", size = 4324170 }, + { url = "https://files.pythonhosted.org/packages/3e/b8/f6246be5c78e9fa73fcbba9ab4cbfe0d4dcb79ea5491f28d673a53466134/pywavelets-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8d8abaf7c120b151ef309c9ff57e0a44ba9febf49045056dbc1577526ecec6c8", size = 4294254 }, + { url = "https://files.pythonhosted.org/packages/2c/dc/ba1f212e9b43117ed28e0fd092e72e817790427400f88937ea742d260153/pywavelets-1.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b43a4c58707b1e8d941bec7f1d83e67c482278575ff0db3189d5c0dfae23a57", size = 4447178 }, + { url = "https://files.pythonhosted.org/packages/58/10/e59c162a11d2fedb4454abbf7b74a52390aba5edc9605bf829bfa8708dac/pywavelets-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1aad0b97714e3079a2bfe48e4fb8ccd60778d0427e9ee5e0a9ff922e6c61e4", size = 4486799 }, + { url = "https://files.pythonhosted.org/packages/03/ee/90c3d0a0a3bda74e6e097e4c06bff9446ff2a4c90b8617aaf4902c46966b/pywavelets-1.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a0e1db96dcf3ce08156859df8b359e9ff66fa15061a1b90e70e020bf4cd077a0", size = 4486403 }, + { url = "https://files.pythonhosted.org/packages/05/54/58b87f8b636a9f044f3f9814d2ec696cf25f3b33af97c11811f13c364085/pywavelets-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e62c8fb52ab0e8ff212fff9acae681a8f12d68b76c36fe24cc48809d5b6825ba", size = 4515011 }, + { url = "https://files.pythonhosted.org/packages/a1/d0/f755cee11ff20668114942d0e777e2b502a8e4665e1fdb2553b587aac637/pywavelets-1.8.0-cp311-cp311-win32.whl", hash = "sha256:bf327528d10de471b04bb725c4e10677fac5a49e13d41bf0d0b3a1f6d7097abf", size = 4139934 }, + { url = "https://files.pythonhosted.org/packages/7b/0b/f4b92d4f00565280ea3e62a8e3dc81a667d67ed7bd59232f2f18d55f9aff/pywavelets-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3814d354dd109e244ffaac3d480d29a5202212fe24570c920268237c8d276f95", size = 4214321 }, + { url = "https://files.pythonhosted.org/packages/2d/8b/4870f11559307416470158a5aa6f61e5c2a910f1645a7a836ffae580b7ad/pywavelets-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3f431c9e2aff1a2240765eff5e804975d0fcc24c82d6f3d4271243f228e5963b", size = 4326187 }, + { url = "https://files.pythonhosted.org/packages/c4/35/66835d889fd7fbf3119c7a9bd9d9bd567fc0bb603dfba408e9226db7cb44/pywavelets-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e39b0e2314e928cb850ee89b9042733a10ea044176a495a54dc84d2c98407a51", size = 4295428 }, + { url = "https://files.pythonhosted.org/packages/63/1c/42e5130226538c70d4bbbaee00eb1bc06ec3287f7ea43d5fcf85bfc761ce/pywavelets-1.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cae701117f5c7244b7c8d48b9e92a0289637cdc02a9c205e8be83361f0c11fae", size = 4421259 }, + { url = "https://files.pythonhosted.org/packages/6f/c5/1ce93657432e22a5debc21e8b52ec6980f819ecb7fa727bb86744224d967/pywavelets-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649936baee933e80083788e0adc4d8bc2da7cdd8b10464d3b113475be2cc5308", size = 4447650 }, + { url = "https://files.pythonhosted.org/packages/b9/d6/b54ef30daca71824f811f9d2322a978b0a58d27674b8e3af6520f67e9ec6/pywavelets-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8c68e9d072c536bc646e8bdce443bb1826eeb9aa21b2cb2479a43954dea692a3", size = 4448538 }, + { url = "https://files.pythonhosted.org/packages/ce/8c/1688b790e55674667ad644262f174405c2c9873cb13e773432e78b1b33e4/pywavelets-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:63f67fa2ee1610445de64f746fb9c1df31980ad13d896ea2331fc3755f49b3ae", size = 4485228 }, + { url = "https://files.pythonhosted.org/packages/c9/9b/69de31c3b663dadd76d1da6bf8af68d8cefff55df8e880fe96a94bb8c9ac/pywavelets-1.8.0-cp312-cp312-win32.whl", hash = "sha256:4b3c2ab669c91e3474fd63294355487b7dd23f0b51d32f811327ddf3546f4f3d", size = 4134850 }, + { url = "https://files.pythonhosted.org/packages/1c/88/9e2aa9d5fde08bfc0fb18ffb1b5307c1ed49c24930b4147e5f48571a7251/pywavelets-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:810a23a631da596fef7196ddec49b345b1aab13525bb58547eeebe1769edbbc1", size = 4210786 }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199 }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758 }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463 }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280 }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239 }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802 }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527 }, + { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052 }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774 }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612 }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040 }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829 }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167 }, + { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952 }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301 }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638 }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850 }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980 }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873 }, + { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302 }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154 }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223 }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542 }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164 }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611 }, + { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591 }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338 }, + { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777 }, + { url = "https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318 }, + { url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891 }, + { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614 }, + { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360 }, + { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006 }, + { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577 }, + { url = "https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593 }, + { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312 }, +] + +[[package]] +name = "referencing" +version = "0.35.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "rpds-py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/99/5b/73ca1f8e72fff6fa52119dbd185f73a907b1989428917b24cff660129b6d/referencing-0.35.1.tar.gz", hash = "sha256:25b42124a6c8b632a425174f24087783efb348a6f1e0008e63cd4466fedf703c", size = 62991 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/59/2056f61236782a2c86b33906c025d4f4a0b17be0161b63b70fd9e8775d36/referencing-0.35.1-py3-none-any.whl", hash = "sha256:eda6d3234d62814d1c64e305c1331c9a3a6132da475ab6382eaa997b21ee75de", size = 26684 }, +] + +[[package]] +name = "requests" +version = "2.32.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, +] + +[[package]] +name = "rich" +version = "13.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ab/3a/0316b28d0761c6734d6bc14e770d85506c986c85ffb239e688eeaab2c2bc/rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098", size = 223149 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/19/71/39c7c0d87f8d4e6c020a393182060eaefeeae6c01dab6a84ec346f2567df/rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90", size = 242424 }, +] + +[[package]] +name = "rpds-py" +version = "0.22.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/01/80/cce854d0921ff2f0a9fa831ba3ad3c65cee3a46711addf39a2af52df2cfd/rpds_py-0.22.3.tar.gz", hash = "sha256:e32fee8ab45d3c2db6da19a5323bc3362237c8b653c70194414b892fd06a080d", size = 26771 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/2a/ead1d09e57449b99dcc190d8d2323e3a167421d8f8fdf0f217c6f6befe47/rpds_py-0.22.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:6c7b99ca52c2c1752b544e310101b98a659b720b21db00e65edca34483259967", size = 359514 }, + { url = "https://files.pythonhosted.org/packages/8f/7e/1254f406b7793b586c68e217a6a24ec79040f85e030fff7e9049069284f4/rpds_py-0.22.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be2eb3f2495ba669d2a985f9b426c1797b7d48d6963899276d22f23e33d47e37", size = 349031 }, + { url = "https://files.pythonhosted.org/packages/aa/da/17c6a2c73730d426df53675ff9cc6653ac7a60b6438d03c18e1c822a576a/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70eb60b3ae9245ddea20f8a4190bd79c705a22f8028aaf8bbdebe4716c3fab24", size = 381485 }, + { url = "https://files.pythonhosted.org/packages/aa/13/2dbacd820466aa2a3c4b747afb18d71209523d353cf865bf8f4796c969ea/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4041711832360a9b75cfb11b25a6a97c8fb49c07b8bd43d0d02b45d0b499a4ff", size = 386794 }, + { url = "https://files.pythonhosted.org/packages/6d/62/96905d0a35ad4e4bc3c098b2f34b2e7266e211d08635baa690643d2227be/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:64607d4cbf1b7e3c3c8a14948b99345eda0e161b852e122c6bb71aab6d1d798c", size = 423523 }, + { url = "https://files.pythonhosted.org/packages/eb/1b/d12770f2b6a9fc2c3ec0d810d7d440f6d465ccd8b7f16ae5385952c28b89/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e69b0a0e2537f26d73b4e43ad7bc8c8efb39621639b4434b76a3de50c6966e", size = 446695 }, + { url = "https://files.pythonhosted.org/packages/4d/cf/96f1fd75512a017f8e07408b6d5dbeb492d9ed46bfe0555544294f3681b3/rpds_py-0.22.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc27863442d388870c1809a87507727b799c8460573cfbb6dc0eeaef5a11b5ec", size = 381959 }, + { url = "https://files.pythonhosted.org/packages/ab/f0/d1c5b501c8aea85aeb938b555bfdf7612110a2f8cdc21ae0482c93dd0c24/rpds_py-0.22.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e79dd39f1e8c3504be0607e5fc6e86bb60fe3584bec8b782578c3b0fde8d932c", size = 410420 }, + { url = "https://files.pythonhosted.org/packages/33/3b/45b6c58fb6aad5a569ae40fb890fc494c6b02203505a5008ee6dc68e65f7/rpds_py-0.22.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e0fa2d4ec53dc51cf7d3bb22e0aa0143966119f42a0c3e4998293a3dd2856b09", size = 557620 }, + { url = "https://files.pythonhosted.org/packages/83/62/3fdd2d3d47bf0bb9b931c4c73036b4ab3ec77b25e016ae26fab0f02be2af/rpds_py-0.22.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fda7cb070f442bf80b642cd56483b5548e43d366fe3f39b98e67cce780cded00", size = 584202 }, + { url = "https://files.pythonhosted.org/packages/04/f2/5dced98b64874b84ca824292f9cee2e3f30f3bcf231d15a903126684f74d/rpds_py-0.22.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cff63a0272fcd259dcc3be1657b07c929c466b067ceb1c20060e8d10af56f5bf", size = 552787 }, + { url = "https://files.pythonhosted.org/packages/67/13/2273dea1204eda0aea0ef55145da96a9aa28b3f88bb5c70e994f69eda7c3/rpds_py-0.22.3-cp310-cp310-win32.whl", hash = "sha256:9bd7228827ec7bb817089e2eb301d907c0d9827a9e558f22f762bb690b131652", size = 220088 }, + { url = "https://files.pythonhosted.org/packages/4e/80/8c8176b67ad7f4a894967a7a4014ba039626d96f1d4874d53e409b58d69f/rpds_py-0.22.3-cp310-cp310-win_amd64.whl", hash = "sha256:9beeb01d8c190d7581a4d59522cd3d4b6887040dcfc744af99aa59fef3e041a8", size = 231737 }, + { url = "https://files.pythonhosted.org/packages/15/ad/8d1ddf78f2805a71253fcd388017e7b4a0615c22c762b6d35301fef20106/rpds_py-0.22.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:d20cfb4e099748ea39e6f7b16c91ab057989712d31761d3300d43134e26e165f", size = 359773 }, + { url = "https://files.pythonhosted.org/packages/c8/75/68c15732293a8485d79fe4ebe9045525502a067865fa4278f178851b2d87/rpds_py-0.22.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:68049202f67380ff9aa52f12e92b1c30115f32e6895cd7198fa2a7961621fc5a", size = 349214 }, + { url = "https://files.pythonhosted.org/packages/3c/4c/7ce50f3070083c2e1b2bbd0fb7046f3da55f510d19e283222f8f33d7d5f4/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb4f868f712b2dd4bcc538b0a0c1f63a2b1d584c925e69a224d759e7070a12d5", size = 380477 }, + { url = "https://files.pythonhosted.org/packages/9a/e9/835196a69cb229d5c31c13b8ae603bd2da9a6695f35fe4270d398e1db44c/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bc51abd01f08117283c5ebf64844a35144a0843ff7b2983e0648e4d3d9f10dbb", size = 386171 }, + { url = "https://files.pythonhosted.org/packages/f9/8e/33fc4eba6683db71e91e6d594a2cf3a8fbceb5316629f0477f7ece5e3f75/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f3cec041684de9a4684b1572fe28c7267410e02450f4561700ca5a3bc6695a2", size = 422676 }, + { url = "https://files.pythonhosted.org/packages/37/47/2e82d58f8046a98bb9497a8319604c92b827b94d558df30877c4b3c6ccb3/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7ef9d9da710be50ff6809fed8f1963fecdfecc8b86656cadfca3bc24289414b0", size = 446152 }, + { url = "https://files.pythonhosted.org/packages/e1/78/79c128c3e71abbc8e9739ac27af11dc0f91840a86fce67ff83c65d1ba195/rpds_py-0.22.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59f4a79c19232a5774aee369a0c296712ad0e77f24e62cad53160312b1c1eaa1", size = 381300 }, + { url = "https://files.pythonhosted.org/packages/c9/5b/2e193be0e8b228c1207f31fa3ea79de64dadb4f6a4833111af8145a6bc33/rpds_py-0.22.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a60bce91f81ddaac922a40bbb571a12c1070cb20ebd6d49c48e0b101d87300d", size = 409636 }, + { url = "https://files.pythonhosted.org/packages/c2/3f/687c7100b762d62186a1c1100ffdf99825f6fa5ea94556844bbbd2d0f3a9/rpds_py-0.22.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e89391e6d60251560f0a8f4bd32137b077a80d9b7dbe6d5cab1cd80d2746f648", size = 556708 }, + { url = "https://files.pythonhosted.org/packages/8c/a2/c00cbc4b857e8b3d5e7f7fc4c81e23afd8c138b930f4f3ccf9a41a23e9e4/rpds_py-0.22.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e3fb866d9932a3d7d0c82da76d816996d1667c44891bd861a0f97ba27e84fc74", size = 583554 }, + { url = "https://files.pythonhosted.org/packages/d0/08/696c9872cf56effdad9ed617ac072f6774a898d46b8b8964eab39ec562d2/rpds_py-0.22.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1352ae4f7c717ae8cba93421a63373e582d19d55d2ee2cbb184344c82d2ae55a", size = 552105 }, + { url = "https://files.pythonhosted.org/packages/18/1f/4df560be1e994f5adf56cabd6c117e02de7c88ee238bb4ce03ed50da9d56/rpds_py-0.22.3-cp311-cp311-win32.whl", hash = "sha256:b0b4136a252cadfa1adb705bb81524eee47d9f6aab4f2ee4fa1e9d3cd4581f64", size = 220199 }, + { url = "https://files.pythonhosted.org/packages/b8/1b/c29b570bc5db8237553002788dc734d6bd71443a2ceac2a58202ec06ef12/rpds_py-0.22.3-cp311-cp311-win_amd64.whl", hash = "sha256:8bd7c8cfc0b8247c8799080fbff54e0b9619e17cdfeb0478ba7295d43f635d7c", size = 231775 }, + { url = "https://files.pythonhosted.org/packages/75/47/3383ee3bd787a2a5e65a9b9edc37ccf8505c0a00170e3a5e6ea5fbcd97f7/rpds_py-0.22.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27e98004595899949bd7a7b34e91fa7c44d7a97c40fcaf1d874168bb652ec67e", size = 352334 }, + { url = "https://files.pythonhosted.org/packages/40/14/aa6400fa8158b90a5a250a77f2077c0d0cd8a76fce31d9f2b289f04c6dec/rpds_py-0.22.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1978d0021e943aae58b9b0b196fb4895a25cc53d3956b8e35e0b7682eefb6d56", size = 342111 }, + { url = "https://files.pythonhosted.org/packages/7d/06/395a13bfaa8a28b302fb433fb285a67ce0ea2004959a027aea8f9c52bad4/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:655ca44a831ecb238d124e0402d98f6212ac527a0ba6c55ca26f616604e60a45", size = 384286 }, + { url = "https://files.pythonhosted.org/packages/43/52/d8eeaffab047e6b7b7ef7f00d5ead074a07973968ffa2d5820fa131d7852/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:feea821ee2a9273771bae61194004ee2fc33f8ec7db08117ef9147d4bbcbca8e", size = 391739 }, + { url = "https://files.pythonhosted.org/packages/83/31/52dc4bde85c60b63719610ed6f6d61877effdb5113a72007679b786377b8/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22bebe05a9ffc70ebfa127efbc429bc26ec9e9b4ee4d15a740033efda515cf3d", size = 427306 }, + { url = "https://files.pythonhosted.org/packages/70/d5/1bab8e389c2261dba1764e9e793ed6830a63f830fdbec581a242c7c46bda/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3af6e48651c4e0d2d166dc1b033b7042ea3f871504b6805ba5f4fe31581d8d38", size = 442717 }, + { url = "https://files.pythonhosted.org/packages/82/a1/a45f3e30835b553379b3a56ea6c4eb622cf11e72008229af840e4596a8ea/rpds_py-0.22.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ba3c290821343c192f7eae1d8fd5999ca2dc99994114643e2f2d3e6138b15", size = 385721 }, + { url = "https://files.pythonhosted.org/packages/a6/27/780c942de3120bdd4d0e69583f9c96e179dfff082f6ecbb46b8d6488841f/rpds_py-0.22.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:02fbb9c288ae08bcb34fb41d516d5eeb0455ac35b5512d03181d755d80810059", size = 415824 }, + { url = "https://files.pythonhosted.org/packages/94/0b/aa0542ca88ad20ea719b06520f925bae348ea5c1fdf201b7e7202d20871d/rpds_py-0.22.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f56a6b404f74ab372da986d240e2e002769a7d7102cc73eb238a4f72eec5284e", size = 561227 }, + { url = "https://files.pythonhosted.org/packages/0d/92/3ed77d215f82c8f844d7f98929d56cc321bb0bcfaf8f166559b8ec56e5f1/rpds_py-0.22.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0a0461200769ab3b9ab7e513f6013b7a97fdeee41c29b9db343f3c5a8e2b9e61", size = 587424 }, + { url = "https://files.pythonhosted.org/packages/09/42/cacaeb047a22cab6241f107644f230e2935d4efecf6488859a7dd82fc47d/rpds_py-0.22.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8633e471c6207a039eff6aa116e35f69f3156b3989ea3e2d755f7bc41754a4a7", size = 555953 }, + { url = "https://files.pythonhosted.org/packages/e6/52/c921dc6d5f5d45b212a456c1f5b17df1a471127e8037eb0972379e39dff4/rpds_py-0.22.3-cp312-cp312-win32.whl", hash = "sha256:593eba61ba0c3baae5bc9be2f5232430453fb4432048de28399ca7376de9c627", size = 221339 }, + { url = "https://files.pythonhosted.org/packages/f2/c7/f82b5be1e8456600395366f86104d1bd8d0faed3802ad511ef6d60c30d98/rpds_py-0.22.3-cp312-cp312-win_amd64.whl", hash = "sha256:d115bffdd417c6d806ea9069237a4ae02f513b778e3789a359bc5856e0404cc4", size = 235786 }, + { url = "https://files.pythonhosted.org/packages/db/0f/a8ad17ddac7c880f48d5da50733dd25bfc35ba2be1bec9f23453e8c7a123/rpds_py-0.22.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:378753b4a4de2a7b34063d6f95ae81bfa7b15f2c1a04a9518e8644e81807ebea", size = 359735 }, + { url = "https://files.pythonhosted.org/packages/0c/41/430903669397ea3ee76865e0b53ea236e8dc0ffbecde47b2c4c783ad6759/rpds_py-0.22.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3445e07bf2e8ecfeef6ef67ac83de670358abf2996916039b16a218e3d95e97e", size = 348724 }, + { url = "https://files.pythonhosted.org/packages/c9/5c/3496f4f0ee818297544f2d5f641c49dde8ae156392e6834b79c0609ba006/rpds_py-0.22.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2513ba235829860b13faa931f3b6846548021846ac808455301c23a101689d", size = 381782 }, + { url = "https://files.pythonhosted.org/packages/b6/dc/db0523ce0cd16ce579185cc9aa9141992de956d0a9c469ecfd1fb5d54ddc/rpds_py-0.22.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eaf16ae9ae519a0e237a0f528fd9f0197b9bb70f40263ee57ae53c2b8d48aeb3", size = 387036 }, + { url = "https://files.pythonhosted.org/packages/85/2a/9525c2427d2c257f877348918136a5d4e1b945c205a256e53bec61e54551/rpds_py-0.22.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:583f6a1993ca3369e0f80ba99d796d8e6b1a3a2a442dd4e1a79e652116413091", size = 424566 }, + { url = "https://files.pythonhosted.org/packages/b9/1c/f8c012a39794b84069635709f559c0309103d5d74b3f5013916e6ca4f174/rpds_py-0.22.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4617e1915a539a0d9a9567795023de41a87106522ff83fbfaf1f6baf8e85437e", size = 447203 }, + { url = "https://files.pythonhosted.org/packages/93/f5/c1c772364570d35b98ba64f36ec90c3c6d0b932bc4d8b9b4efef6dc64b07/rpds_py-0.22.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c150c7a61ed4a4f4955a96626574e9baf1adf772c2fb61ef6a5027e52803543", size = 382283 }, + { url = "https://files.pythonhosted.org/packages/10/06/f94f61313f94fc75c3c3aa74563f80bbd990e5b25a7c1a38cee7d5d0309b/rpds_py-0.22.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2fa4331c200c2521512595253f5bb70858b90f750d39b8cbfd67465f8d1b596d", size = 410022 }, + { url = "https://files.pythonhosted.org/packages/3f/b0/37ab416a9528419920dfb64886c220f58fcbd66b978e0a91b66e9ee9a993/rpds_py-0.22.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:214b7a953d73b5e87f0ebece4a32a5bd83c60a3ecc9d4ec8f1dca968a2d91e99", size = 557817 }, + { url = "https://files.pythonhosted.org/packages/2c/5d/9daa18adcd676dd3b2817c8a7cec3f3ebeeb0ce0d05a1b63bf994fc5114f/rpds_py-0.22.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f47ad3d5f3258bd7058d2d506852217865afefe6153a36eb4b6928758041d831", size = 585099 }, + { url = "https://files.pythonhosted.org/packages/41/3f/ad4e58035d3f848410aa3d59857b5f238bafab81c8b4a844281f80445d62/rpds_py-0.22.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:f276b245347e6e36526cbd4a266a417796fc531ddf391e43574cf6466c492520", size = 552818 }, + { url = "https://files.pythonhosted.org/packages/b8/19/123acae8f4cab3c9463097c3ced3cc87c46f405056e249c874940e045309/rpds_py-0.22.3-cp39-cp39-win32.whl", hash = "sha256:bbb232860e3d03d544bc03ac57855cd82ddf19c7a07651a7c0fdb95e9efea8b9", size = 220246 }, + { url = "https://files.pythonhosted.org/packages/8b/8d/9db93e48d96ace1f6713c71ce72e2d94b71d82156c37b6a54e0930486f00/rpds_py-0.22.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfbc454a2880389dbb9b5b398e50d439e2e58669160f27b60e5eca11f68ae17c", size = 231932 }, + { url = "https://files.pythonhosted.org/packages/8b/63/e29f8ee14fcf383574f73b6bbdcbec0fbc2e5fc36b4de44d1ac389b1de62/rpds_py-0.22.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48424e39c2611ee1b84ad0f44fb3b2b53d473e65de061e3f460fc0be5f1939d", size = 360786 }, + { url = "https://files.pythonhosted.org/packages/d3/e0/771ee28b02a24e81c8c0e645796a371350a2bb6672753144f36ae2d2afc9/rpds_py-0.22.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:24e8abb5878e250f2eb0d7859a8e561846f98910326d06c0d51381fed59357bd", size = 350589 }, + { url = "https://files.pythonhosted.org/packages/cf/49/abad4c4a1e6f3adf04785a99c247bfabe55ed868133e2d1881200aa5d381/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b232061ca880db21fa14defe219840ad9b74b6158adb52ddf0e87bead9e8493", size = 381848 }, + { url = "https://files.pythonhosted.org/packages/3a/7d/f4bc6d6fbe6af7a0d2b5f2ee77079efef7c8528712745659ec0026888998/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac0a03221cdb5058ce0167ecc92a8c89e8d0decdc9e99a2ec23380793c4dcb96", size = 387879 }, + { url = "https://files.pythonhosted.org/packages/13/b0/575c797377fdcd26cedbb00a3324232e4cb2c5d121f6e4b0dbf8468b12ef/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb0c341fa71df5a4595f9501df4ac5abfb5a09580081dffbd1ddd4654e6e9123", size = 423916 }, + { url = "https://files.pythonhosted.org/packages/54/78/87157fa39d58f32a68d3326f8a81ad8fb99f49fe2aa7ad9a1b7d544f9478/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf9db5488121b596dbfc6718c76092fda77b703c1f7533a226a5a9f65248f8ad", size = 448410 }, + { url = "https://files.pythonhosted.org/packages/59/69/860f89996065a88be1b6ff2d60e96a02b920a262d8aadab99e7903986597/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b8db6b5b2d4491ad5b6bdc2bc7c017eec108acbf4e6785f42a9eb0ba234f4c9", size = 382841 }, + { url = "https://files.pythonhosted.org/packages/bd/d7/bc144e10d27e3cb350f98df2492a319edd3caaf52ddfe1293f37a9afbfd7/rpds_py-0.22.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b3d504047aba448d70cf6fa22e06cb09f7cbd761939fdd47604f5e007675c24e", size = 409662 }, + { url = "https://files.pythonhosted.org/packages/14/2a/6bed0b05233c291a94c7e89bc76ffa1c619d4e1979fbfe5d96024020c1fb/rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:e61b02c3f7a1e0b75e20c3978f7135fd13cb6cf551bf4a6d29b999a88830a338", size = 558221 }, + { url = "https://files.pythonhosted.org/packages/11/23/cd8f566de444a137bc1ee5795e47069a947e60810ba4152886fe5308e1b7/rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:e35ba67d65d49080e8e5a1dd40101fccdd9798adb9b050ff670b7d74fa41c566", size = 583780 }, + { url = "https://files.pythonhosted.org/packages/8d/63/79c3602afd14d501f751e615a74a59040328da5ef29ed5754ae80d236b84/rpds_py-0.22.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:26fd7cac7dd51011a245f29a2cc6489c4608b5a8ce8d75661bb4a1066c52dfbe", size = 553619 }, + { url = "https://files.pythonhosted.org/packages/9f/2e/c5c1689e80298d4e94c75b70faada4c25445739d91b94c211244a3ed7ed1/rpds_py-0.22.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:177c7c0fce2855833819c98e43c262007f42ce86651ffbb84f37883308cb0e7d", size = 233338 }, + { url = "https://files.pythonhosted.org/packages/bc/b7/d2c205723e3b4d75b03215694f0297a1b4b395bf834cb5896ad9bbb90f90/rpds_py-0.22.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bb47271f60660803ad11f4c61b42242b8c1312a31c98c578f79ef9387bbde21c", size = 360594 }, + { url = "https://files.pythonhosted.org/packages/d8/8f/c3515f5234cf6055046d4cfe9c80a3742a20acfa7d0b1b290f0d7f56a8db/rpds_py-0.22.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:70fb28128acbfd264eda9bf47015537ba3fe86e40d046eb2963d75024be4d055", size = 349594 }, + { url = "https://files.pythonhosted.org/packages/6b/98/5b487cb06afc484befe350c87fda37f4ce11333f04f3380aba43dcf5bce2/rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d61b4b7d0c2c9ac019c314e52d7cbda0ae31078aabd0f22e583af3e0d79723", size = 381138 }, + { url = "https://files.pythonhosted.org/packages/5e/3a/12308d2c51b3fdfc173619943b7dc5ba41b4850c47112eeda38d9c54ed12/rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f0e260eaf54380380ac3808aa4ebe2d8ca28b9087cf411649f96bad6900c728", size = 387828 }, + { url = "https://files.pythonhosted.org/packages/17/b2/c242241ab5a2a206e093f24ccbfa519c4bbf10a762ac90bffe1766c225e0/rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b25bc607423935079e05619d7de556c91fb6adeae9d5f80868dde3468657994b", size = 424634 }, + { url = "https://files.pythonhosted.org/packages/d5/c7/52a1b15012139f3ba740f291f1d03c6b632938ba61bc605f24c101952493/rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fb6116dfb8d1925cbdb52595560584db42a7f664617a1f7d7f6e32f138cdf37d", size = 447862 }, + { url = "https://files.pythonhosted.org/packages/55/3e/4d3ed8fd01bad77e8ed101116fe63b03f1011940d9596a8f4d82ac80cacd/rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a63cbdd98acef6570c62b92a1e43266f9e8b21e699c363c0fef13bd530799c11", size = 382506 }, + { url = "https://files.pythonhosted.org/packages/30/78/df59d6f92470a84369a3757abeae1cfd7f7239c8beb6d948949bf78317d2/rpds_py-0.22.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2b8f60e1b739a74bab7e01fcbe3dddd4657ec685caa04681df9d562ef15b625f", size = 410534 }, + { url = "https://files.pythonhosted.org/packages/38/97/ea45d1edd9b753b20084b52dd5db6ee5e1ac3e036a27149972398a413858/rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:2e8b55d8517a2fda8d95cb45d62a5a8bbf9dd0ad39c5b25c8833efea07b880ca", size = 557453 }, + { url = "https://files.pythonhosted.org/packages/08/cd/3a1b35eb9da27ffbb981cfffd32a01c7655c4431ccb278cb3064f8887462/rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:2de29005e11637e7a2361fa151f780ff8eb2543a0da1413bb951e9f14b699ef3", size = 584412 }, + { url = "https://files.pythonhosted.org/packages/87/91/31d1c5aeb1606f71188259e0ba6ed6f5c21a3c72f58b51db6a8bd0aa2b5d/rpds_py-0.22.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:666ecce376999bf619756a24ce15bb14c5bfaf04bf00abc7e663ce17c3f34fe7", size = 553446 }, + { url = "https://files.pythonhosted.org/packages/e7/ad/03b5ccd1ab492c9dece85b3bf1c96453ab8c47983936fae6880f688f60b3/rpds_py-0.22.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:5246b14ca64a8675e0a7161f7af68fe3e910e6b90542b4bfb5439ba752191df6", size = 233013 }, +] + +[[package]] +name = "ruamel-yaml" +version = "0.18.7" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "ruamel-yaml-clib", marker = "platform_python_implementation == 'CPython'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/be/f6/ae958c14677098f4fcc0a6b04b382c02f31ec628d2b03910e24f29d9bc58/ruamel.yaml-0.18.7.tar.gz", hash = "sha256:270638acec6659f7bb30f4ea40083c9a0d0d5afdcef5e63d666f11209091531a", size = 142349 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ee/04/9fe08b3cc8000c1d98f2f79811d99caf992643f5d5510b5e64547659e808/ruamel.yaml-0.18.7-py3-none-any.whl", hash = "sha256:adef56d72a97bc2a6a78952ef398c4054f248fba5698ddc3ab07434e7fc47983", size = 116931 }, +] + +[[package]] +name = "ruamel-yaml-clib" +version = "0.2.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/20/84/80203abff8ea4993a87d823a5f632e4d92831ef75d404c9fc78d0176d2b5/ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f", size = 225315 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/70/57/40a958e863e299f0c74ef32a3bde9f2d1ea8d69669368c0c502a0997f57f/ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5", size = 131301 }, + { url = "https://files.pythonhosted.org/packages/98/a8/29a3eb437b12b95f50a6bcc3d7d7214301c6c529d8fdc227247fa84162b5/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969", size = 633728 }, + { url = "https://files.pythonhosted.org/packages/35/6d/ae05a87a3ad540259c3ad88d71275cbd1c0f2d30ae04c65dcbfb6dcd4b9f/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df", size = 722230 }, + { url = "https://files.pythonhosted.org/packages/7f/b7/20c6f3c0b656fe609675d69bc135c03aac9e3865912444be6339207b6648/ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76", size = 686712 }, + { url = "https://files.pythonhosted.org/packages/cd/11/d12dbf683471f888d354dac59593873c2b45feb193c5e3e0f2ebf85e68b9/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6", size = 663936 }, + { url = "https://files.pythonhosted.org/packages/72/14/4c268f5077db5c83f743ee1daeb236269fa8577133a5cfa49f8b382baf13/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd", size = 696580 }, + { url = "https://files.pythonhosted.org/packages/30/fc/8cd12f189c6405a4c1cf37bd633aa740a9538c8e40497c231072d0fef5cf/ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a52d48f4e7bf9005e8f0a89209bf9a73f7190ddf0489eee5eb51377385f59f2a", size = 663393 }, + { url = "https://files.pythonhosted.org/packages/80/29/c0a017b704aaf3cbf704989785cd9c5d5b8ccec2dae6ac0c53833c84e677/ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da", size = 100326 }, + { url = "https://files.pythonhosted.org/packages/3a/65/fa39d74db4e2d0cd252355732d966a460a41cd01c6353b820a0952432839/ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28", size = 118079 }, + { url = "https://files.pythonhosted.org/packages/fb/8f/683c6ad562f558cbc4f7c029abcd9599148c51c54b5ef0f24f2638da9fbb/ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6", size = 132224 }, + { url = "https://files.pythonhosted.org/packages/3c/d2/b79b7d695e2f21da020bd44c782490578f300dd44f0a4c57a92575758a76/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e", size = 641480 }, + { url = "https://files.pythonhosted.org/packages/68/6e/264c50ce2a31473a9fdbf4fa66ca9b2b17c7455b31ef585462343818bd6c/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e", size = 739068 }, + { url = "https://files.pythonhosted.org/packages/86/29/88c2567bc893c84d88b4c48027367c3562ae69121d568e8a3f3a8d363f4d/ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52", size = 703012 }, + { url = "https://files.pythonhosted.org/packages/11/46/879763c619b5470820f0cd6ca97d134771e502776bc2b844d2adb6e37753/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642", size = 704352 }, + { url = "https://files.pythonhosted.org/packages/02/80/ece7e6034256a4186bbe50dee28cd032d816974941a6abf6a9d65e4228a7/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2", size = 737344 }, + { url = "https://files.pythonhosted.org/packages/f0/ca/e4106ac7e80efbabdf4bf91d3d32fc424e41418458251712f5672eada9ce/ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1492a6051dab8d912fc2adeef0e8c72216b24d57bd896ea607cb90bb0c4981d3", size = 714498 }, + { url = "https://files.pythonhosted.org/packages/67/58/b1f60a1d591b771298ffa0428237afb092c7f29ae23bad93420b1eb10703/ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4", size = 100205 }, + { url = "https://files.pythonhosted.org/packages/b4/4f/b52f634c9548a9291a70dfce26ca7ebce388235c93588a1068028ea23fcc/ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb", size = 118185 }, + { url = "https://files.pythonhosted.org/packages/48/41/e7a405afbdc26af961678474a55373e1b323605a4f5e2ddd4a80ea80f628/ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632", size = 133433 }, + { url = "https://files.pythonhosted.org/packages/ec/b0/b850385604334c2ce90e3ee1013bd911aedf058a934905863a6ea95e9eb4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d", size = 647362 }, + { url = "https://files.pythonhosted.org/packages/44/d0/3f68a86e006448fb6c005aee66565b9eb89014a70c491d70c08de597f8e4/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c", size = 754118 }, + { url = "https://files.pythonhosted.org/packages/52/a9/d39f3c5ada0a3bb2870d7db41901125dbe2434fa4f12ca8c5b83a42d7c53/ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd", size = 706497 }, + { url = "https://files.pythonhosted.org/packages/b0/fa/097e38135dadd9ac25aecf2a54be17ddf6e4c23e43d538492a90ab3d71c6/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31", size = 698042 }, + { url = "https://files.pythonhosted.org/packages/ec/d5/a659ca6f503b9379b930f13bc6b130c9f176469b73b9834296822a83a132/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680", size = 745831 }, + { url = "https://files.pythonhosted.org/packages/db/5d/36619b61ffa2429eeaefaab4f3374666adf36ad8ac6330d855848d7d36fd/ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b82a7c94a498853aa0b272fd5bc67f29008da798d4f93a2f9f289feb8426a58d", size = 715692 }, + { url = "https://files.pythonhosted.org/packages/b1/82/85cb92f15a4231c89b95dfe08b09eb6adca929ef7df7e17ab59902b6f589/ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = "sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5", size = 98777 }, + { url = "https://files.pythonhosted.org/packages/d7/8f/c3654f6f1ddb75daf3922c3d8fc6005b1ab56671ad56ffb874d908bfa668/ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4", size = 115523 }, + { url = "https://files.pythonhosted.org/packages/e5/46/ccdef7a84ad745c37cb3d9a81790f28fbc9adf9c237dba682017b123294e/ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987", size = 131834 }, + { url = "https://files.pythonhosted.org/packages/29/09/932360f30ad1b7b79f08757e0a6fb8c5392a52cdcc182779158fe66d25ac/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45", size = 636120 }, + { url = "https://files.pythonhosted.org/packages/a2/2a/5b27602e7a4344c1334e26bf4739746206b7a60a8acdba33a61473468b73/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519", size = 724914 }, + { url = "https://files.pythonhosted.org/packages/da/1c/23497017c554fc06ff5701b29355522cff850f626337fff35d9ab352cb18/ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7", size = 689072 }, + { url = "https://files.pythonhosted.org/packages/68/e6/f3d4ff3223f9ea49c3b7169ec0268e42bd49f87c70c0e3e853895e4a7ae2/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285", size = 667091 }, + { url = "https://files.pythonhosted.org/packages/84/62/ead07043527642491e5011b143f44b81ef80f1025a96069b7210e0f2f0f3/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed", size = 699111 }, + { url = "https://files.pythonhosted.org/packages/52/b3/fe4d84446f7e4887e3bea7ceff0a7df23790b5ed625f830e79ace88ebefb/ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2c59aa6170b990d8d2719323e628aaf36f3bfbc1c26279c0eeeb24d05d2d11c7", size = 666365 }, + { url = "https://files.pythonhosted.org/packages/6e/b3/7feb99a00bfaa5c6868617bb7651308afde85e5a0b23cd187fe5de65feeb/ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12", size = 100863 }, + { url = "https://files.pythonhosted.org/packages/93/07/de635108684b7a5bb06e432b0930c5a04b6c59efe73bd966d8db3cc208f2/ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b", size = 118653 }, +] + +[[package]] +name = "scipy" +version = "1.13.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ae/00/48c2f661e2816ccf2ecd77982f6605b2950afe60f60a52b4cbbc2504aa8f/scipy-1.13.1.tar.gz", hash = "sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c", size = 57210720 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/59/41b2529908c002ade869623b87eecff3e11e3ce62e996d0bdcb536984187/scipy-1.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca", size = 39328076 }, + { url = "https://files.pythonhosted.org/packages/d5/33/f1307601f492f764062ce7dd471a14750f3360e33cd0f8c614dae208492c/scipy-1.13.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f", size = 30306232 }, + { url = "https://files.pythonhosted.org/packages/c0/66/9cd4f501dd5ea03e4a4572ecd874936d0da296bd04d1c45ae1a4a75d9c3a/scipy-1.13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989", size = 33743202 }, + { url = "https://files.pythonhosted.org/packages/a3/ba/7255e5dc82a65adbe83771c72f384d99c43063648456796436c9a5585ec3/scipy-1.13.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f", size = 38577335 }, + { url = "https://files.pythonhosted.org/packages/49/a5/bb9ded8326e9f0cdfdc412eeda1054b914dfea952bda2097d174f8832cc0/scipy-1.13.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94", size = 38820728 }, + { url = "https://files.pythonhosted.org/packages/12/30/df7a8fcc08f9b4a83f5f27cfaaa7d43f9a2d2ad0b6562cced433e5b04e31/scipy-1.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54", size = 46210588 }, + { url = "https://files.pythonhosted.org/packages/b4/15/4a4bb1b15bbd2cd2786c4f46e76b871b28799b67891f23f455323a0cdcfb/scipy-1.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9", size = 39333805 }, + { url = "https://files.pythonhosted.org/packages/ba/92/42476de1af309c27710004f5cdebc27bec62c204db42e05b23a302cb0c9a/scipy-1.13.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326", size = 30317687 }, + { url = "https://files.pythonhosted.org/packages/80/ba/8be64fe225360a4beb6840f3cbee494c107c0887f33350d0a47d55400b01/scipy-1.13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299", size = 33694638 }, + { url = "https://files.pythonhosted.org/packages/36/07/035d22ff9795129c5a847c64cb43c1fa9188826b59344fee28a3ab02e283/scipy-1.13.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa", size = 38569931 }, + { url = "https://files.pythonhosted.org/packages/d9/10/f9b43de37e5ed91facc0cfff31d45ed0104f359e4f9a68416cbf4e790241/scipy-1.13.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59", size = 38838145 }, + { url = "https://files.pythonhosted.org/packages/4a/48/4513a1a5623a23e95f94abd675ed91cfb19989c58e9f6f7d03990f6caf3d/scipy-1.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b", size = 46196227 }, + { url = "https://files.pythonhosted.org/packages/f2/7b/fb6b46fbee30fc7051913068758414f2721003a89dd9a707ad49174e3843/scipy-1.13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1", size = 39357301 }, + { url = "https://files.pythonhosted.org/packages/dc/5a/2043a3bde1443d94014aaa41e0b50c39d046dda8360abd3b2a1d3f79907d/scipy-1.13.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d", size = 30363348 }, + { url = "https://files.pythonhosted.org/packages/e7/cb/26e4a47364bbfdb3b7fb3363be6d8a1c543bcd70a7753ab397350f5f189a/scipy-1.13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627", size = 33406062 }, + { url = "https://files.pythonhosted.org/packages/88/ab/6ecdc526d509d33814835447bbbeedbebdec7cca46ef495a61b00a35b4bf/scipy-1.13.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884", size = 38218311 }, + { url = "https://files.pythonhosted.org/packages/0b/00/9f54554f0f8318100a71515122d8f4f503b1a2c4b4cfab3b4b68c0eb08fa/scipy-1.13.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16", size = 38442493 }, + { url = "https://files.pythonhosted.org/packages/3e/df/963384e90733e08eac978cd103c34df181d1fec424de383cdc443f418dd4/scipy-1.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949", size = 45910955 }, + { url = "https://files.pythonhosted.org/packages/7f/29/c2ea58c9731b9ecb30b6738113a95d147e83922986b34c685b8f6eefde21/scipy-1.13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5", size = 39352927 }, + { url = "https://files.pythonhosted.org/packages/5c/c0/e71b94b20ccf9effb38d7147c0064c08c622309fd487b1b677771a97d18c/scipy-1.13.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24", size = 30324538 }, + { url = "https://files.pythonhosted.org/packages/6d/0f/aaa55b06d474817cea311e7b10aab2ea1fd5d43bc6a2861ccc9caec9f418/scipy-1.13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004", size = 33732190 }, + { url = "https://files.pythonhosted.org/packages/35/f5/d0ad1a96f80962ba65e2ce1de6a1e59edecd1f0a7b55990ed208848012e0/scipy-1.13.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d", size = 38612244 }, + { url = "https://files.pythonhosted.org/packages/8d/02/1165905f14962174e6569076bcc3315809ae1291ed14de6448cc151eedfd/scipy-1.13.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c", size = 38845637 }, + { url = "https://files.pythonhosted.org/packages/3e/77/dab54fe647a08ee4253963bcd8f9cf17509c8ca64d6335141422fe2e2114/scipy-1.13.1-cp39-cp39-win_amd64.whl", hash = "sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2", size = 46227440 }, +] + +[[package]] +name = "seaborn" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "matplotlib" }, + { name = "numpy" }, + { name = "pandas" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/86/59/a451d7420a77ab0b98f7affa3a1d78a313d2f7281a57afb1a34bae8ab412/seaborn-0.13.2.tar.gz", hash = "sha256:93e60a40988f4d65e9f4885df477e2fdaff6b73a9ded434c1ab356dd57eefff7", size = 1457696 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/83/11/00d3c3dfc25ad54e731d91449895a79e4bf2384dc3ac01809010ba88f6d5/seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987", size = 294914 }, +] + +[[package]] +name = "sgmllib3k" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/bd/3704a8c3e0942d711c1299ebf7b9091930adae6675d7c8f476a7ce48653c/sgmllib3k-1.0.0.tar.gz", hash = "sha256:7868fb1c8bfa764c1ac563d3cf369c381d1325d36124933a726f29fcdaa812e9", size = 5750 } + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 }, +] + +[[package]] +name = "smmap" +version = "5.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/04/b5bf6d21dc4041000ccba7eb17dd3055feb237e7ffc2c20d3fae3af62baa/smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62", size = 22291 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/a5/10f97f73544edcdef54409f1d839f6049a0d79df68adbc1ceb24d1aaca42/smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da", size = 24282 }, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, +] + +[[package]] +name = "snowplow-tracker" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "types-requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e1/86/0c6a02dd258d93050265e00b40448045a849c57b8f83de37ecec63089a4b/snowplow_tracker-1.0.4.tar.gz", hash = "sha256:16d8a3c001a7847d91dc081d508324550c314a4cbf5d6106b5ab35f77fa34678", size = 33998 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/70/c2836c7143b390a4b12ac44ada6fea7440b153165edcef47eca551b298af/snowplow_tracker-1.0.4-py3-none-any.whl", hash = "sha256:382e289811550f6ce7d5abc9e68590cc080ac9b21916b701b17497cfd6b32038", size = 44094 }, +] + +[[package]] +name = "sqlparse" +version = "0.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415 }, +] + +[[package]] +name = "statsmodels" +version = "0.14.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "numpy" }, + { name = "packaging" }, + { name = "pandas" }, + { name = "patsy" }, + { name = "scipy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/1f/3b/963a015dd8ea17e10c7b0e2f14d7c4daec903baf60a017e756b57953a4bf/statsmodels-0.14.4.tar.gz", hash = "sha256:5d69e0f39060dc72c067f9bb6e8033b6dccdb0bae101d76a7ef0bcc94e898b67", size = 20354802 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/af/2c/23bf5ad9e8a77c0c8d9750512bff89e32154dea91998114118e0e147ae67/statsmodels-0.14.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7a62f1fc9086e4b7ee789a6f66b3c0fc82dd8de1edda1522d30901a0aa45e42b", size = 10216574 }, + { url = "https://files.pythonhosted.org/packages/ba/a5/2f09ab918296e534ea5d132e90efac51ae12ff15992d77539bbfca1158fa/statsmodels-0.14.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:46ac7ddefac0c9b7b607eed1d47d11e26fe92a1bc1f4d9af48aeed4e21e87981", size = 9912430 }, + { url = "https://files.pythonhosted.org/packages/93/6a/b86f8c9b799dc93e5b4a3267eb809843e6328e34248a53496b96f50d732e/statsmodels-0.14.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a337b731aa365d09bb0eab6da81446c04fde6c31976b1d8e3d3a911f0f1e07b", size = 10444673 }, + { url = "https://files.pythonhosted.org/packages/78/44/d72c634211797ed07dd8c63ced4ae11debd7a40b24ee80e79346a526194f/statsmodels-0.14.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:631bb52159117c5da42ba94bd94859276b68cab25dc4cac86475bc24671143bc", size = 10811248 }, + { url = "https://files.pythonhosted.org/packages/35/64/df81426924fcc48a0402534efa96cde13275629ae52f123189d16c4b75ff/statsmodels-0.14.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3bb2e580d382545a65f298589809af29daeb15f9da2eb252af8f79693e618abc", size = 10946447 }, + { url = "https://files.pythonhosted.org/packages/5c/f9/205130cceeda0eebd5a1a58c04e060c2f87a1d63cbbe37a9caa0fcb50c68/statsmodels-0.14.4-cp310-cp310-win_amd64.whl", hash = "sha256:9729642884147ee9db67b5a06a355890663d21f76ed608a56ac2ad98b94d201a", size = 9845796 }, + { url = "https://files.pythonhosted.org/packages/48/88/326f5f689e69d9c47a68a22ffdd20a6ea6410b53918f9a8e63380dfc181c/statsmodels-0.14.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5ed7e118e6e3e02d6723a079b8c97eaadeed943fa1f7f619f7148dfc7862670f", size = 10221032 }, + { url = "https://files.pythonhosted.org/packages/07/0b/9a0818be42f6689ebdc7a2277ea984d6299f0809d0e0277128df4f7dc606/statsmodels-0.14.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f5f537f7d000de4a1708c63400755152b862cd4926bb81a86568e347c19c364b", size = 9912219 }, + { url = "https://files.pythonhosted.org/packages/b1/f2/91c70a3b4a3e416f76ead61b04c87bc60080d634d7fa2ab893976bdd86fa/statsmodels-0.14.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa74aaa26eaa5012b0a01deeaa8a777595d0835d3d6c7175f2ac65435a7324d2", size = 10424053 }, + { url = "https://files.pythonhosted.org/packages/9d/4f/a96e682f82b675e4a6f3de8ad990587d8b1fde500a630a2aabcaabee11d8/statsmodels-0.14.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e332c2d9b806083d1797231280602340c5c913f90d4caa0213a6a54679ce9331", size = 10752529 }, + { url = "https://files.pythonhosted.org/packages/4b/c6/47549345d32da1530a819a3699f6f34f9f70733a245eeb29f5e05e53f362/statsmodels-0.14.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9c8fa28dfd75753d9cf62769ba1fecd7e73a0be187f35cc6f54076f98aa3f3f", size = 10959003 }, + { url = "https://files.pythonhosted.org/packages/4b/e4/f9e96896278308e17dfd4f60a84826c48117674c980234ee38f59ab28a12/statsmodels-0.14.4-cp311-cp311-win_amd64.whl", hash = "sha256:a6087ecb0714f7c59eb24c22781491e6f1cfffb660b4740e167625ca4f052056", size = 9853281 }, + { url = "https://files.pythonhosted.org/packages/f5/99/654fd41a9024643ee70b239e5ebc987bf98ce9fc2693bd550bee58136564/statsmodels-0.14.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5221dba7424cf4f2561b22e9081de85f5bb871228581124a0d1b572708545199", size = 10220508 }, + { url = "https://files.pythonhosted.org/packages/67/d8/ac30cf4cf97adaa48548be57e7cf02e894f31b45fd55bf9213358d9781c9/statsmodels-0.14.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:17672b30c6b98afe2b095591e32d1d66d4372f2651428e433f16a3667f19eabb", size = 9912317 }, + { url = "https://files.pythonhosted.org/packages/e0/77/2440d551eaf27f9c1d3650e13b3821a35ad5b21d3a19f62fb302af9203e8/statsmodels-0.14.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab5e6312213b8cfb9dca93dd46a0f4dccb856541f91d3306227c3d92f7659245", size = 10301662 }, + { url = "https://files.pythonhosted.org/packages/fa/e1/60a652f18996a40a7410aeb7eb476c18da8a39792c7effe67f06883e9852/statsmodels-0.14.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bbb150620b53133d6cd1c5d14c28a4f85701e6c781d9b689b53681effaa655f", size = 10741763 }, + { url = "https://files.pythonhosted.org/packages/81/0c/2453eec3ac25e300847d9ed97f41156de145e507391ecb5ac989e111e525/statsmodels-0.14.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb695c2025d122a101c2aca66d2b78813c321b60d3a7c86bb8ec4467bb53b0f9", size = 10879534 }, + { url = "https://files.pythonhosted.org/packages/59/9a/e466a1b887a1441141e52dbcc98152f013d85076576da6eed2357f2016ae/statsmodels-0.14.4-cp312-cp312-win_amd64.whl", hash = "sha256:7f7917a51766b4e074da283c507a25048ad29a18e527207883d73535e0dc6184", size = 9823866 }, + { url = "https://files.pythonhosted.org/packages/19/5e/6ed84430ca3133507a8e37446e94f0a9cb45a54b412f600fd8152431cff5/statsmodels-0.14.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4793b01b7a5f5424f5a1dbcefc614c83c7608aa2b035f087538253007c339d5d", size = 10237063 }, + { url = "https://files.pythonhosted.org/packages/dc/02/df44d1a73368fd0c0618e3169e7649303e6adb3ce96a429b617549f87165/statsmodels-0.14.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d330da34f59f1653c5193f9fe3a3a258977c880746db7f155fc33713ea858db5", size = 9930086 }, + { url = "https://files.pythonhosted.org/packages/33/6f/44a38bbef8a9641e02e36ad46ca27b43ff26161fe7292995f89306ce964c/statsmodels-0.14.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e9ddefba1d4e1107c1f20f601b0581421ea3ad9fd75ce3c2ba6a76b6dc4682c", size = 10429513 }, + { url = "https://files.pythonhosted.org/packages/68/8b/c640e4a243b59fc75e566ff3509ae55fb6cd4535643494be834c7d69c25d/statsmodels-0.14.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f43da7957e00190104c5dd0f661bfc6dfc68b87313e3f9c4dbd5e7d222e0aeb", size = 10789664 }, + { url = "https://files.pythonhosted.org/packages/f9/1b/f7c77e5a8c4aba97bca8c730cf4087b102f1cc796d9b71e3430dc31f9e57/statsmodels-0.14.4-cp39-cp39-win_amd64.whl", hash = "sha256:8286f69a5e1d0e0b366ffed5691140c83d3efc75da6dbf34a3d06e88abfaaab6", size = 9858334 }, +] + +[[package]] +name = "streamlit" +version = "1.41.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "altair" }, + { name = "blinker" }, + { name = "cachetools" }, + { name = "click" }, + { name = "gitpython" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pandas" }, + { name = "pillow" }, + { name = "protobuf" }, + { name = "pyarrow" }, + { name = "pydeck" }, + { name = "requests" }, + { name = "rich" }, + { name = "tenacity" }, + { name = "toml" }, + { name = "tornado" }, + { name = "typing-extensions" }, + { name = "watchdog", marker = "sys_platform != 'darwin'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/78/33/14b5ac0369ecf0af675911e5e84b934e6fcc2cec850857d2390eb373b0a6/streamlit-1.41.1.tar.gz", hash = "sha256:6626d32b098ba1458b71eebdd634c62af2dd876380e59c4b6a1e828a39d62d69", size = 8712473 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/87/b2e162869500062a94dde7589c167367b5538dab6eacce2e7c0f00d5c9c5/streamlit-1.41.1-py2.py3-none-any.whl", hash = "sha256:0def00822480071d642e6df36cd63c089f991da3a69fd9eb4ab8f65ce27de4e0", size = 9100386 }, +] + +[[package]] +name = "streamlit-ace" +version = "0.1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "streamlit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2e/34/a508123af170e41852d247b17d38d9cddc2b4be3884f2f29a84aeb322992/streamlit_ace-0.1.1.tar.gz", hash = "sha256:1852fa19707685fd4241be9256c1ab1a89da4a3b8c28ab286e3bff122d3a1686", size = 2389652 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/c9/e862cefc5735e11c101b2bc45f37a2b767666ffe673d262004202b506f3b/streamlit_ace-0.1.1-py3-none-any.whl", hash = "sha256:cdf908a90058fa831fb720d29e2aef35a0f5799eac33e2b2e58ba9f3631b1aa5", size = 3642928 }, +] + +[[package]] +name = "streamlit-elements-fluence" +version = "0.1.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "streamlit" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/24/55/d4588a8467802505ca840ebf74d03bfa72dfcdcf94e64bd512b0449392d6/streamlit-elements-fluence-0.1.4.tar.gz", hash = "sha256:79263c002918b67ddc5a6ee929aa3f80f091ddc3df0b4703ffda134187e255ef", size = 16950858 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/b4/7db136a1b9d1e4eb71d7713abb0695ca2555c112add3ef0a4e01f58c77c4/streamlit_elements_fluence-0.1.4-py3-none-any.whl", hash = "sha256:6bf8c7ee582e4b0edf4603dc5b743888bb9df878ee1bd1780c8db287cd08c303", size = 4510084 }, +] + +[[package]] +name = "tenacity" +version = "9.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/94/91fccdb4b8110642462e653d5dcb27e7b674742ad68efd146367da7bdb10/tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b", size = 47421 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539", size = 28169 }, +] + +[[package]] +name = "text-unidecode" +version = "1.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ab/e2/e9a00f0ccb71718418230718b3d900e71a5d16e701a3dae079a21e9cd8f8/text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93", size = 76885 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/a5/c0b6468d3824fe3fde30dbb5e1f687b291608f9473681bbf7dabbf5a87d7/text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8", size = 78154 }, +] + +[[package]] +name = "toml" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588 }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077 }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429 }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067 }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030 }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898 }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894 }, + { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319 }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273 }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310 }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309 }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762 }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453 }, + { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486 }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349 }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159 }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243 }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645 }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584 }, + { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875 }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418 }, + { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, +] + +[[package]] +name = "tomlkit" +version = "0.13.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/09/a439bec5888f00a54b8b9f05fa94d7f901d6735ef4e55dcec9bc37b5d8fa/tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79", size = 192885 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955 }, +] + +[[package]] +name = "tornado" +version = "6.4.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/59/45/a0daf161f7d6f36c3ea5fc0c2de619746cc3dd4c76402e9db545bd920f63/tornado-6.4.2.tar.gz", hash = "sha256:92bad5b4746e9879fd7bf1eb21dce4e3fc5128d71601f80005afa39237ad620b", size = 501135 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/7e/71f604d8cea1b58f82ba3590290b66da1e72d840aeb37e0d5f7291bd30db/tornado-6.4.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:e828cce1123e9e44ae2a50a9de3055497ab1d0aeb440c5ac23064d9e44880da1", size = 436299 }, + { url = "https://files.pythonhosted.org/packages/96/44/87543a3b99016d0bf54fdaab30d24bf0af2e848f1d13d34a3a5380aabe16/tornado-6.4.2-cp38-abi3-macosx_10_9_x86_64.whl", hash = "sha256:072ce12ada169c5b00b7d92a99ba089447ccc993ea2143c9ede887e0937aa803", size = 434253 }, + { url = "https://files.pythonhosted.org/packages/cb/fb/fdf679b4ce51bcb7210801ef4f11fdac96e9885daa402861751353beea6e/tornado-6.4.2-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a017d239bd1bb0919f72af256a970624241f070496635784d9bf0db640d3fec", size = 437602 }, + { url = "https://files.pythonhosted.org/packages/4f/3b/e31aeffffc22b475a64dbeb273026a21b5b566f74dee48742817626c47dc/tornado-6.4.2-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c36e62ce8f63409301537222faffcef7dfc5284f27eec227389f2ad11b09d946", size = 436972 }, + { url = "https://files.pythonhosted.org/packages/22/55/b78a464de78051a30599ceb6983b01d8f732e6f69bf37b4ed07f642ac0fc/tornado-6.4.2-cp38-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca9eb02196e789c9cb5c3c7c0f04fb447dc2adffd95265b2c7223a8a615ccbf", size = 437173 }, + { url = "https://files.pythonhosted.org/packages/79/5e/be4fb0d1684eb822c9a62fb18a3e44a06188f78aa466b2ad991d2ee31104/tornado-6.4.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:304463bd0772442ff4d0f5149c6f1c2135a1fae045adf070821c6cdc76980634", size = 437892 }, + { url = "https://files.pythonhosted.org/packages/f5/33/4f91fdd94ea36e1d796147003b490fe60a0215ac5737b6f9c65e160d4fe0/tornado-6.4.2-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:c82c46813ba483a385ab2a99caeaedf92585a1f90defb5693351fa7e4ea0bf73", size = 437334 }, + { url = "https://files.pythonhosted.org/packages/2b/ae/c1b22d4524b0e10da2f29a176fb2890386f7bd1f63aacf186444873a88a0/tornado-6.4.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:932d195ca9015956fa502c6b56af9eb06106140d844a335590c1ec7f5277d10c", size = 437261 }, + { url = "https://files.pythonhosted.org/packages/b5/25/36dbd49ab6d179bcfc4c6c093a51795a4f3bed380543a8242ac3517a1751/tornado-6.4.2-cp38-abi3-win32.whl", hash = "sha256:2876cef82e6c5978fde1e0d5b1f919d756968d5b4282418f3146b79b58556482", size = 438463 }, + { url = "https://files.pythonhosted.org/packages/61/cc/58b1adeb1bb46228442081e746fcdbc4540905c87e8add7c277540934edb/tornado-6.4.2-cp38-abi3-win_amd64.whl", hash = "sha256:908b71bf3ff37d81073356a5fadcc660eb10c1476ee6e2725588626ce7e5ca38", size = 438907 }, +] + +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540 }, +] + +[[package]] +name = "typeguard" +version = "4.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/62/c3/400917dd37d7b8c07e9723f3046818530423e1e759a56a22133362adab00/typeguard-4.4.1.tar.gz", hash = "sha256:0d22a89d00b453b47c49875f42b6601b961757541a2e1e0ef517b6e24213c21b", size = 74959 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f2/53/9465dedf2d69fe26008e7732cf6e0a385e387c240869e7d54eed49782a3c/typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21", size = 35635 }, +] + +[[package]] +name = "types-requests" +version = "2.32.0.20241016" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fa/3c/4f2a430c01a22abd49a583b6b944173e39e7d01b688190a5618bd59a2e22/types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95", size = 18065 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/01/485b3026ff90e5190b5e24f1711522e06c79f4a56c8f4b95848ac072e20f/types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747", size = 15836 }, +] + +[[package]] +name = "typing-extensions" +version = "4.12.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, +] + +[[package]] +name = "tzdata" +version = "2024.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e1/34/943888654477a574a86a98e9896bae89c7aa15078ec29f490fef2f1e5384/tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc", size = 193282 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 }, +] + +[[package]] +name = "urllib3" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/63/e53da845320b757bf29ef6a9062f5c669fe997973f966045cb019c3f4b66/urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d", size = 307268 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, +] + +[[package]] +name = "virtualenv" +version = "20.28.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "distlib" }, + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bf/75/53316a5a8050069228a2f6d11f32046cfa94fbb6cc3f08703f59b873de2e/virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa", size = 7650368 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/10/f9/0919cf6f1432a8c4baa62511f8f8da8225432d22e83e3476f5be1a1edc6e/virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0", size = 4276702 }, +] + +[[package]] +name = "visions" +version = "0.7.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "attrs" }, + { name = "multimethod" }, + { name = "networkx", version = "3.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "numpy" }, + { name = "pandas" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/40/17/8ddcab3699d442a3a21c9859b5573a5b96ec19c51b85525653433bc28f5e/visions-0.7.6.tar.gz", hash = "sha256:00f494a7f78917db2292e11ea832c6e026b64783e688b11da24f4c271ef1631d", size = 566629 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/bf/612b24e711ae25dea9af19b9304634b8949faa0b035fad47e8bcadf62f59/visions-0.7.6-py3-none-any.whl", hash = "sha256:72b7f8dbc374e9d6055e938c8c67b0b8da52f3bcb8320f25d86b1a57457e7aa6", size = 104827 }, +] + +[package.optional-dependencies] +type-image-path = [ + { name = "imagehash" }, + { name = "pillow" }, +] + +[[package]] +name = "watchdog" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079 }, + { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078 }, + { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076 }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077 }, + { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078 }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077 }, + { url = "https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078 }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065 }, + { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070 }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067 }, +] + +[[package]] +name = "wordcloud" +version = "1.9.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "matplotlib" }, + { name = "numpy" }, + { name = "pillow" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/03/a1/0530b9001b05dcd13de21c2d4d821db5365dade5180242179fbc39372c57/wordcloud-1.9.4.tar.gz", hash = "sha256:b273d8a5ded97d3ead904046b49464dcb71119ee79df875072a4c105cadd347a", size = 27563753 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/b0/21ab9282e5922ccd23b67050151476a4a32a7678d59bad4aa46534615afa/wordcloud-1.9.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:61a84e7311fce8415943edcb7b2ba65b4bfec1dc6dff8fe5a8ea76e278447fb2", size = 171753 }, + { url = "https://files.pythonhosted.org/packages/54/e5/7969f3b4a58bd4ac19e60e86f6a23925c19416ec065725b530f40c692e0e/wordcloud-1.9.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e8752750726f31385f364823d3ef1d9c8ec829e5c07706c36beb40679945c71", size = 167716 }, + { url = "https://files.pythonhosted.org/packages/dc/2a/a6d5c03decbe145858e777e80dbbf92697da9728c9d0c71c1c81a2d68e35/wordcloud-1.9.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:990dfd6dd43a1c7fa156be865eb98aba167a986b65f56cbf50e24772107fcd70", size = 511106 }, + { url = "https://files.pythonhosted.org/packages/e2/1e/663284794aedbaeb58939d3df391725de6d3966d4f742f6ff37ae7181a1b/wordcloud-1.9.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a70fe8999cd63aec64daa0377b720be6e5ff344963b828caeb4c2a081599a3a0", size = 493431 }, + { url = "https://files.pythonhosted.org/packages/2f/76/971e2689430555fb1a04f148a4888f0b4d20873f19d1e04c02ff1e613b60/wordcloud-1.9.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:37dcd5500cc2ea02950739390e89e2efa6624c2f54b5e2df1ee961fce685b2d7", size = 514561 }, + { url = "https://files.pythonhosted.org/packages/76/40/eb530e49348f8acac2351843f886f74ea6f17e99c349aad6d583029d60f7/wordcloud-1.9.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5cc5c902dc2492b9fc0e29a1f5c688422d7e6eb9e5c0e43f0331d1c8e1341ba", size = 530938 }, + { url = "https://files.pythonhosted.org/packages/c5/56/ac3d6bab585ac2a9fc7d3aec4f25832188ad1d7d81815dbca9e7017709e6/wordcloud-1.9.4-cp310-cp310-win32.whl", hash = "sha256:c20fbb51af2046c940b4fead4bafffc30b4191f5fb477c3af844446d8956bfd4", size = 289958 }, + { url = "https://files.pythonhosted.org/packages/79/59/7a85f2be948999e16b88574cc8341524a4042b4544d1904b08a652f7f415/wordcloud-1.9.4-cp310-cp310-win_amd64.whl", hash = "sha256:61a153e76d73c72f5cc6c89ee80ddad70758a207c3c6b1d86be8635ec70164f1", size = 299767 }, + { url = "https://files.pythonhosted.org/packages/32/07/3d355f71fb32f9d780d0099ece83dd10f0b9b71a9359cd468f3c25c52bfc/wordcloud-1.9.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af168eeaed67a675f35b5668a7804c4d64f8e4f62a273b909eb5cc39efc4c294", size = 171653 }, + { url = "https://files.pythonhosted.org/packages/fa/5f/34a0c0be52dc95f96aa1197f03ad5d5c58a9901c74f050d21a3e1b21ebf9/wordcloud-1.9.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3092bf85cb20158c8b90d78650dc0226985109ac6fe13a0086ac47b9581b62ce", size = 167608 }, + { url = "https://files.pythonhosted.org/packages/ee/d3/67ccdab9d2756f8b30b0669015840cd5fdb5f062a2d621d67f033cf3dd54/wordcloud-1.9.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfb852f551681f5e33feb934505e060952b6aa98aaa48c781cdbf101f84e7cc", size = 547933 }, + { url = "https://files.pythonhosted.org/packages/9b/a5/44f374424cceab8db931ea4bf48a78d9969e5fd3e6c47d84d0e2276c0bd6/wordcloud-1.9.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57ad8064a634a4870fcd00a9694c0a7839c6dfbac3d32522c69d5e1e9cbfd911", size = 528539 }, + { url = "https://files.pythonhosted.org/packages/fc/5f/2ab2ce735e4dc8775888ddb2fb12c91d2a4208cffb5861a49b3191da74e9/wordcloud-1.9.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ea14858973ad8561a20a5475eb8d7ad33622bc5f27c60206fbb3e10a036cee26", size = 548667 }, + { url = "https://files.pythonhosted.org/packages/94/3e/54fc6d268dc6404d878b2e49a587b6f92440d08cb5d6bb6b8f1b575efef3/wordcloud-1.9.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b27759f12dd235468ff8c1df875b106b23dbf2c74aae05cdcdc3ccd8e23ea89c", size = 567875 }, + { url = "https://files.pythonhosted.org/packages/28/a7/ca3e02c86080332c3af55c581ba0a79c4c25b12bc49fcd2bbbbeeec7a490/wordcloud-1.9.4-cp311-cp311-win32.whl", hash = "sha256:0ac3d87627022fb8cce17297298be96c91185edd55ecf8906f89f981b55974f0", size = 289607 }, + { url = "https://files.pythonhosted.org/packages/00/09/abb305dce85911b8fba382926cfc57f2f257729e25937fdcc63f3a1a67f9/wordcloud-1.9.4-cp311-cp311-win_amd64.whl", hash = "sha256:85368249df056527f1b64e80e68636abb61f0f6bd2d1c430894d2af1feea7f73", size = 299928 }, + { url = "https://files.pythonhosted.org/packages/3d/5d/b51a970694ca5feda24cec7dbd37a63bcecd3e5735386840b0ed883bd3df/wordcloud-1.9.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3910494ce5acb27731fd5678d146e8aa8f588d5fdb455810c817ff4b84ee0f67", size = 173370 }, + { url = "https://files.pythonhosted.org/packages/c4/07/1a82829127bbf4f9d46de5c6009b7c095a450fa8f0fb466f788a62eeae11/wordcloud-1.9.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1b1c29a0089ee90778700cc96305fa830a6a5bbb342eaaa59d6ac8d37a9b232f", size = 168868 }, + { url = "https://files.pythonhosted.org/packages/af/ad/9ec4ded660b95c265ec4de2688ac1d0940bb06e609193c200853ba0e7db6/wordcloud-1.9.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f369ae7bef16341c2bb208e658d5e4c56517046eb6176f89ac95525eaf8ace09", size = 539171 }, + { url = "https://files.pythonhosted.org/packages/49/ef/a9aad2400d5f68fca8efd6b75fb5067b7c91abcf9f9cfd5cc4cd3dc955ba/wordcloud-1.9.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ec6ffba61ca20123e7c09103a5692bbc3163f75ee0bdc7893e80e0e2786ccd2", size = 518450 }, + { url = "https://files.pythonhosted.org/packages/d1/b0/320b69ad1976a351649e778ccc1d899791010661736179614434f5229cdf/wordcloud-1.9.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:cdc4aac2bcce77fd91dbfe91db5a8c0cdc239e10d8954356d2ebf79a3b43646c", size = 542195 }, + { url = "https://files.pythonhosted.org/packages/7d/db/c9c3f2d6e1af3f8b2488da4d14d731ecaf8ba990bdaed1d0f9e3f9c6aa5e/wordcloud-1.9.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e4942fbed48a88a0c42c5b0a057651fc09d26b31be8b6c069adaaa5051836040", size = 559512 }, + { url = "https://files.pythonhosted.org/packages/b3/be/307b2ba29f9502ea2fa5f8553d1674f0b811cc97f6d4f88a5ef374f91a75/wordcloud-1.9.4-cp312-cp312-win32.whl", hash = "sha256:96b801fe4b2aa39bb6c5e68b4a74c81fd8996dd5fb5cea31fda518dc5f77ad82", size = 291061 }, + { url = "https://files.pythonhosted.org/packages/b9/74/6a15f68f59bc90032bfcf62eca796a5675be95e1aaf84f7861e1bfcfa8e5/wordcloud-1.9.4-cp312-cp312-win_amd64.whl", hash = "sha256:360977705d0808a1795fcbe98afb5dc4833cb4bb8e421cbb10e93ef0bce816ff", size = 301186 }, + { url = "https://files.pythonhosted.org/packages/f9/47/2c9c9e3090d80299e84482e3bb99b3fdfddb1c6fcc0e376ed9e33130319d/wordcloud-1.9.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e5b2f7195adef0a071dc24a568d8a7715bc5cf5d752b4560f51da3aa4467dcf8", size = 172267 }, + { url = "https://files.pythonhosted.org/packages/06/dc/87b01d90d62bd6715c864f379ae7ace8a53b0755abf1ad8e822129002528/wordcloud-1.9.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:34843fa49135c4ed3739dea050696e707fd00e7335ee4ed62c33639589f90adf", size = 168323 }, + { url = "https://files.pythonhosted.org/packages/7e/29/5fd253433d880dd91a0e058e292fae5828277166e988204638ede2a3e6ce/wordcloud-1.9.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6570cc4e48e8e951d24ef6599cd8bf7ff405fbe995ff6d596bcdfa290a6206a8", size = 513727 }, + { url = "https://files.pythonhosted.org/packages/38/0b/f03c6709bfe7c72e667431c56c2bbd6821d6ffe7b28e6301e7dd31b62837/wordcloud-1.9.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17f944805a17b8343eb877c9aa1dc9e5339eb14c02dd00ec80feccea899bbf81", size = 496534 }, + { url = "https://files.pythonhosted.org/packages/25/db/9bab416d3196b578021ca73d81b9268d5de50f9a92f8288b554ec883fd4a/wordcloud-1.9.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:7c1cd2a6ef876f5f9fe0255e44f131a6113f883447ed1cf8bdb86f569603bac9", size = 517229 }, + { url = "https://files.pythonhosted.org/packages/a4/22/a40722ffd1975a52ef3697a9704eb75e1d9adfca5502bd10cf8be8a81543/wordcloud-1.9.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2b129584327ba21d05869fcf9495f10f7b31a34a580c431c4942a71ce2317e79", size = 533556 }, + { url = "https://files.pythonhosted.org/packages/35/d3/720a79f055bc776e65369e26e2c11ed33c3a3549ca8753c5929842dda19d/wordcloud-1.9.4-cp39-cp39-win32.whl", hash = "sha256:526dfd822600f158210a191a59cc4bdcaaa1ff05ab2aa199040d857a518b1db6", size = 290653 }, + { url = "https://files.pythonhosted.org/packages/08/02/1e220adb310d5b5019e1ee85fdb7616e6401b2db434c790e7505012f847c/wordcloud-1.9.4-cp39-cp39-win_amd64.whl", hash = "sha256:ac32b851a19b7d2a9ee5e0aebc8210bf16eadc42c5c0da82e36d447552c8ec48", size = 300352 }, + { url = "https://files.pythonhosted.org/packages/63/6b/886a158a7ccc9c4c0a5ff2f3d45623b66da1af73c75584bc25113dff5e36/wordcloud-1.9.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f733cca468eae79af83cdda1de2434f1799cefef461ed892e7679d5a4c929fa1", size = 157970 }, + { url = "https://files.pythonhosted.org/packages/4d/c5/180698e0f0e6bb42b3a173ce47d0474649dfcfa9bd60c902609e5707fde6/wordcloud-1.9.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a99f96efe5983c6eed17abb8766ced713ddf18b26450da74addc91570922e62", size = 171923 }, + { url = "https://files.pythonhosted.org/packages/a2/b9/94ca10d94b6f64ab55a7bacc3d9e30df339a8d3607d1919099d12da12024/wordcloud-1.9.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80773ec6a9caa2048602bc347151e3b6e68e1d8fab148dfd0d2e7d4302ce5c01", size = 172780 }, + { url = "https://files.pythonhosted.org/packages/e6/ed/35af241d0243ed82f5231a4e4dbabfdb421fba2b64fd4bdd66b413ad5c04/wordcloud-1.9.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ca95392bba150190cca8df4a97854b554bdeb28007f28bf4698bd7e1af91b310", size = 295073 }, + { url = "https://files.pythonhosted.org/packages/95/72/afbb82bbe764c427ba40be15ea0e5251b726ae6abb0314ad0084ebcd64cb/wordcloud-1.9.4-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6a30ed8aa50b98edb113f72ef619581c221ba3678adeeed88345263c90092561", size = 157850 }, + { url = "https://files.pythonhosted.org/packages/39/d7/1b9286323356c5ca8c25c16481a98f3c26a3b0221128af8b3e1515ab3991/wordcloud-1.9.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a62627e5b081b23a4586104d4b01d064db7b53342ae123b511326585eaf7433c", size = 171806 }, + { url = "https://files.pythonhosted.org/packages/25/74/fda616cf81a33c836b008ba2f32ffc8dad1eaa20739687239914ae909234/wordcloud-1.9.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e137493365770f59655c7308ff76addc95ada2c6bd50ac119e4c33091e2e4e08", size = 172639 }, + { url = "https://files.pythonhosted.org/packages/18/6e/1b9134027cd1d3534a5ec3cc2cc279ffad2a3999d3f74e89e05f58cb4b5b/wordcloud-1.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:665f8e7de3dcc1e43aa5bdd9560d56ed51026ba638a33472eede2b9051108adb", size = 294911 }, +] + +[[package]] +name = "ydata-profiling" +version = "4.12.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "dacite" }, + { name = "htmlmin" }, + { name = "imagehash" }, + { name = "jinja2" }, + { name = "matplotlib" }, + { name = "multimethod" }, + { name = "numba" }, + { name = "numpy" }, + { name = "pandas" }, + { name = "phik" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "scipy" }, + { name = "seaborn" }, + { name = "statsmodels" }, + { name = "tqdm" }, + { name = "typeguard" }, + { name = "visions", extra = ["type-image-path"] }, + { name = "wordcloud" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b0/0a/2330bfead50869350b28c19df994081ce5f06e1b9193a789cbb05bab5719/ydata-profiling-4.12.1.tar.gz", hash = "sha256:341b23bbf220a03639a0e2a4b58c4c663cb0a8d73dd27b6f93fa86406cd16cc1", size = 316123 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/e2/91e8ec48c5a9ba759847d74bb69475d22d79922f686e5252d0eb16e5bb85/ydata_profiling-4.12.1-py2.py3-none-any.whl", hash = "sha256:c14e148dfc779540203acd17b2298171a72c8098c7e2481f8030f50d6f0dc4b5", size = 390905 }, +] + +[[package]] +name = "zipp" +version = "3.21.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3f/50/bad581df71744867e9468ebd0bcd6505de3b275e06f202c2cb016e3ff56f/zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4", size = 24545 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/1a/7e4798e9339adc931158c9d69ecc34f5e6791489d469f5e50ec15e35f458/zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931", size = 9630 }, +] From aaca6db466fa98e0ea695d0f4b1acbbd13916674 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 22:38:54 -0700 Subject: [PATCH 19/46] chore: run pre-commit --- .github/workflows/constraints.txt | 1 - pyproject.toml | 1 - 2 files changed, 2 deletions(-) diff --git a/.github/workflows/constraints.txt b/.github/workflows/constraints.txt index 061c7ca4..f35c807f 100644 --- a/.github/workflows/constraints.txt +++ b/.github/workflows/constraints.txt @@ -1,3 +1,2 @@ pip==24.0 uv==0.5.13 - diff --git a/pyproject.toml b/pyproject.toml index 4c03dcc2..da6eecda 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,7 +45,6 @@ workbench = [ openai = ["openai>0.28.0"] - dev = [ "black>=21.9b0", "mypy>=0.910", From e794eabed04c54eb80ebe8e0f1ab53319e2bea91 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 22:43:56 -0700 Subject: [PATCH 20/46] chore: swap in our log controller --- src/dbt_osmosis/core/log_controller.py | 39 +- src/dbt_osmosis/core/osmosis.py | 6 +- src/dbt_osmosis/core/osmosis_o.py | 1305 ------------------------ 3 files changed, 21 insertions(+), 1329 deletions(-) delete mode 100644 src/dbt_osmosis/core/osmosis_o.py diff --git a/src/dbt_osmosis/core/log_controller.py b/src/dbt_osmosis/core/log_controller.py index 22fa7042..9c06c6da 100644 --- a/src/dbt_osmosis/core/log_controller.py +++ b/src/dbt_osmosis/core/log_controller.py @@ -1,26 +1,20 @@ +"""Logging module for dbt-osmosis. This module provides a logger factory that can be used to create loggers with rotating log files and console streaming. The logger is configured with a default log level of INFO and a default log file format of "time — name — level — message". The default log file path is `~/.dbt-osmosis/logs` and the default log file name is the logger name. The logger is configured to not propagate messages to the root logger.""" + +from __future__ import annotations + import logging from functools import lru_cache from logging.handlers import RotatingFileHandler from pathlib import Path -from typing import Optional, Union from rich.logging import RichHandler -# Log File Format -LOG_FILE_FORMAT = "%(asctime)s — %(name)s — %(levelname)s — %(message)s" - -# Log File Path -LOG_PATH = Path.home().absolute() / ".dbt-osmosis" / "logs" - -# Console Output Level -LOGGING_LEVEL = logging.INFO +_LOG_FILE_FORMAT = "%(asctime)s — %(name)s — %(levelname)s — %(message)s" +_LOG_PATH = Path.home().absolute() / ".dbt-osmosis" / "logs" +_LOGGING_LEVEL = logging.INFO -def rotating_log_handler( - name: str, - path: Path, - formatter: str, -) -> RotatingFileHandler: +def rotating_log_handler(name: str, path: Path, formatter: str) -> RotatingFileHandler: """This handler writes warning and higher level outputs to logs in a home .dbt-osmosis directory rotating them as needed""" path.mkdir(parents=True, exist_ok=True) handler = RotatingFileHandler( @@ -36,11 +30,12 @@ def rotating_log_handler( @lru_cache(maxsize=10) def logger( name: str = "dbt-osmosis", - level: Optional[Union[int, str]] = None, - path: Optional[Path] = None, - formatter: Optional[str] = None, + level: int | str | None = None, + path: Path | None = None, + formatter: str | None = None, ) -> logging.Logger: """Builds and caches loggers. Can be configured with module level attributes or on a call by call basis. + Simplifies logger management without having to instantiate separate pointers in each module. Args: @@ -55,11 +50,11 @@ def logger( if isinstance(level, str): level = getattr(logging, level, logging.INFO) if level is None: - level = LOGGING_LEVEL + level = _LOGGING_LEVEL if path is None: - path = LOG_PATH + path = _LOG_PATH if formatter is None: - formatter = LOG_FILE_FORMAT + formatter = _LOG_FILE_FORMAT _logger = logging.getLogger(name) _logger.setLevel(level) _logger.addHandler(rotating_log_handler(name, path, formatter)) @@ -73,3 +68,7 @@ def logger( ) _logger.propagate = False return _logger + + +LOGGER = logger() +"""Default logger for dbt-osmosis""" diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 10e26de7..e54c0f36 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -50,11 +50,9 @@ from dbt_common.clients.system import get_env from dbt_common.context import set_invocation_context -disable_tracking() +from dbt_osmosis.core.log_controller import LOGGER as logger -logger = logging.getLogger(__file__) -logger.setLevel(logging.DEBUG) -logger.addHandler(rich.logging.RichHandler(level=logging.DEBUG)) +disable_tracking() T = t.TypeVar("T") diff --git a/src/dbt_osmosis/core/osmosis_o.py b/src/dbt_osmosis/core/osmosis_o.py deleted file mode 100644 index 89f00df8..00000000 --- a/src/dbt_osmosis/core/osmosis_o.py +++ /dev/null @@ -1,1305 +0,0 @@ -# pyright: reportUnknownVariableType=false, reportPrivateImportUsage=false, reportAny=false, reportUnknownMemberType=false -import json -import logging -import os -import re -import sys -import threading -import time -import typing as t -import uuid -from argparse import Namespace -from collections import OrderedDict, UserDict -from collections.abc import Iterable, Iterator, MutableMapping -from concurrent.futures import ThreadPoolExecutor, wait -from contextlib import contextmanager -from copy import copy -from dataclasses import dataclass, field -from functools import lru_cache -from itertools import chain -from pathlib import Path - -import ruamel.yaml -from dbt.adapters.factory import get_adapter_class_by_name -from dbt.config.runtime import RuntimeConfig -from dbt.contracts.graph.manifest import Manifest -from dbt.contracts.graph.nodes import ColumnInfo, ManifestNode -from dbt.contracts.results import CatalogArtifact, CatalogKey, CatalogTable, ColumnMetadata -from dbt.flags import set_from_args -from dbt.node_types import NodeType -from dbt.parser.manifest import ManifestLoader, process_node -from dbt.parser.sql import SqlBlockParser, SqlMacroParser -from dbt.task.sql import SqlCompileRunner -from dbt.tracking import disable_tracking - -# Disabling dbt tracking for non-standard usage -disable_tracking() - - -def logger() -> logging.Logger: - """Get the log handle for dbt-osmosis""" - return logging.getLogger("dbt-osmosis") - - -def has_jinja(code: str) -> bool: - """Check if code contains Jinja tokens""" - return any(token in code for token in ("{{", "}}", "{%", "%}", "{#", "#}")) - - -def column_casing(column: str, credentials_type: str, to_lower: bool) -> str: - """Utility to handle column name casing based on dbt adapter & user flag.""" - # If quoted in snowflake, pass verbatim - if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): - return column - # Otherwise apply user-specified transformations - if to_lower: - return column.lower() - if credentials_type == "snowflake": - return column.upper() - return column - - -class YamlHandler(ruamel.yaml.YAML): - """A ruamel.yaml wrapper to handle dbt YAML files with sane defaults.""" - - def __init__(self, **kwargs: t.Any) -> None: - super().__init__(**kwargs) - self.indent(mapping=2, sequence=4, offset=2) - self.width: int = 800 - self.preserve_quotes: bool = True - self.default_flow_style: bool = False - self.encoding: str = os.getenv("DBT_OSMOSIS_ENCODING", "utf-8") - - -@dataclass -class SchemaFileLocation: - """Dataclass to store schema file location details.""" - - target: Path - current: Path | None = None - node_type: NodeType = NodeType.Model - - @property - def is_valid(self) -> bool: - return self.current == self.target - - -@dataclass -class SchemaFileMigration: - """Dataclass to store schema file migration details.""" - - output: dict[str, t.Any] = field( - default_factory=lambda: {"version": 2, "models": [], "sources": []} - ) - supersede: dict[Path, list[str]] = field(default_factory=dict) - - -@dataclass -class DbtConfiguration: - """Stores dbt project configuration in a namespace""" - - project_dir: str - profiles_dir: str - threads: int = 1 - single_threaded: bool = True - which: str = "" - target: str | None = None - profile: str | None = None - - DEBUG: bool = False - - _vars: str | dict[str, t.Any] = field(default_factory=dict) - - def __post_init__(self) -> None: - if self.threads != 1: - self.single_threaded = False - - @property - def vars(self) -> str: - if isinstance(self._vars, dict): - return json.dumps(self._vars) - return self._vars - - @vars.setter - def vars(self, v: t.Any) -> None: - if not isinstance(v, (str, dict)): - raise ValueError("vars must be a string or dict") - self._vars = v - - -class DbtManifestProxy(UserDict[str, t.Any]): - """Proxy for the manifest's flat_graph, read-only by design.""" - - def _readonly(self, *args: t.Any, **kwargs: t.Any) -> t.Never: - _ = args, kwargs - raise RuntimeError("Cannot modify DbtManifestProxy") - - __setitem__: t.Callable[..., None] = _readonly - __delitem__: t.Callable[..., None] = _readonly - pop: t.Callable[..., None] = _readonly - popitem: t.Callable[..., t.Any] = _readonly - clear: t.Callable[..., None] = _readonly - update: t.Callable[..., None] = _readonly - setdefault: t.Callable[..., None] = _readonly - - -@dataclass -class DbtAdapterExecutionResult: - adapter_response: t.Any - table: t.Any - raw_code: str - compiled_code: str - - -@dataclass -class DbtAdapterCompilationResult: - raw_code: str - compiled_code: str - node: ManifestNode - injected_code: str | None = None - - -def find_default_project_dir() -> str: - cwd = Path.cwd() - # Walk up if needed - for p in [cwd] + list(cwd.parents): - if (p / "dbt_project.yml").exists(): - return str(p.resolve()) - return str(cwd.resolve()) - - -def find_default_profiles_dir() -> str: - # Common fallback for DBT_PROFILES_DIR - if (Path.cwd() / "profiles.yml").exists(): - return str(Path.cwd().resolve()) - return str(Path.home() / ".dbt") - - -class DbtProject: - """Wraps dbt's in-memory project & adapter, enabling queries, compilation, etc.""" - - ADAPTER_TTL: float = 3600.0 - - def __init__( - self, - target: str | None = None, - profiles_dir: str | None = None, - project_dir: str | None = None, - threads: int = 1, - vars: str | dict[str, t.Any] | None = None, - profile: str | None = None, - ): - if not profiles_dir: - profiles_dir = find_default_profiles_dir() - if not project_dir: - project_dir = find_default_project_dir() - - self.base_config: DbtConfiguration = DbtConfiguration( - project_dir=project_dir, - profiles_dir=profiles_dir, - target=target, - threads=threads, - profile=profile, - ) - if vars: - self.base_config.vars = vars - - self.adapter_mutex: threading.Lock = threading.Lock() - self.parsing_mutex: threading.Lock = threading.Lock() - self.manifest_mutation_mutex: threading.Lock = threading.Lock() - - self._config: RuntimeConfig | None = None - self._manifest: Manifest | None = None - self.parse_project(init=True) - - self._sql_parser: SqlBlockParser | None = None - self._macro_parser: SqlMacroParser | None = None - self._adapter_created_at: float = 0.0 - - @property - def config(self) -> RuntimeConfig: - """Get the dbt project configuration.""" - if self._config is None: - raise RuntimeError("DbtProject not initialized. parse_project() must be called first.") - return self._config - - @property - def manifest(self) -> Manifest: - """Get the dbt project manifest.""" - if self._manifest is None: - raise RuntimeError("DbtProject not initialized. parse_project() must be called first.") - return self._manifest - - def parse_project(self, init: bool = False) -> None: - """Parse the dbt project configuration and manifest.""" - with self.parsing_mutex: - if init: - ns = Namespace( - **self.base_config.__dict__ - ) # TODO: replace with method call to handle _vars prop - set_from_args(ns, ns) - self._config = RuntimeConfig.from_args(ns) - self.initialize_adapter() - loader = ManifestLoader( - self.config, - self.config.load_dependencies(), - self.adapter.connections.set_query_header, - ) - self._manifest = loader.load() - self._manifest.build_flat_graph() - loader.save_macros_to_adapter(self.adapter) - self._sql_parser = None - self._macro_parser = None - - def safe_parse_project(self, init: bool = False) -> None: - """Safely re-parse the dbt project configuration and manifest preserving internal state on error.""" - old_config = copy(getattr(self, "config", None)) - try: - self.parse_project(init=init) - except Exception as exc: - if old_config: - self._config = old_config - raise exc - # Write manifest to disk here - self.write_manifest_artifact() - - def initialize_adapter(self) -> None: - """Initialize the dbt adapter.""" - if hasattr(self, "_adapter"): - try: - self.adapter.connections.cleanup_all() - except Exception: - pass - try: - adapter_cls = get_adapter_class_by_name( - self.base_config.target or self.base_config.profile or "" - ) - except Exception: - # fallback if none found (dbt should raise if invalid type) - raise RuntimeError("Could not find an adapter class by name.") - if not adapter_cls: - raise RuntimeError("No valid adapter class found.") - - # NOTE: this smooths over an API change upstream - try: - self.adapter = adapter_cls(self.config) - except TypeError: - from dbt.mp_context import get_mp_context - - self.adapter = adapter_cls(self.config, get_mp_context()) # pyright: ignore[reportCallIssue] - - self.adapter.connections.set_connection_name() - self._adapter_created_at = time.time() - setattr(self.config, "adapter", self.adapter) - - @property - def adapter(self) -> t.Any: - """Get the dbt adapter. Automatically refreshes if TTL exceeded.""" - if (time.time() - getattr(self, "_adapter_created_at", 0)) > self.ADAPTER_TTL: - self.initialize_adapter() - return self._adapter # FIXME: add to init - - @adapter.setter - def adapter(self, v: t.Any) -> None: - """Set the dbt adapter. Thread-safe.""" - if self.adapter_mutex.acquire(blocking=False): - try: - setattr(self, "_adapter", v) - v.debug_query() # Verify connection - self._adapter_created_at = time.time() - setattr(self.config, "adapter", v) - finally: - self.adapter_mutex.release() - - @property - def manifest_dict(self) -> DbtManifestProxy: - """Get a read-only proxy for the manifest's flat_graph.""" - return DbtManifestProxy(self.manifest.flat_graph) - - def write_manifest_artifact(self) -> None: - """Convenience method to write the manifest to disk.""" - artifact_path = Path(self.config.project_root) / self.config.target_path / "manifest.json" - self.manifest.write(str(artifact_path)) - - def clear_internal_caches(self) -> None: - """Clear internal lru caches for the project instance.""" - self.compile_code.cache_clear() - self.unsafe_compile_code.cache_clear() - - def get_relation(self, database: str, schema: str, name: str) -> t.Any: - """Get a relation from the adapter.""" - return self.adapter.get_relation(database, schema, name) - - def adapter_execute( - self, sql: str, auto_begin: bool = False, fetch: bool = False - ) -> tuple[t.Any, t.Any]: - """Convenience method to execute a query via the adapter.""" - return self.adapter.execute(sql, auto_begin, fetch) - - def execute_code(self, raw_code: str) -> DbtAdapterExecutionResult: - """Execute SQL, compiling jinja if necessary and wrapping result in a consistent interface.""" - compiled = raw_code - if has_jinja(raw_code): - compiled = self.compile_code(raw_code).compiled_code - resp, table = self.adapter_execute(compiled, fetch=True) - return DbtAdapterExecutionResult(resp, table, raw_code, compiled) - - @contextmanager - def generate_server_node(self, sql: str, node_name: str = "anonymous_node"): - """Generate a server node, process it, and clear it after use. Mutates manifest during context.""" - with self.manifest_mutation_mutex: - self._clear_node(node_name) - sql_node = self.sql_parser.parse_remote(sql, node_name) - process_node(self.config, self.manifest, sql_node) - yield sql_node - self._clear_node(node_name) - - def unsafe_generate_server_node( - self, sql: str, node_name: str = "anonymous_node" - ) -> ManifestNode: - """Generate a server node without context, mutating manifest.""" - self._clear_node(node_name) - sql_node = self.sql_parser.parse_remote(sql, node_name) - process_node(self.config, self.manifest, sql_node) - return sql_node - - def _clear_node(self, name: str) -> None: - """Clear a node from the manifest.""" - _ = self.manifest.nodes.pop( - f"{NodeType.SqlOperation}.{self.config.project_name}.{name}", None - ) - - @property - def sql_parser(self) -> SqlBlockParser: - """Lazy handle to the dbt SQL parser for the project.""" - if not self._sql_parser: - self._sql_parser = SqlBlockParser(self.config, self.manifest, self._config) - return self._sql_parser - - @property - def macro_parser(self) -> SqlMacroParser: - """Lazy handle to the dbt SQL macro parser for the project.""" - if not self._macro_parser: - self._macro_parser = SqlMacroParser(self.config, self.manifest) - return self._macro_parser - - def compile_from_node(self, node: ManifestNode) -> DbtAdapterCompilationResult: - """Compile a node and wrap the result in a consistent interface.""" - compiled_node = SqlCompileRunner( - self._config, self.adapter, node=node, node_index=1, num_nodes=1 - ).compile(self.manifest) - return DbtAdapterCompilationResult( - raw_code=getattr(compiled_node, "raw_code"), - compiled_code=getattr(compiled_node, "compiled_code"), - node=compiled_node, - ) - - @lru_cache(maxsize=100) - def compile_code(self, raw_code: str) -> DbtAdapterCompilationResult: - """Compile raw SQL and wrap the result in a consistent interface, leveraging lru cache.""" - tmp_id = str(uuid.uuid4()) - with self.generate_server_node(raw_code, tmp_id) as node: - return self.compile_from_node(node) - - @lru_cache(maxsize=100) - def unsafe_compile_code(self, raw_code: str, retry: int = 3) -> DbtAdapterCompilationResult: - """Compile raw SQL and wrap the result in a consistent interface, leveraging lru cache. Technically less thread-safe than compile_code but faster in a high throughput server scenario""" - tmp_id = str(uuid.uuid4()) - try: - node = self.unsafe_generate_server_node(raw_code, tmp_id) - return self.compile_from_node(node) - except Exception as e: - if retry > 0: - return self.compile_code(raw_code) - raise e - finally: - self._clear_node(tmp_id) - - -# TODO: we will collapse this from the file it is in currently -class ColumnLevelKnowledgePropagator: - """Stub for doc-propagation logic. For brevity, only the relevant part is included.""" - - @staticmethod - def get_node_columns_with_inherited_knowledge( - manifest: t.Any, - node: ManifestNode, - placeholders: list[str], - project_dir: str, - use_unrendered_descriptions: bool, - ) -> dict[str, dict[str, t.Any]]: - """ - Return known doc/metadata from related lineage. - In real usage, you would gather from multiple upstream nodes. - """ - # This is a stub. - # For now, returning an empty dict or minimal placeholders - _ = manifest, node, placeholders, project_dir, use_unrendered_descriptions - return {} - - @staticmethod - def update_undocumented_columns_with_prior_knowledge( - columns_to_update: Iterable[str], - node: ManifestNode, - yaml_section: dict[str, t.Any], - known_knowledge: dict[str, dict[str, t.Any]], - skip_add_tags: bool, - skip_merge_meta: bool, - add_progenitor_to_meta: bool, - add_inheritance_keys: list[str], - ) -> int: - """ - Propagate docs from known_knowledge onto columns in node + yaml_section. - Return count of columns that changed. - """ - _ = skip_add_tags, skip_merge_meta, add_progenitor_to_meta, add_inheritance_keys - n = 0 - for col in columns_to_update: - if col not in node.columns: - continue - cinfo = node.columns[col] - old_desc = getattr(cinfo, "description", "") - # If we have prior knowledge, do something - # (for example, update cinfo.description if old_desc is blank). - new_desc = old_desc - if col in known_knowledge and not old_desc: - new_desc = known_knowledge[col].get("description", "") - if new_desc and new_desc != old_desc: - setattr(cinfo, "description", new_desc) - # Mirror in yaml - for c in yaml_section.get("columns", []): - if c["name"].lower() == col.lower(): - c["description"] = new_desc - n += 1 - return n - - -class MissingOsmosisConfig(Exception): - pass - - -class InvalidOsmosisConfig(Exception): - pass - - -@dataclass -class DbtYamlManager(DbtProject): - """Automates tasks around schema yml files, organization, coverage, etc. - - Inherits from DbtProject to access manifest and adapter. - """ - - fqn: str | None = None - models: list[str] = field(default_factory=list) - dry_run: bool = False - catalog_file: str | None = None - skip_add_columns: bool = False - skip_add_tags: bool = False - skip_add_data_types: bool = False - numeric_precision: bool = False - char_length: bool = False - skip_merge_meta: bool = False - add_progenitor_to_meta: bool = False - use_unrendered_descriptions: bool = False - add_inheritance_for_specified_keys: list[str] = field(default_factory=list) - output_to_lower: bool = False - - _mutex: threading.Lock = threading.Lock() - _pool: ThreadPoolExecutor = ThreadPoolExecutor(max_workers=(os.cpu_count() or 1) * 2) - _catalog: CatalogArtifact | None = field(default=None, init=False, repr=False) - _mutations: int = 0 - - placeholders: tuple[str, ...] = ( - "Pending further documentation", - "Pending further documentation.", - "No description for this column", - "No description for this column.", - "Not documented", - "Not documented.", - "Undefined", - "Undefined.", - "", - ) - - def __post_init__(self) -> None: - super(DbtProject, self).__init__() # FIXME: this is not right - - # Re-parse to ensure our newly added attributes (like skip_add_columns) are recognized - if not list(self.filtered_models()): - logger().warning("No models found to process given fqn/models arguments") - logger().info("Check your filters or supply a valid model name/fqn.") - sys.exit(0) - - @property - def yaml_handler(self) -> YamlHandler: - """Get a canonical YAML handler for dbt project files""" - if not hasattr(self, "_yaml_handler"): - self._yaml_handler = YamlHandler() # FIXME: do like DbtProject - return self._yaml_handler - - @property - def catalog(self) -> CatalogArtifact | None: - """Get the catalog artifact, loading from disk if needed.""" - if self._catalog: - return self._catalog - if not self.catalog_file: - return None - fp = Path(self.catalog_file) - if not fp.exists(): - return None - self._catalog = CatalogArtifact.from_dict(json.loads(fp.read_text())) - return self._catalog - - def _filter_model_by_fqn(self, node: ManifestNode) -> bool: - """Filter a model node by its fqn.""" - if not self.fqn: - return True - fqn_parts = self.fqn.split(".") - return len(node.fqn[1:]) >= len(fqn_parts) and all( - left == right for left, right in zip(fqn_parts, node.fqn[1:]) - ) - - def _filter_model_by_models(self, node: ManifestNode) -> bool: - """Filter a model node by its name.""" - for m in self.models: - if node.name == m: - return True - node_path = self.get_node_path(node) - inp_path = Path(m).resolve() - if inp_path.is_dir(): - if node_path and inp_path in node_path.parents: - return True - elif inp_path.is_file(): - if node_path and inp_path == node_path: - return True - return False - - def _filter_model(self, node: ManifestNode) -> bool: - """Filter a model node by fqn or models depending on input.""" - if self.models: - filter_method = self._filter_model_by_models - elif self.fqn: - filter_method = self._filter_model_by_fqn - else: - # FIXME: make this more concise - def _filter_method(_): - return True - - filter_method = _filter_method - - return ( - node.resource_type in (NodeType.Model, NodeType.Source) - and node.package_name == self.project_name - and not ( - node.resource_type == NodeType.Model and node.config.materialized == "ephemeral" - ) - and filter_method(node) - ) - - def filtered_models( - self, subset: MutableMapping[str, ManifestNode] | None = None - ) -> Iterator[tuple[str, ManifestNode]]: - """Iterate over models in the manifest, applying filters.""" - items = ( - subset.items() - if subset - else chain(self.manifest.nodes.items(), self.manifest.sources.items()) - ) - for unique_id, dbt_node in items: - if self._filter_model(dbt_node): - yield unique_id, dbt_node - - @staticmethod - def get_node_path(node: ManifestNode) -> Path | None: - """Get the resolved path for a node.""" - if node.original_file_path: - return Path(node.root_path, node.original_file_path).resolve() - return None - - @staticmethod - def get_patch_path(node: ManifestNode) -> Path | None: - """Get the resolved path for a node's patch (YAML) file.""" - if node.patch_path: - return Path(node.patch_path.split("://")[-1]) - return None - - def get_columns_meta( - self, catalog_key: CatalogKey, output_to_lower: bool = False - ) -> dict[str, ColumnMetadata]: - """ - Resolve columns metadata (type, comment, etc.) either from an external CatalogArtifact - or from a live introspection query with the adapter. - """ - columns = OrderedDict() - blacklist = self._config.vars.get("dbt-osmosis", {}).get("_blacklist", []) - # if catalog is loaded: - if self.catalog: - # Attempt to match node in catalog - cat_objs = {**self.catalog.nodes, **self.catalog.sources} - matched = [ - obj for key, obj in cat_objs.items() if key.split(".")[-1] == catalog_key.name - ] - if matched: - for col in matched[0].columns.values(): - if any(re.match(pat, col.name) for pat in blacklist): - continue - columns[ - column_casing(col.name, self._config.credentials.type, output_to_lower) - ] = ColumnMetadata( - name=column_casing( - col.name, self._config.credentials.type, output_to_lower - ), - type=col.type, - index=col.index, - comment=col.comment, - ) - return columns - - # fallback to adapter-based introspection - with self.adapter.connection_named("dbt-osmosis"): - table = self.adapter.get_relation( - catalog_key.database, catalog_key.schema, catalog_key.name - ) - if not table: - return columns - try: - for c in self.adapter.get_columns_in_relation(table): - if any(re.match(p, c.name) for p in blacklist): - continue - col_cased = column_casing( - c.name, self._config.credentials.type, output_to_lower - ) - columns[col_cased] = ColumnMetadata( - name=col_cased, - type=c.dtype - if not ( - c.is_numeric() - and self.numeric_precision - or c.is_string() - and self.char_length - ) - else c.data_type, - index=None, - comment=getattr(c, "comment", None), - ) - if hasattr(c, "flatten"): - for exp in c.flatten(): - if any(re.match(p, exp.name) for p in blacklist): - continue - col_exp_cased = column_casing( - exp.name, self._config.credentials.type, output_to_lower - ) - columns[col_exp_cased] = ColumnMetadata( - name=col_exp_cased, - type=exp.dtype - if not ( - exp.is_numeric() - and self.numeric_precision - or exp.is_string() - and self.char_length - ) - else exp.data_type, - index=None, - comment=getattr(exp, "comment", None), - ) - except Exception as e: - logger().info(f"Could not resolve columns for {catalog_key}: {e}") - return columns - - def get_catalog_key(self, node: ManifestNode) -> CatalogKey: - if node.resource_type == NodeType.Source: - return CatalogKey(node.database, node.schema, getattr(node, "identifier", node.name)) - return CatalogKey(node.database, node.schema, getattr(node, "alias", node.name)) - - def propagate_documentation_downstream( - self, force_inheritance: bool = False, output_to_lower: bool = False - ) -> None: - schema_map = self.build_schema_folder_mapping(output_to_lower) - futures = [] - with self.adapter.connection_named("dbt-osmosis"): - for unique_id, node in self.filtered_models(): - futures.append( - self._pool.submit( - self._run, unique_id, node, schema_map, force_inheritance, output_to_lower - ) - ) - wait(futures) - - def build_schema_folder_mapping(self, output_to_lower: bool) -> dict[str, SchemaFileLocation]: - """ - Build a mapping of model unique_id -> (target schema yml path, existing path) - """ - self.bootstrap_sources(output_to_lower) - out = {} - for uid, node in self.filtered_models(): - sc_path = self.get_schema_path(node) - target_sc_path = self.get_target_schema_path(node) - out[uid] = SchemaFileLocation( - target=target_sc_path.resolve(), - current=sc_path.resolve() if sc_path else None, - node_type=node.resource_type, - ) - return out - - def bootstrap_sources(self, output_to_lower: bool = False) -> None: - """ - Quick approach: if the user has declared sources in 'dbt-osmosis' vars, - create or augment the schema files for them. For brevity, direct approach only. - """ - performed_disk_mutation = False - spec_dict = self._config.vars.get("dbt-osmosis", {}) - blacklist = spec_dict.get("_blacklist", []) - - for source, spec in spec_dict.items(): - if source == "_blacklist": - continue - if isinstance(spec, str): - schema = source - database = self._config.credentials.database - path = spec - elif isinstance(spec, dict): - schema = spec.get("schema", source) - database = spec.get("database", self._config.credentials.database) - path = spec["path"] - else: - continue - - # Check if source in manifest - dbt_node = next( - (s for s in self.manifest.sources.values() if s.source_name == source), None - ) - if not dbt_node: - # create file with tables from introspection - sc_file = ( - Path(self._config.project_root) - / self._config.model_paths[0] - / path.lstrip(os.sep) - ) - relations = self.adapter.list_relations(database=database, schema=schema) - tables_data = [] - for rel in relations: - cols = [] - for c in self.adapter.get_columns_in_relation(rel): - if any(re.match(p, c.name) for p in blacklist): - continue - col_cased = column_casing( - c.name, self._config.credentials.type, output_to_lower - ) - dt = c.dtype.lower() if output_to_lower else c.dtype - cols.append({"name": col_cased, "description": "", "data_type": dt}) - tables_data.append({"name": rel.identifier, "description": "", "columns": cols}) - - sc_file.parent.mkdir(parents=True, exist_ok=True) - with open(sc_file, "w") as f: - logger().info(f"Injecting source {source} => {sc_file}") - self.yaml_handler.dump( - { - "version": 2, - "sources": [ - { - "name": source, - "database": database, - "schema": schema, - "tables": tables_data, - } - ], - }, - f, - ) - self._mutations += 1 - performed_disk_mutation = True - - if performed_disk_mutation: - logger().info("Reloading project to pick up new sources.") - self.safe_parse_project(init=True) - - def get_schema_path(self, node: ManifestNode) -> Optional[Path]: - if node.resource_type == NodeType.Model and node.patch_path: - return Path(self._config.project_root).joinpath(node.patch_path.partition("://")[-1]) - if node.resource_type == NodeType.Source and hasattr(node, "source_name"): - return Path(self._config.project_root).joinpath(node.path) - return None - - def get_target_schema_path(self, node: ManifestNode) -> Path: - path_spec = self.get_osmosis_path_spec(node) - if not path_spec: - return Path(self._config.project_root, node.original_file_path) - sc = path_spec.format(node=node, model=node.name, parent=node.fqn[-2]) - parts = [] - if node.resource_type == NodeType.Source: - parts.append(self._config.model_paths[0]) - else: - parts.append(Path(node.original_file_path).parent) - if not (sc.endswith(".yml") or sc.endswith(".yaml")): - sc += ".yml" - parts.append(sc) - return Path(self._config.project_root, *parts) - - def get_osmosis_path_spec(self, node: ManifestNode) -> Optional[str]: - if node.resource_type == NodeType.Source: - source_specs = self._config.vars.get("dbt-osmosis", {}) - source_spec = source_specs.get(node.source_name) - if isinstance(source_spec, dict): - return source_spec.get("path") - return source_spec - osm_spec = node.unrendered_config.get("dbt-osmosis") - if not osm_spec: - raise MissingOsmosisConfig(f"Config not set for model {node.name}") - return osm_spec - - def get_columns(self, key: CatalogKey, to_lower: bool) -> list[str]: - return list(self.get_columns_meta(key, to_lower).keys()) - - def get_base_model(self, node: ManifestNode, to_lower: bool) -> dict[str, t.Any]: - cols = self.get_columns(self.get_catalog_key(node), to_lower) - return { - "name": node.name, - "columns": [{"name": c, "description": ""} for c in cols], - } - - def augment_existing_model( - self, doc: dict[str, t.Any], node: ManifestNode, to_lower: bool - ) -> dict[str, t.Any]: - existing_cols = [c["name"] for c in doc.get("columns", [])] - db_cols = self.get_columns(self.get_catalog_key(node), to_lower) - new_cols = [c for c in db_cols if not any(c.lower() == e.lower() for e in existing_cols)] - for col in new_cols: - doc.setdefault("columns", []).append({"name": col, "description": ""}) - logger().info(f"Injecting column {col} into {node.unique_id}") - return doc - - def draft_project_structure_update_plan( - self, output_to_lower: bool = False - ) -> dict[Path, SchemaFileMigration]: - blueprint = {} - logger().info("Building structure update plan.") - futs = [] - with self.adapter.connection_named("dbt-osmosis"): - for uid, sf_loc in self.build_schema_folder_mapping(output_to_lower).items(): - if not sf_loc.is_valid: - futs.append( - self._pool.submit(self._draft, sf_loc, uid, blueprint, output_to_lower) - ) - wait(futs) - return blueprint - - def _draft( - self, - sf_loc: SchemaFileLocation, - uid: str, - blueprint: dict[Path, SchemaFileMigration], - to_lower: bool, - ): - try: - with self._mutex: - if sf_loc.target not in blueprint: - blueprint[sf_loc.target] = SchemaFileMigration() - if sf_loc.node_type == NodeType.Model: - node = self.manifest.nodes[uid] - else: - node = self.manifest.sources[uid] - - if sf_loc.current is None: - # model not documented yet - with self._mutex: - if sf_loc.node_type == NodeType.Model: - blueprint[sf_loc.target].output["models"].append( - self.get_base_model(node, to_lower) - ) - else: - # We have existing doc, but we want to unify it into the new location - with self._mutex: - doc = self.yaml_handler.load(sf_loc.current) - if sf_loc.node_type == NodeType.Model: - for m in doc.get("models", []): - if m["name"] == node.name: - newm = self.augment_existing_model(m, node, to_lower) - with self._mutex: - blueprint[sf_loc.target].output["models"].append(newm) - blueprint[sf_loc.target].supersede.setdefault( - sf_loc.current, [] - ).append(node) - break - else: - for source in doc.get("sources", []): - if source["name"] == node.source_name: - for table in source["tables"]: - if table["name"] == node.name: - newt = self.augment_existing_model(table, node, to_lower) - with self._mutex: - if not any( - s["name"] == node.source_name - for s in blueprint[sf_loc.target].output["sources"] - ): - blueprint[sf_loc.target].output["sources"].append( - source - ) - for s in blueprint[sf_loc.target].output["sources"]: - if s["name"] == node.source_name: - for t2 in s["tables"]: - if t2["name"] == node.name: - t2.update(newt) - break - blueprint[sf_loc.target].supersede.setdefault( - sf_loc.current, [] - ).append(node) - break - except Exception as e: - logger().error(f"Drafting structure plan for {uid} failed: {e}") - raise e - - def cleanup_blueprint( - self, blueprint: dict[Path, SchemaFileMigration] - ) -> dict[Path, SchemaFileMigration]: - for k in list(blueprint.keys()): - out = blueprint[k].output - # remove empty models/sources - if "models" in out and not out["models"]: - del out["models"] - if "sources" in out and not out["sources"]: - del out["sources"] - if not out.get("models") and not out.get("sources"): - del blueprint[k] - return blueprint - - def commit_project_restructure_to_disk( - self, - blueprint: Optional[dict[Path, SchemaFileMigration]] = None, - output_to_lower: bool = False, - ) -> bool: - if not blueprint: - blueprint = self.draft_project_structure_update_plan(output_to_lower) - blueprint = self.cleanup_blueprint(blueprint) - if not blueprint: - logger().info("Project structure is already conformed.") - return False - self.pretty_print_restructure_plan(blueprint) - - for target, struct in blueprint.items(): - if not target.exists(): - logger().info(f"Creating schema file {target}") - if not self.dry_run: - target.parent.mkdir(parents=True, exist_ok=True) - target.touch() - self.yaml_handler.dump(struct.output, target) - self._mutations += 1 - else: - logger().info(f"Updating schema file {target}") - existing = self.yaml_handler.load(target) - if not existing: - existing = {"version": 2} - if "version" not in existing: - existing["version"] = 2 - - if "models" in struct.output: - existing.setdefault("models", []).extend(struct.output["models"]) - if "sources" in struct.output: - existing.setdefault("sources", []).extend(struct.output["sources"]) - if not self.dry_run: - self.yaml_handler.dump(existing, target) - self._mutations += 1 - - # handle superseded - for sup_path, nodes in struct.supersede.items(): - raw_sc = self.yaml_handler.load(sup_path) - # figure out which ones to remove - to_remove_models = {n.name for n in nodes if n.resource_type == NodeType.Model} - to_remove_sources = { - (n.source_name, n.name) for n in nodes if n.resource_type == NodeType.Source - } - - keep_models = [] - for m in raw_sc.get("models", []): - if m["name"] not in to_remove_models: - keep_models.append(m) - raw_sc["models"] = keep_models - - # remove relevant source tables - keep_src = [] - for s in raw_sc.get("sources", []): - keep_tables = [] - for t_ in s.get("tables", []): - if (s["name"], t_["name"]) not in to_remove_sources: - keep_tables.append(t_) - if keep_tables: - s["tables"] = keep_tables - keep_src.append(s) - raw_sc["sources"] = keep_src - - # if file is empty => remove it - if (not raw_sc.get("models")) and (not raw_sc.get("sources")): - logger().info(f"Superseding entire file {sup_path}") - if not self.dry_run: - sup_path.unlink(missing_ok=True) - if sup_path.parent.exists() and not any(sup_path.parent.iterdir()): - sup_path.parent.rmdir() - else: - if not self.dry_run: - self.yaml_handler.dump(raw_sc, sup_path) - self._mutations += 1 - logger().info(f"Migrated doc from {sup_path} -> {target}") - return True - - @staticmethod - def pretty_print_restructure_plan(blueprint: dict[Path, SchemaFileMigration]) -> None: - summary = [] - for plan in blueprint.keys(): - files_superseded = [s.name for s in blueprint[plan].supersede] or ["CREATE"] - summary.append((files_superseded, "->", plan.name)) - logger().info(summary) - - ############################################################################ - # Column Sync - ############################################################################ - @staticmethod - def get_column_sets( - database_cols: Iterable[str], - yaml_cols: Iterable[str], - documented_cols: Iterable[str], - ) -> t.tuple[list[str], list[str], list[str]]: - """ - Return: (missing_in_yaml, undocumented_in_yaml, extra_in_yaml) - """ - missing = [x for x in database_cols if x.lower() not in (y.lower() for y in yaml_cols)] - undocumented = [ - x for x in database_cols if x.lower() not in (y.lower() for y in documented_cols) - ] - extra = [x for x in yaml_cols if x.lower() not in (y.lower() for y in database_cols)] - return missing, undocumented, extra - - def _run( - self, - uid: str, - node: ManifestNode, - schema_map: dict[str, SchemaFileLocation], - force_inheritance: bool, - output_to_lower: bool, - ): - try: - with self._mutex: - logger().info(f"Processing model: {uid}") - sf_loc = schema_map.get(uid) - if not sf_loc or not sf_loc.current: - with self._mutex: - logger().info(f"No schema file for {uid}, skipping.") - return - db_cols_list = self.get_columns(self.get_catalog_key(node), output_to_lower) - if not db_cols_list: - with self._mutex: - logger().info( - f"No database columns found for {uid}, falling back to yaml columns." - ) - db_cols_list = list(node.columns.keys()) - - db_cols_set = set(db_cols_list) - yaml_cols_list = list(node.columns.keys()) - documented_cols_set = { - c - for c, info in node.columns.items() - if info.description and info.description not in self.placeholders - } - - missing, undocumented, extra = self.get_column_sets( - db_cols_list, yaml_cols_list, documented_cols_set - ) - - if force_inheritance: - undocumented = list(db_cols_set) # treat all as needing doc - - with self._mutex: - sc_data = self.yaml_handler.load(sf_loc.current) - section = self.maybe_get_section_from_schema_file(sc_data, node) - if not section: - logger().info(f"No section in {sf_loc.current} for {uid}") - return - # Perform updates - n_added = n_doc_inh = n_removed = n_type_changed = n_desc_changed = 0 - if any([missing, undocumented, extra]): - ( - n_added, - n_doc_inh, - n_removed, - n_type_changed, - n_desc_changed, - ) = self.update_schema_file_and_node( - missing, - undocumented, - extra, - node, - section, - self.get_columns_meta(self.get_catalog_key(node), output_to_lower), - output_to_lower, - ) - - reorder = tuple(db_cols_list) != tuple(yaml_cols_list) - if reorder: - - def _sort(c: dict[str, t.Any]) -> int: - try: - return db_cols_list.index( - column_casing( - c["name"], self._config.credentials.type, output_to_lower - ) - ) - except ValueError: - return 999999 - - section["columns"].sort(key=_sort) - - if ( - n_added + n_doc_inh + n_removed + n_type_changed + n_desc_changed or reorder - ) and not self.dry_run: - self.yaml_handler.dump(sc_data, sf_loc.current) - self._mutations += 1 - logger().info(f"Updated {sf_loc.current}") - else: - logger().info(f"{sf_loc.current} is up to date") - - except Exception as e: - logger().error(f"Error while processing {uid}: {e}") - raise e - - @staticmethod - def maybe_get_section_from_schema_file( - yaml_data: dict[str, t.Any], node: ManifestNode - ) -> Optional[dict[str, t.Any]]: - if node.resource_type == NodeType.Source: - for s in yaml_data.get("sources", []): - for t_ in s.get("tables", []): - if s["name"] == node.source_name and t_["name"] == node.name: - return t_ - else: - for m in yaml_data.get("models", []): - if m["name"] == node.name: - return m - return None - - @staticmethod - def remove_columns_not_in_database( - extra_columns: Iterable[str], - node: ManifestNode, - yaml_section: dict[str, t.Any], - ) -> int: - c = 0 - for e in extra_columns: - node.columns.pop(e, None) - yaml_section["columns"] = [col for col in yaml_section["columns"] if col["name"] != e] - c += 1 - return c - - def update_columns_attribute( - self, - node: ManifestNode, - yaml_section: dict[str, t.Any], - db_meta: dict[str, ColumnMetadata], - attr: str, - meta_key: str, - skip_flag: bool, - output_to_lower: bool, - ) -> int: - if skip_flag: - return 0 - changed = 0 - for col_name, col_meta in db_meta.items(): - if col_name in node.columns: - new_val = getattr(col_meta, meta_key, "") or "" - old_val = getattr(node.columns[col_name], attr, "") - if new_val and old_val != new_val: - setattr(node.columns[col_name], attr, new_val) - for c in yaml_section["columns"]: - if ( - column_casing(c["name"], self._config.credentials.type, output_to_lower) - == col_name - ): - if output_to_lower and isinstance(new_val, str): - new_val = new_val.lower() - c[attr] = new_val - changed += 1 - return changed - - def add_missing_cols_to_node_and_model( - self, - missing_cols: Iterable[str], - node: ManifestNode, - yaml_section: dict[str, t.Any], - db_meta: dict[str, ColumnMetadata], - output_to_lower: bool, - ) -> int: - c = 0 - for col in missing_cols: - if col not in db_meta: - continue - dtype = db_meta[col].type or "" - desc = db_meta[col].comment or "" - meta_name = col.lower() if output_to_lower else col - meta_type = dtype.lower() if output_to_lower else dtype - node.columns[col] = ColumnInfo.from_dict( - {"name": meta_name, "description": desc, "data_type": meta_type} - ) - yaml_section.setdefault("columns", []).append( - {"name": meta_name, "description": desc, "data_type": meta_type} - ) - c += 1 - return c - - def update_schema_file_and_node( - self, - missing_cols: Iterable[str], - undocumented_cols: Iterable[str], - extra_cols: Iterable[str], - node: ManifestNode, - yaml_section: dict[str, t.Any], - db_meta: dict[str, ColumnMetadata], - output_to_lower: bool, - ) -> t.tuple[int, int, int, int, int]: - n_added = 0 - n_doc_inherited = 0 - n_removed = 0 - n_type_updated = 0 - n_desc_updated = 0 - - if not self.skip_add_columns: - n_added = self.add_missing_cols_to_node_and_model( - missing_cols, node, yaml_section, db_meta, output_to_lower - ) - - knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( - self.manifest, - node, - self.placeholders, - self._config.project_root, - self.use_unrendered_descriptions, - ) - n_doc_inherited = ( - ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( - undocumented_cols, - node, - yaml_section, - knowledge, - self.skip_add_tags, - self.skip_merge_meta, - self.add_progenitor_to_meta, - self.add_inheritance_for_specified_keys, - ) - ) - n_type_updated = self.update_columns_attribute( - node, - yaml_section, - db_meta, - attr="data_type", - meta_key="type", - skip_flag=self.skip_add_data_types, - output_to_lower=output_to_lower, - ) - # We piggyback the "catalog_file" presence as "update description?" flag in original code - n_desc_updated = self.update_columns_attribute( - node, - yaml_section, - db_meta, - attr="description", - meta_key="comment", - skip_flag=(self.catalog_file is None), - output_to_lower=output_to_lower, - ) - n_removed = self.remove_columns_not_in_database(extra_cols, node, yaml_section) - return n_added, n_doc_inherited, n_removed, n_type_updated, n_desc_updated From 7bc28619b96c621e9bb080cb10f496520b4e589d Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Mon, 30 Dec 2024 22:55:23 -0700 Subject: [PATCH 21/46] feat: make usage example more fun --- src/dbt_osmosis/core/osmosis.py | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index e54c0f36..df601d39 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -4,7 +4,6 @@ import argparse import json -import logging import os import re import threading @@ -19,7 +18,6 @@ from pathlib import Path import dbt.flags as dbt_flags -import rich.logging import ruamel.yaml from dbt.adapters.base.column import Column as BaseColumn from dbt.adapters.base.impl import BaseAdapter @@ -1302,18 +1300,20 @@ def run_example_compilation_flow() -> None: plan = draft_restructure_delta_plan(yaml_context) steps = ( - step - for step in ( - create_missing_source_yamls(yaml_context), - apply_restructure_plan(yaml_context, plan, confirm=True), - inject_missing_columns(yaml_context), - remove_columns_not_in_database(yaml_context), - inherit_upstream_column_knowledge(yaml_context), - sort_columns_as_in_database(yaml_context), - commit_yamls(yaml_context), - ) + (create_missing_source_yamls, (yaml_context,), {}), + (apply_restructure_plan, (yaml_context, plan), {"confirm": True}), + (inject_missing_columns, (yaml_context,), {}), + (remove_columns_not_in_database, (yaml_context,), {}), + (inherit_upstream_column_knowledge, (yaml_context,), {}), + (sort_columns_as_in_database, (yaml_context,), {}), + (commit_yamls, (yaml_context,), {}), ) + steps = iter(t.cast(t.Any, steps)) DONE = object() - while next(steps, DONE) is not DONE: - logger.info("Completed step.") + nr = 1 + while (step := next(steps, DONE)) is not DONE: + step, args, kwargs = step # pyright: ignore[reportGeneralTypeIssues] + step(*args, **kwargs) + logger.info("Completed step %d (%s).", nr, getattr(t.cast(object, step), "__name__")) + nr += 1 From a8e86285fe59fdc1a624a0177367cc494e02cf03 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 00:23:44 -0700 Subject: [PATCH 22/46] chore: update logger and allow proxying attr access to default logger --- .../core/{log_controller.py => logger.py} | 45 +++++++++++-------- src/dbt_osmosis/core/osmosis.py | 2 +- 2 files changed, 27 insertions(+), 20 deletions(-) rename src/dbt_osmosis/core/{log_controller.py => logger.py} (66%) diff --git a/src/dbt_osmosis/core/log_controller.py b/src/dbt_osmosis/core/logger.py similarity index 66% rename from src/dbt_osmosis/core/log_controller.py rename to src/dbt_osmosis/core/logger.py index 9c06c6da..b50491fc 100644 --- a/src/dbt_osmosis/core/log_controller.py +++ b/src/dbt_osmosis/core/logger.py @@ -1,8 +1,10 @@ -"""Logging module for dbt-osmosis. This module provides a logger factory that can be used to create loggers with rotating log files and console streaming. The logger is configured with a default log level of INFO and a default log file format of "time — name — level — message". The default log file path is `~/.dbt-osmosis/logs` and the default log file name is the logger name. The logger is configured to not propagate messages to the root logger.""" +# pyright: reportAny=false +"""Logging module for dbt-osmosis. The module itself can be used as a logger as it proxies calls to the default LOGGER instance.""" from __future__ import annotations import logging +import typing as t from functools import lru_cache from logging.handlers import RotatingFileHandler from pathlib import Path @@ -14,7 +16,7 @@ _LOGGING_LEVEL = logging.INFO -def rotating_log_handler(name: str, path: Path, formatter: str) -> RotatingFileHandler: +def get_rotating_log_handler(name: str, path: Path, formatter: str) -> RotatingFileHandler: """This handler writes warning and higher level outputs to logs in a home .dbt-osmosis directory rotating them as needed""" path.mkdir(parents=True, exist_ok=True) handler = RotatingFileHandler( @@ -28,11 +30,11 @@ def rotating_log_handler(name: str, path: Path, formatter: str) -> RotatingFileH @lru_cache(maxsize=10) -def logger( +def get_logger( name: str = "dbt-osmosis", - level: int | str | None = None, - path: Path | None = None, - formatter: str | None = None, + level: int | str = _LOGGING_LEVEL, + path: Path = _LOG_PATH, + formatter: str = _LOG_FILE_FORMAT, ) -> logging.Logger: """Builds and caches loggers. Can be configured with module level attributes or on a call by call basis. @@ -49,16 +51,10 @@ def logger( """ if isinstance(level, str): level = getattr(logging, level, logging.INFO) - if level is None: - level = _LOGGING_LEVEL - if path is None: - path = _LOG_PATH - if formatter is None: - formatter = _LOG_FILE_FORMAT - _logger = logging.getLogger(name) - _logger.setLevel(level) - _logger.addHandler(rotating_log_handler(name, path, formatter)) - _logger.addHandler( + logger = logging.getLogger(name) + logger.setLevel(level) + logger.addHandler(get_rotating_log_handler(name, path, formatter)) + logger.addHandler( RichHandler( level=level, rich_tracebacks=True, @@ -66,9 +62,20 @@ def logger( show_time=False, ) ) - _logger.propagate = False - return _logger + logger.propagate = False + return logger -LOGGER = logger() +LOGGER = get_logger() """Default logger for dbt-osmosis""" + + +class LogMethod(t.Protocol): + """Protocol for logger methods""" + + def __call__(self, msg: str, /, *args: t.Any, **kwds: t.Any) -> t.Any: ... + + +def __getattr__(name: str) -> LogMethod: + func = getattr(LOGGER, name) + return func diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index df601d39..21f21ada 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -48,7 +48,7 @@ from dbt_common.clients.system import get_env from dbt_common.context import set_invocation_context -from dbt_osmosis.core.log_controller import LOGGER as logger +import dbt_osmosis.core.logger as logger disable_tracking() From bb9116f661569c78fa454aada05cd287d8356cb4 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 01:11:29 -0700 Subject: [PATCH 23/46] feat: add plugin system for fuzzy matching columns --- pyproject.toml | 2 +- src/dbt_osmosis/core/osmosis.py | 71 +++++++++++++++++++++++++++++++-- uv.lock | 4 +- 3 files changed, 70 insertions(+), 7 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index da6eecda..7852dd47 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,7 +28,7 @@ dependencies = [ "dbt-core>=1.8,<1.10", "ruamel.yaml>=0.17", "rich>=10", - "GitPython>3,<4", + "pluggy>=1.5.0", ] [project.optional-dependencies] diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 21f21ada..364866d4 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -6,6 +6,7 @@ import json import os import re +import sys import threading import time import typing as t @@ -14,10 +15,12 @@ from collections.abc import Iterable, Iterator from concurrent.futures import FIRST_EXCEPTION, Future, ThreadPoolExecutor, wait from dataclasses import dataclass, field +from functools import lru_cache, partial from itertools import chain from pathlib import Path import dbt.flags as dbt_flags +import pluggy import ruamel.yaml from dbt.adapters.base.column import Column as BaseColumn from dbt.adapters.base.impl import BaseAdapter @@ -1151,11 +1154,17 @@ def inherit_upstream_column_knowledge( yaml_section = _get_member_yaml(context, node) column_knowledge_graph = _build_column_knowledge_grap(context, node) + kwargs = None for name, node_column in node.columns.items(): - # TODO: This is our graph "lookup", and our primary opportunity to apply user defined fuzzing - # so we should make the composable and robust (maybe a plugin system for fuzzing? it's an important problem) - kwargs = column_knowledge_graph.get(name) - if kwargs is None: + variants: list[str] = [name] + pm = get_plugin_manager() + for v in pm.hook.get_candidates(name=name, node=node, context=context.project): + variants.extend(v) + for variant in variants: + kwargs = column_knowledge_graph.get(variant) + if kwargs is not None: + break + else: continue updated_metadata = {k: v for k, v in kwargs.items() if v is not None and k in inheritable} @@ -1274,6 +1283,60 @@ def sort_columns_alphabetically( context.register_mutations(1) +# Fuzzy Plugins +# ============= + +_hookspec = pluggy.HookspecMarker("dbt-osmosis") +hookimpl = pluggy.HookimplMarker("dbt-osmosis") + + +@_hookspec +def get_candidates(name: str, node: ResultNode, context: DbtProjectContext) -> list[str]: # pyright: ignore[reportUnusedParameter] + """Get a list of candidate names for a column.""" + raise NotImplementedError + + +class FuzzyCaseMatching: + @hookimpl + def get_candidates(self, name: str, node: ResultNode, context: DbtProjectContext) -> list[str]: + """Get a list of candidate names for a column based on case variants.""" + _ = node, context + variants = [ + name.lower(), # lowercase + name.upper(), # UPPERCASE + cc := re.sub("_(.)", lambda m: m.group(1).upper(), name), # camelCase + cc[0].upper() + cc[1:], # PascalCase + ] + return variants + + +class FuzzyPrefixMatching: + @hookimpl + def get_candidates(self, name: str, node: ResultNode, context: DbtProjectContext) -> list[str]: + """Get a list of candidate names for a column excluding a prefix.""" + _ = context + variants = [] + prefix = t.cast( + str, + node.config.extra.get( + "dbt-osmosis-prefix", node.unrendered_config.get("dbt-osmosis-prefix") + ), + ) + if prefix and name.startswith(prefix): + variants.append(name[len(prefix) :]) + return variants + + +@lru_cache(maxsize=None) +def get_plugin_manager(): + """Get the pluggy plugin manager for dbt-osmosis.""" + manager = pluggy.PluginManager("dbt-osmosis") + _ = manager.register(FuzzyCaseMatching()) + _ = manager.register(FuzzyPrefixMatching()) + _ = manager.load_setuptools_entrypoints("dbt-osmosis") + return manager + + # NOTE: usage example of the more FP style module below diff --git a/uv.lock b/uv.lock index 2c0826ae..95fcd3e9 100644 --- a/uv.lock +++ b/uv.lock @@ -495,7 +495,7 @@ source = { editable = "." } dependencies = [ { name = "click" }, { name = "dbt-core" }, - { name = "gitpython" }, + { name = "pluggy" }, { name = "rich" }, { name = "ruamel-yaml" }, ] @@ -537,9 +537,9 @@ requires-dist = [ { name = "dbt-postgres", marker = "extra == 'postgres'", specifier = ">=1.0.0" }, { name = "dbt-sqlite", marker = "extra == 'sqlite'", specifier = ">=1.0.0" }, { name = "feedparser", marker = "extra == 'workbench'", specifier = "~=6.0.10" }, - { name = "gitpython", specifier = ">3,<4" }, { name = "mypy", marker = "extra == 'dev'", specifier = ">=0.910" }, { name = "openai", marker = "extra == 'openai'", specifier = ">0.28.0" }, + { name = "pluggy", specifier = ">=1.5.0" }, { name = "pre-commit", marker = "extra == 'dev'", specifier = ">3.0.0" }, { name = "pylint", marker = "extra == 'dev'", specifier = ">=2.11.1" }, { name = "pytest", marker = "extra == 'dev'", specifier = "~=7.4.2" }, From 7f81f1a1991f8301d23809e4f0792f9bd8641366 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 01:36:56 -0700 Subject: [PATCH 24/46] chore: update tests to show initial parity and serve as reference example --- src/dbt_osmosis/core/osmosis.py | 5 +- tests/test_yaml_context.py | 126 ++++++++++++++++++++++++++++++++ tests/test_yaml_manager.py | 117 ----------------------------- 3 files changed, 128 insertions(+), 120 deletions(-) create mode 100644 tests/test_yaml_context.py delete mode 100644 tests/test_yaml_manager.py diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 364866d4..a6639087 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -6,7 +6,6 @@ import json import os import re -import sys import threading import time import typing as t @@ -15,7 +14,7 @@ from collections.abc import Iterable, Iterator from concurrent.futures import FIRST_EXCEPTION, Future, ThreadPoolExecutor, wait from dataclasses import dataclass, field -from functools import lru_cache, partial +from functools import lru_cache from itertools import chain from pathlib import Path @@ -547,7 +546,7 @@ def normalize_column_name(column: str, credentials_type: str) -> str: return column -def _maybe_use_precise_dtype(col: t.Any, settings: YamlRefactorSettings) -> str: +def _maybe_use_precise_dtype(col: BaseColumn, settings: YamlRefactorSettings) -> str: """Use the precise data type if enabled in the settings.""" if (col.is_numeric() and settings.numeric_precision) or ( col.is_string() and settings.char_length diff --git a/tests/test_yaml_context.py b/tests/test_yaml_context.py new file mode 100644 index 00000000..857918e2 --- /dev/null +++ b/tests/test_yaml_context.py @@ -0,0 +1,126 @@ +from unittest import mock + +import pytest + +from dbt_osmosis.core.osmosis import ( + DbtConfiguration, + YamlRefactorContext, + YamlRefactorSettings, + apply_restructure_plan, + create_dbt_project_context, + create_missing_source_yamls, + draft_restructure_delta_plan, + get_columns, + get_table_ref, + inherit_upstream_column_knowledge, + reload_manifest, +) + + +@pytest.fixture(scope="module") +def yaml_context() -> YamlRefactorContext: + # initializing the context is a sanity test in and of itself + c = DbtConfiguration(project_dir="demo_duckdb", profiles_dir="demo_duckdb") + c.vars = {"dbt-osmosis": {}} + project = create_dbt_project_context(c) + context = YamlRefactorContext( + project, settings=YamlRefactorSettings(use_unrendered_descriptions=True, dry_run=True) + ) + return context + + +# Sanity tests + + +def test_reload_manifest(yaml_context: YamlRefactorContext): + reload_manifest(yaml_context.project) + + +def test_create_missing_source_yamls(yaml_context: YamlRefactorContext): + create_missing_source_yamls(yaml_context) + + +def test_draft_restructure_delta_plan(yaml_context: YamlRefactorContext): + assert draft_restructure_delta_plan(yaml_context) is not None + + +def test_apply_restructure_plan(yaml_context: YamlRefactorContext): + plan = draft_restructure_delta_plan(yaml_context) + apply_restructure_plan(yaml_context, plan, confirm=False) + + +def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): + inherit_upstream_column_knowledge(yaml_context) + + +# Column type + settings tests + + +def _customer_column_types(yaml_context: YamlRefactorContext) -> dict[str, str]: + node = next(n for n in yaml_context.project.manifest.nodes.values() if n.name == "customers") + assert node + + catalog_key = get_table_ref(node) + columns = get_columns(yaml_context, catalog_key) + assert columns + + column_types = dict({name: meta.type for name, meta in columns.items()}) + assert column_types + return column_types + + +def test_get_columns_meta(yaml_context: YamlRefactorContext): + with mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}): + assert _customer_column_types(yaml_context) == { + # in DuckDB decimals always have presision and scale + "customer_average_value": "DECIMAL(18,3)", + "customer_id": "INTEGER", + "customer_lifetime_value": "DOUBLE", + "first_name": "VARCHAR", + "first_order": "DATE", + "last_name": "VARCHAR", + "most_recent_order": "DATE", + "number_of_orders": "BIGINT", + } + + +def test_get_columns_meta_char_length(): + yaml_context = YamlRefactorContext( + project=create_dbt_project_context( + DbtConfiguration(project_dir="demo_duckdb", profiles_dir="demo_duckdb") + ), + settings=YamlRefactorSettings(char_length=True, dry_run=True), + ) + with mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}): + assert _customer_column_types(yaml_context) == { + # in DuckDB decimals always have presision and scale + "customer_average_value": "DECIMAL(18,3)", + "customer_id": "INTEGER", + "customer_lifetime_value": "DOUBLE", + "first_name": "character varying(256)", + "first_order": "DATE", + "last_name": "character varying(256)", + "most_recent_order": "DATE", + "number_of_orders": "BIGINT", + } + + +def test_get_columns_meta_numeric_precision(): + yaml_context = YamlRefactorContext( + project=create_dbt_project_context( + DbtConfiguration(project_dir="demo_duckdb", profiles_dir="demo_duckdb") + ), + settings=YamlRefactorSettings(numeric_precision=True, dry_run=True), + ) + with mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}): + assert _customer_column_types(yaml_context) == { + # in DuckDB decimals always have presision and scale + "customer_average_value": "DECIMAL(18,3)", + "customer_id": "INTEGER", + "customer_lifetime_value": "DOUBLE", + "first_name": "VARCHAR", + "first_order": "DATE", + "last_name": "VARCHAR", + "most_recent_order": "DATE", + "number_of_orders": "BIGINT", + } diff --git a/tests/test_yaml_manager.py b/tests/test_yaml_manager.py deleted file mode 100644 index 024dfd49..00000000 --- a/tests/test_yaml_manager.py +++ /dev/null @@ -1,117 +0,0 @@ -# TODO: refactor this test -# from pathlib import Path -# -# import pytest -# from dbt.contracts.results import CatalogKey -# -# from dbt_osmosis.core.osmosis import DbtYamlManager -# -# -# @pytest.fixture(scope="module") -# def yaml_manager() -> DbtYamlManager: -# return DbtYamlManager(project_dir="demo_duckdb", profiles_dir="demo_duckdb", dry_run=True) -# -# -# def test_initialize_adapter(yaml_manager: DbtYamlManager): -# yaml_manager.initialize_adapter() -# -# -# def test_list(yaml_manager: DbtYamlManager): -# yaml_manager.list() -# -# -# def test_test(yaml_manager: DbtYamlManager): -# yaml_manager.test() -# -# -# def test_run(yaml_manager: DbtYamlManager): -# yaml_manager.run() -# -# -# def test_build(yaml_manager: DbtYamlManager): -# yaml_manager.build() -# -# -# def test_parse_project(yaml_manager: DbtYamlManager): -# yaml_manager.parse_project() -# -# -# def test_safe_parse_project(yaml_manager: DbtYamlManager): -# yaml_manager.safe_parse_project() -# -# -# def test_bootstrap_sources(yaml_manager: DbtYamlManager): -# yaml_manager.bootstrap_sources() -# -# -# def test_draft_project_structure_update_plan(yaml_manager: DbtYamlManager): -# yaml_manager.draft_project_structure_update_plan() -# -# -# def test_commit_project_restructure_to_disk(yaml_manager: DbtYamlManager): -# yaml_manager.commit_project_restructure_to_disk() -# -# -# def test_propagate_documentation_downstream(yaml_manager: DbtYamlManager): -# yaml_manager.propagate_documentation_downstream() -# -# -# def _customer_column_types(yaml_manager: DbtYamlManager) -> dict[str, str]: -# node = next(n for n in yaml_manager.manifest.nodes.values() if n.name == "customers") -# assert node -# -# catalog_key = yaml_manager.get_catalog_key(node) -# columns = yaml_manager.get_columns_meta(catalog_key) -# assert columns -# -# column_types = dict({name: meta.type for name, meta in columns.items()}) -# assert column_types -# return column_types -# -# -# def test_get_columns_meta(yaml_manager: DbtYamlManager): -# assert _customer_column_types(yaml_manager) == { -# # in DuckDB decimals always have presision and scale -# "customer_average_value": "DECIMAL(18,3)", -# "customer_id": "INTEGER", -# "customer_lifetime_value": "DOUBLE", -# "first_name": "VARCHAR", -# "first_order": "DATE", -# "last_name": "VARCHAR", -# "most_recent_order": "DATE", -# "number_of_orders": "BIGINT", -# } -# -# -# def test_get_columns_meta_char_length(): -# yaml_manager = DbtYamlManager( -# project_dir="demo_duckdb", profiles_dir="demo_duckdb", char_length=True, dry_run=True -# ) -# assert _customer_column_types(yaml_manager) == { -# # in DuckDB decimals always have presision and scale -# "customer_average_value": "DECIMAL(18,3)", -# "customer_id": "INTEGER", -# "customer_lifetime_value": "DOUBLE", -# "first_name": "character varying(256)", -# "first_order": "DATE", -# "last_name": "character varying(256)", -# "most_recent_order": "DATE", -# "number_of_orders": "BIGINT", -# } -# -# -# def test_get_columns_meta_numeric_precision(): -# yaml_manager = DbtYamlManager( -# project_dir="demo_duckdb", profiles_dir="demo_duckdb", numeric_precision=True, dry_run=True -# ) -# assert _customer_column_types(yaml_manager) == { -# # in DuckDB decimals always have presision and scale -# "customer_average_value": "DECIMAL(18,3)", -# "customer_id": "INTEGER", -# "customer_lifetime_value": "DOUBLE", -# "first_name": "VARCHAR", -# "first_order": "DATE", -# "last_name": "VARCHAR", -# "most_recent_order": "DATE", -# "number_of_orders": "BIGINT", -# } From e792e793389907483e20a46295747fbd4e764019 Mon Sep 17 00:00:00 2001 From: Alexander Butler <41213451+z3z1ma@users.noreply.github.com> Date: Tue, 31 Dec 2024 01:37:51 -0700 Subject: [PATCH 25/46] Update pyproject.toml Co-authored-by: Yasuhisa Yoshida --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7852dd47..abf758c9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,7 +58,7 @@ dev = [ [tool.black] line-length = 100 -target-version = ["py39", "py310", "py311"] +target-version = ["py39", "py310", "py311", "py312"] preview = true [tool.isort] From cebbb08c762e79024979f5997227b803d5515663 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 01:43:29 -0700 Subject: [PATCH 26/46] fix: get tests using venv --- .github/workflows/tests.yml | 6 +- pyproject.toml | 9 +- uv.lock | 190 ++++++------------------------------ 3 files changed, 36 insertions(+), 169 deletions(-) diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 512f0e87..81656da3 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -29,14 +29,14 @@ jobs: pip install --constraint=.github/workflows/constraints.txt pip pip --version - - name: Install Poetry + - name: Install uv run: | pip install --constraint=.github/workflows/constraints.txt uv uv --version - name: Install required packages run: | # install duckdb extras to be able to parse manifest - uv sync --extra duckdb + uv sync --extra duckdb --extra dev - name: Install dbt-core run: | @@ -48,4 +48,4 @@ jobs: - name: Run pytest run: | - uv tool run pytest + uv run pytest diff --git a/pyproject.toml b/pyproject.toml index abf758c9..e37808a5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,6 +35,7 @@ dependencies = [ duckdb = ["dbt-duckdb>=1.0.0"] sqlite = ["dbt-sqlite>=1.0.0"] postgres = ["dbt-postgres>=1.0.0"] + workbench = [ "streamlit>=1.20.0", "streamlit-ace>=0.1.0", @@ -45,13 +46,7 @@ workbench = [ openai = ["openai>0.28.0"] -dev = [ - "black>=21.9b0", - "mypy>=0.910", - "pylint>=2.11.1", - "pytest~=7.4.2", - "pre-commit>3.0.0", -] +dev = ["ruff~=0.8.4", "pytest~=8.3.4", "pre-commit>3.0.0"] [project.scripts] "dbt-osmosis" = "dbt_osmosis.main:cli" diff --git a/uv.lock b/uv.lock index 95fcd3e9..c9917abf 100644 --- a/uv.lock +++ b/uv.lock @@ -65,18 +65,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/7a/4daaf3b6c08ad7ceffea4634ec206faeff697526421c20f07628c7372156/anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352", size = 93052 }, ] -[[package]] -name = "astroid" -version = "3.3.8" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/80/c5/5c83c48bbf547f3dd8b587529db7cf5a265a3368b33e85e76af8ff6061d3/astroid-3.3.8.tar.gz", hash = "sha256:a88c7994f914a4ea8572fac479459f4955eeccc877be3f2d959a33273b0cf40b", size = 398196 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/07/28/0bc8a17d6cd4cc3c79ae41b7105a2b9a327c110e5ddd37a8a27b29a5c8a2/astroid-3.3.8-py3-none-any.whl", hash = "sha256:187ccc0c248bfbba564826c26f070494f7bc964fd286b6d9fff4420e55de828c", size = 275153 }, -] - [[package]] name = "attrs" version = "24.3.0" @@ -95,40 +83,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ed/20/bc79bc575ba2e2a7f70e8a1155618bb1301eaa5132a8271373a6903f73f8/babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b", size = 9587599 }, ] -[[package]] -name = "black" -version = "24.10.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "mypy-extensions" }, - { name = "packaging" }, - { name = "pathspec" }, - { name = "platformdirs" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/d8/0d/cc2fb42b8c50d80143221515dd7e4766995bd07c56c9a3ed30baf080b6dc/black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875", size = 645813 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/f3/465c0eb5cddf7dbbfe1fecd9b875d1dcf51b88923cd2c1d7e9ab95c6336b/black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812", size = 1623211 }, - { url = "https://files.pythonhosted.org/packages/df/57/b6d2da7d200773fdfcc224ffb87052cf283cec4d7102fab450b4a05996d8/black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea", size = 1457139 }, - { url = "https://files.pythonhosted.org/packages/6e/c5/9023b7673904a5188f9be81f5e129fff69f51f5515655fbd1d5a4e80a47b/black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f", size = 1753774 }, - { url = "https://files.pythonhosted.org/packages/e1/32/df7f18bd0e724e0d9748829765455d6643ec847b3f87e77456fc99d0edab/black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e", size = 1414209 }, - { url = "https://files.pythonhosted.org/packages/c2/cc/7496bb63a9b06a954d3d0ac9fe7a73f3bf1cd92d7a58877c27f4ad1e9d41/black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad", size = 1607468 }, - { url = "https://files.pythonhosted.org/packages/2b/e3/69a738fb5ba18b5422f50b4f143544c664d7da40f09c13969b2fd52900e0/black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50", size = 1437270 }, - { url = "https://files.pythonhosted.org/packages/c9/9b/2db8045b45844665c720dcfe292fdaf2e49825810c0103e1191515fc101a/black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392", size = 1737061 }, - { url = "https://files.pythonhosted.org/packages/a3/95/17d4a09a5be5f8c65aa4a361444d95edc45def0de887810f508d3f65db7a/black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175", size = 1423293 }, - { url = "https://files.pythonhosted.org/packages/90/04/bf74c71f592bcd761610bbf67e23e6a3cff824780761f536512437f1e655/black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3", size = 1644256 }, - { url = "https://files.pythonhosted.org/packages/4c/ea/a77bab4cf1887f4b2e0bce5516ea0b3ff7d04ba96af21d65024629afedb6/black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65", size = 1448534 }, - { url = "https://files.pythonhosted.org/packages/4e/3e/443ef8bc1fbda78e61f79157f303893f3fddf19ca3c8989b163eb3469a12/black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f", size = 1761892 }, - { url = "https://files.pythonhosted.org/packages/52/93/eac95ff229049a6901bc84fec6908a5124b8a0b7c26ea766b3b8a5debd22/black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8", size = 1434796 }, - { url = "https://files.pythonhosted.org/packages/fe/02/f408c804e0ee78c367dcea0a01aedde4f1712af93b8b6e60df981e0228c7/black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd", size = 1622516 }, - { url = "https://files.pythonhosted.org/packages/f8/b9/9b706ed2f55bfb28b436225a9c57da35990c9005b90b8c91f03924454ad7/black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f", size = 1456181 }, - { url = "https://files.pythonhosted.org/packages/0a/1c/314d7f17434a5375682ad097f6f4cc0e3f414f3c95a9b1bb4df14a0f11f9/black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800", size = 1752801 }, - { url = "https://files.pythonhosted.org/packages/39/a7/20e5cd9237d28ad0b31438de5d9f01c8b99814576f4c0cda1edd62caf4b0/black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7", size = 1413626 }, - { url = "https://files.pythonhosted.org/packages/8d/a7/4b27c50537ebca8bec139b872861f9d2bf501c5ec51fcf897cb924d9e264/black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d", size = 206898 }, -] - [[package]] name = "blinker" version = "1.9.0" @@ -502,11 +456,9 @@ dependencies = [ [package.optional-dependencies] dev = [ - { name = "black" }, - { name = "mypy" }, { name = "pre-commit" }, - { name = "pylint" }, { name = "pytest" }, + { name = "ruff" }, ] duckdb = [ { name = "dbt-duckdb" }, @@ -530,21 +482,19 @@ workbench = [ [package.metadata] requires-dist = [ - { name = "black", marker = "extra == 'dev'", specifier = ">=21.9b0" }, { name = "click", specifier = ">7" }, { name = "dbt-core", specifier = ">=1.8,<1.10" }, { name = "dbt-duckdb", marker = "extra == 'duckdb'", specifier = ">=1.0.0" }, { name = "dbt-postgres", marker = "extra == 'postgres'", specifier = ">=1.0.0" }, { name = "dbt-sqlite", marker = "extra == 'sqlite'", specifier = ">=1.0.0" }, { name = "feedparser", marker = "extra == 'workbench'", specifier = "~=6.0.10" }, - { name = "mypy", marker = "extra == 'dev'", specifier = ">=0.910" }, { name = "openai", marker = "extra == 'openai'", specifier = ">0.28.0" }, { name = "pluggy", specifier = ">=1.5.0" }, { name = "pre-commit", marker = "extra == 'dev'", specifier = ">3.0.0" }, - { name = "pylint", marker = "extra == 'dev'", specifier = ">=2.11.1" }, - { name = "pytest", marker = "extra == 'dev'", specifier = "~=7.4.2" }, + { name = "pytest", marker = "extra == 'dev'", specifier = "~=8.3.4" }, { name = "rich", specifier = ">=10" }, { name = "ruamel-yaml", specifier = ">=0.17" }, + { name = "ruff", marker = "extra == 'dev'", specifier = "~=0.8.4" }, { name = "streamlit", marker = "extra == 'workbench'", specifier = ">=1.20.0" }, { name = "streamlit-ace", marker = "extra == 'workbench'", specifier = ">=0.1.0" }, { name = "streamlit-elements-fluence", marker = "extra == 'workbench'", specifier = ">=0.1.4" }, @@ -611,15 +561,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/e6/d27d37dc55dbf40cdbd665aa52844b065ac760c9a02a02265f97ea7a4256/deepdiff-7.0.1-py3-none-any.whl", hash = "sha256:447760081918216aa4fd4ca78a4b6a848b81307b2ea94c810255334b759e1dc3", size = 80825 }, ] -[[package]] -name = "dill" -version = "0.3.9" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/70/43/86fe3f9e130c4137b0f1b50784dd70a5087b911fe07fa81e53e0c4c47fea/dill-0.3.9.tar.gz", hash = "sha256:81aa267dddf68cbfe8029c42ca9ec6a4ab3b22371d1c450abc54422577b4512c", size = 187000 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/46/d1/e73b6ad76f0b1fb7f23c35c6d95dbc506a9c8804f43dda8cb5b0fa6331fd/dill-0.3.9-py3-none-any.whl", hash = "sha256:468dff3b89520b474c0397703366b7b95eebe6303f108adf9b19da1f702be87a", size = 119418 }, -] - [[package]] name = "distlib" version = "0.3.9" @@ -895,15 +836,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/85/7882d311924cbcfc70b1890780763e36ff0b140c7e51c110fc59a532f087/isodate-0.6.1-py2.py3-none-any.whl", hash = "sha256:0751eece944162659049d35f4f549ed815792b38793f07cf73381c1c87cbed96", size = 41722 }, ] -[[package]] -name = "isort" -version = "5.13.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/87/f9/c1eb8635a24e87ade2efce21e3ce8cd6b8630bb685ddc9cdaca1349b2eb5/isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109", size = 175303 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d1/b3/8def84f539e7d2289a02f0524b944b15d7c75dab7628bedf1c4f0992029c/isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6", size = 92310 }, -] - [[package]] name = "jinja2" version = "3.1.5" @@ -1323,15 +1255,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5e/b6/5a1f868782cd13f053a679984e222007ecff654a9bfbac6b27a65f4eeb05/matplotlib-3.9.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ad45da51be7ad02387801fd154ef74d942f49fe3fcd26a64c94842ba7ec0d865", size = 7854624 }, ] -[[package]] -name = "mccabe" -version = "0.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e7/ff/0ffefdcac38932a54d2b5eed4e0ba8a408f215002cd178ad1df0f2806ff8/mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325", size = 9658 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/27/1a/1f68f9ba0c207934b35b86a8ca3aad8395a3d6dd7921c0686e23853ff5a9/mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e", size = 7350 }, -] - [[package]] name = "mdurl" version = "0.1.2" @@ -1411,53 +1334,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/af/98/cff14d53a2f2f67d7fe8a4e235a383ee71aba6a1da12aeea24b325d0c72a/multimethod-1.12-py3-none-any.whl", hash = "sha256:fd0c473c43558908d97cc06e4d68e8f69202f167db46f7b4e4058893e7dbdf60", size = 10646 }, ] -[[package]] -name = "mypy" -version = "1.14.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "mypy-extensions" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b9/eb/2c92d8ea1e684440f54fa49ac5d9a5f19967b7b472a281f419e69a8d228e/mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6", size = 3216051 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/7a/87ae2adb31d68402da6da1e5f30c07ea6063e9f09b5e7cfc9dfa44075e74/mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb", size = 11211002 }, - { url = "https://files.pythonhosted.org/packages/e1/23/eada4c38608b444618a132be0d199b280049ded278b24cbb9d3fc59658e4/mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0", size = 10358400 }, - { url = "https://files.pythonhosted.org/packages/43/c9/d6785c6f66241c62fd2992b05057f404237deaad1566545e9f144ced07f5/mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d", size = 12095172 }, - { url = "https://files.pythonhosted.org/packages/c3/62/daa7e787770c83c52ce2aaf1a111eae5893de9e004743f51bfcad9e487ec/mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b", size = 12828732 }, - { url = "https://files.pythonhosted.org/packages/1b/a2/5fb18318a3637f29f16f4e41340b795da14f4751ef4f51c99ff39ab62e52/mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427", size = 13012197 }, - { url = "https://files.pythonhosted.org/packages/28/99/e153ce39105d164b5f02c06c35c7ba958aaff50a2babba7d080988b03fe7/mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f", size = 9780836 }, - { url = "https://files.pythonhosted.org/packages/da/11/a9422850fd506edbcdc7f6090682ecceaf1f87b9dd847f9df79942da8506/mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c", size = 11120432 }, - { url = "https://files.pythonhosted.org/packages/b6/9e/47e450fd39078d9c02d620545b2cb37993a8a8bdf7db3652ace2f80521ca/mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1", size = 10279515 }, - { url = "https://files.pythonhosted.org/packages/01/b5/6c8d33bd0f851a7692a8bfe4ee75eb82b6983a3cf39e5e32a5d2a723f0c1/mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8", size = 12025791 }, - { url = "https://files.pythonhosted.org/packages/f0/4c/e10e2c46ea37cab5c471d0ddaaa9a434dc1d28650078ac1b56c2d7b9b2e4/mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f", size = 12749203 }, - { url = "https://files.pythonhosted.org/packages/88/55/beacb0c69beab2153a0f57671ec07861d27d735a0faff135a494cd4f5020/mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1", size = 12885900 }, - { url = "https://files.pythonhosted.org/packages/a2/75/8c93ff7f315c4d086a2dfcde02f713004357d70a163eddb6c56a6a5eff40/mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae", size = 9777869 }, - { url = "https://files.pythonhosted.org/packages/43/1b/b38c079609bb4627905b74fc6a49849835acf68547ac33d8ceb707de5f52/mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14", size = 11266668 }, - { url = "https://files.pythonhosted.org/packages/6b/75/2ed0d2964c1ffc9971c729f7a544e9cd34b2cdabbe2d11afd148d7838aa2/mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9", size = 10254060 }, - { url = "https://files.pythonhosted.org/packages/a1/5f/7b8051552d4da3c51bbe8fcafffd76a6823779101a2b198d80886cd8f08e/mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11", size = 11933167 }, - { url = "https://files.pythonhosted.org/packages/04/90/f53971d3ac39d8b68bbaab9a4c6c58c8caa4d5fd3d587d16f5927eeeabe1/mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e", size = 12864341 }, - { url = "https://files.pythonhosted.org/packages/03/d2/8bc0aeaaf2e88c977db41583559319f1821c069e943ada2701e86d0430b7/mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89", size = 12972991 }, - { url = "https://files.pythonhosted.org/packages/6f/17/07815114b903b49b0f2cf7499f1c130e5aa459411596668267535fe9243c/mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b", size = 9879016 }, - { url = "https://files.pythonhosted.org/packages/ca/1f/186d133ae2514633f8558e78cd658070ba686c0e9275c5a5c24a1e1f0d67/mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35", size = 11200493 }, - { url = "https://files.pythonhosted.org/packages/af/fc/4842485d034e38a4646cccd1369f6b1ccd7bc86989c52770d75d719a9941/mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc", size = 10357702 }, - { url = "https://files.pythonhosted.org/packages/b4/e6/457b83f2d701e23869cfec013a48a12638f75b9d37612a9ddf99072c1051/mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9", size = 12091104 }, - { url = "https://files.pythonhosted.org/packages/f1/bf/76a569158db678fee59f4fd30b8e7a0d75bcbaeef49edd882a0d63af6d66/mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb", size = 12830167 }, - { url = "https://files.pythonhosted.org/packages/43/bc/0bc6b694b3103de9fed61867f1c8bd33336b913d16831431e7cb48ef1c92/mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60", size = 13013834 }, - { url = "https://files.pythonhosted.org/packages/b0/79/5f5ec47849b6df1e6943d5fd8e6632fbfc04b4fd4acfa5a5a9535d11b4e2/mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c", size = 9781231 }, - { url = "https://files.pythonhosted.org/packages/a0/b5/32dd67b69a16d088e533962e5044e51004176a9952419de0370cdaead0f8/mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1", size = 2752905 }, -] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/a4/1ab47638b92648243faf97a5aeb6ea83059cc3624972ab6b8d2316078d3f/mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782", size = 4433 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2a/e2/5d3f6ada4297caebe1a2add3b126fe800c96f56dbe5d1988a2cbe0b267aa/mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d", size = 4695 }, -] - [[package]] name = "narwhals" version = "1.20.1" @@ -2049,26 +1925,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f7/3f/01c8b82017c199075f8f788d0d906b9ffbbc5a47dc9918a945e13d5a2bda/pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a", size = 1205513 }, ] -[[package]] -name = "pylint" -version = "3.3.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "astroid" }, - { name = "colorama", marker = "sys_platform == 'win32'" }, - { name = "dill" }, - { name = "isort" }, - { name = "mccabe" }, - { name = "platformdirs" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "tomlkit" }, - { name = "typing-extensions", marker = "python_full_version < '3.10'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/17/fd/e9a739afac274a39596bbe562e9d966db6f3917fdb2bd7322ffc56da0ba2/pylint-3.3.3.tar.gz", hash = "sha256:07c607523b17e6d16e2ae0d7ef59602e332caa762af64203c24b41c27139f36a", size = 1516550 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/91/e1/26d55acea92b1ea4d33672e48f09ceeb274e84d7d542a4fb9a32a556db46/pylint-3.3.3-py3-none-any.whl", hash = "sha256:26e271a2bc8bce0fc23833805a9076dd9b4d5194e2a02164942cb3cdc37b4183", size = 521918 }, -] - [[package]] name = "pyparsing" version = "3.2.0" @@ -2080,7 +1936,7 @@ wheels = [ [[package]] name = "pytest" -version = "7.4.4" +version = "8.3.4" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, @@ -2090,9 +1946,9 @@ dependencies = [ { name = "pluggy" }, { name = "tomli", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/80/1f/9d8e98e4133ffb16c90f3b405c43e38d3abb715bb5d7a63a5a684f7e46a3/pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280", size = 1357116 } +sdist = { url = "https://files.pythonhosted.org/packages/05/35/30e0d83068951d90a01852cb1cef56e5d8a09d20c7f511634cc2f7e0372a/pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761", size = 1445919 } wheels = [ - { url = "https://files.pythonhosted.org/packages/51/ff/f6e8b8f39e08547faece4bd80f89d5a8de68a38b2d179cc1c4490ffa3286/pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8", size = 325287 }, + { url = "https://files.pythonhosted.org/packages/11/92/76a1c94d3afee238333bc0a42b82935dd8f9cf8ce9e336ff87ee14d9e1cf/pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6", size = 343083 }, ] [[package]] @@ -2449,6 +2305,31 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/93/07/de635108684b7a5bb06e432b0930c5a04b6c59efe73bd966d8db3cc208f2/ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b", size = 118653 }, ] +[[package]] +name = "ruff" +version = "0.8.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/34/37/9c02181ef38d55b77d97c68b78e705fd14c0de0e5d085202bb2b52ce5be9/ruff-0.8.4.tar.gz", hash = "sha256:0d5f89f254836799af1615798caa5f80b7f935d7a670fad66c5007928e57ace8", size = 3402103 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/05/67/f480bf2f2723b2e49af38ed2be75ccdb2798fca7d56279b585c8f553aaab/ruff-0.8.4-py3-none-linux_armv6l.whl", hash = "sha256:58072f0c06080276804c6a4e21a9045a706584a958e644353603d36ca1eb8a60", size = 10546415 }, + { url = "https://files.pythonhosted.org/packages/eb/7a/5aba20312c73f1ce61814e520d1920edf68ca3b9c507bd84d8546a8ecaa8/ruff-0.8.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ffb60904651c00a1e0b8df594591770018a0f04587f7deeb3838344fe3adabac", size = 10346113 }, + { url = "https://files.pythonhosted.org/packages/76/f4/c41de22b3728486f0aa95383a44c42657b2db4062f3234ca36fc8cf52d8b/ruff-0.8.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6ddf5d654ac0d44389f6bf05cee4caeefc3132a64b58ea46738111d687352296", size = 9943564 }, + { url = "https://files.pythonhosted.org/packages/0e/f0/afa0d2191af495ac82d4cbbfd7a94e3df6f62a04ca412033e073b871fc6d/ruff-0.8.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e248b1f0fa2749edd3350a2a342b67b43a2627434c059a063418e3d375cfe643", size = 10805522 }, + { url = "https://files.pythonhosted.org/packages/12/57/5d1e9a0fd0c228e663894e8e3a8e7063e5ee90f8e8e60cf2085f362bfa1a/ruff-0.8.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bf197b98ed86e417412ee3b6c893f44c8864f816451441483253d5ff22c0e81e", size = 10306763 }, + { url = "https://files.pythonhosted.org/packages/04/df/f069fdb02e408be8aac6853583572a2873f87f866fe8515de65873caf6b8/ruff-0.8.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c41319b85faa3aadd4d30cb1cffdd9ac6b89704ff79f7664b853785b48eccdf3", size = 11359574 }, + { url = "https://files.pythonhosted.org/packages/d3/04/37c27494cd02e4a8315680debfc6dfabcb97e597c07cce0044db1f9dfbe2/ruff-0.8.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:9f8402b7c4f96463f135e936d9ab77b65711fcd5d72e5d67597b543bbb43cf3f", size = 12094851 }, + { url = "https://files.pythonhosted.org/packages/81/b1/c5d7fb68506cab9832d208d03ea4668da9a9887a4a392f4f328b1bf734ad/ruff-0.8.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e4e56b3baa9c23d324ead112a4fdf20db9a3f8f29eeabff1355114dd96014604", size = 11655539 }, + { url = "https://files.pythonhosted.org/packages/ef/38/8f8f2c8898dc8a7a49bc340cf6f00226917f0f5cb489e37075bcb2ce3671/ruff-0.8.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:736272574e97157f7edbbb43b1d046125fce9e7d8d583d5d65d0c9bf2c15addf", size = 12912805 }, + { url = "https://files.pythonhosted.org/packages/06/dd/fa6660c279f4eb320788876d0cff4ea18d9af7d9ed7216d7bd66877468d0/ruff-0.8.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e5fe710ab6061592521f902fca7ebcb9fabd27bc7c57c764298b1c1f15fff720", size = 11205976 }, + { url = "https://files.pythonhosted.org/packages/a8/d7/de94cc89833b5de455750686c17c9e10f4e1ab7ccdc5521b8fe911d1477e/ruff-0.8.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:13e9ec6d6b55f6da412d59953d65d66e760d583dd3c1c72bf1f26435b5bfdbae", size = 10792039 }, + { url = "https://files.pythonhosted.org/packages/6d/15/3e4906559248bdbb74854af684314608297a05b996062c9d72e0ef7c7097/ruff-0.8.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:97d9aefef725348ad77d6db98b726cfdb075a40b936c7984088804dfd38268a7", size = 10400088 }, + { url = "https://files.pythonhosted.org/packages/a2/21/9ed4c0e8133cb4a87a18d470f534ad1a8a66d7bec493bcb8bda2d1a5d5be/ruff-0.8.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ab78e33325a6f5374e04c2ab924a3367d69a0da36f8c9cb6b894a62017506111", size = 10900814 }, + { url = "https://files.pythonhosted.org/packages/0d/5d/122a65a18955bd9da2616b69bc839351f8baf23b2805b543aa2f0aed72b5/ruff-0.8.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8ef06f66f4a05c3ddbc9121a8b0cecccd92c5bf3dd43b5472ffe40b8ca10f0f8", size = 11268828 }, + { url = "https://files.pythonhosted.org/packages/43/a9/1676ee9106995381e3d34bccac5bb28df70194167337ed4854c20f27c7ba/ruff-0.8.4-py3-none-win32.whl", hash = "sha256:552fb6d861320958ca5e15f28b20a3d071aa83b93caee33a87b471f99a6c0835", size = 8805621 }, + { url = "https://files.pythonhosted.org/packages/10/98/ed6b56a30ee76771c193ff7ceeaf1d2acc98d33a1a27b8479cbdb5c17a23/ruff-0.8.4-py3-none-win_amd64.whl", hash = "sha256:f21a1143776f8656d7f364bd264a9d60f01b7f52243fbe90e7670c0dfe0cf65d", size = 9660086 }, + { url = "https://files.pythonhosted.org/packages/13/9f/026e18ca7d7766783d779dae5e9c656746c6ede36ef73c6d934aaf4a6dec/ruff-0.8.4-py3-none-win_arm64.whl", hash = "sha256:9183dd615d8df50defa8b1d9a074053891ba39025cf5ae88e8bcb52edcc4bf08", size = 9074500 }, +] + [[package]] name = "scipy" version = "1.13.1" @@ -2702,15 +2583,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257 }, ] -[[package]] -name = "tomlkit" -version = "0.13.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b1/09/a439bec5888f00a54b8b9f05fa94d7f901d6735ef4e55dcec9bc37b5d8fa/tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79", size = 192885 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/b6/a447b5e4ec71e13871be01ba81f5dfc9d0af7e473da256ff46bc0e24026f/tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde", size = 37955 }, -] - [[package]] name = "tornado" version = "6.4.2" From 48ab10c03a64d54f03eb6f48ee0d430924156fb1 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 01:45:16 -0700 Subject: [PATCH 27/46] chore: add config for seeds key --- demo_duckdb/dbt_project.yml | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/demo_duckdb/dbt_project.yml b/demo_duckdb/dbt_project.yml index db138093..6a30ba5a 100644 --- a/demo_duckdb/dbt_project.yml +++ b/demo_duckdb/dbt_project.yml @@ -5,11 +5,11 @@ version: "0.1" profile: "jaffle_shop" -model-paths: [ "models" ] -seed-paths: [ "seeds" ] -test-paths: [ "tests" ] -analysis-paths: [ "analysis" ] -macro-paths: [ "macros" ] +model-paths: ["models"] +seed-paths: ["seeds"] +test-paths: ["tests"] +analysis-paths: ["analysis"] +macro-paths: ["macros"] target-path: "target" clean-targets: @@ -17,7 +17,7 @@ clean-targets: - "dbt_modules" - "logs" -require-dbt-version: [ ">=1.0.0", "<2.0.0" ] +require-dbt-version: [">=1.0.0", "<2.0.0"] models: +dbt-osmosis: schema.yml @@ -25,3 +25,6 @@ models: materialized: table staging: materialized: view + +seeds: + +dbt-osmosis: schema.yml From e971271ea7a6f4a3fca99cbd0b7ec10cec0badce Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 10:55:36 -0700 Subject: [PATCH 28/46] feat: centralized dry run application in IO funcs, more parity tests updated and enabled --- demo_duckdb/models/schema.yml | 5 +- demo_duckdb/models/staging/schema.yml | 4 +- demo_duckdb/seeds/schema.yml | 34 ++ src/dbt_osmosis/core/osmosis.py | 55 +-- .../test_column_level_knowledge_propagator.py | 446 +++++++++--------- 5 files changed, 293 insertions(+), 251 deletions(-) diff --git a/demo_duckdb/models/schema.yml b/demo_duckdb/models/schema.yml index 7b69aff6..be701013 100644 --- a/demo_duckdb/models/schema.yml +++ b/demo_duckdb/models/schema.yml @@ -30,9 +30,9 @@ models: data_type: DOUBLE description: '' - - name: customer_rank - data_type: VARCHAR + - name: customer_average_value description: '' + data_type: DECIMAL(18,3) - name: orders description: This table has basic information about orders, as well as some derived facts based on payments @@ -59,6 +59,7 @@ models: - accepted_values: values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] + description: '{{ doc("orders_status") }}' - name: credit_card_amount description: Amount of the order (AUD) paid for by credit card tests: diff --git a/demo_duckdb/models/staging/schema.yml b/demo_duckdb/models/staging/schema.yml index d16bf701..892c720e 100644 --- a/demo_duckdb/models/staging/schema.yml +++ b/demo_duckdb/models/staging/schema.yml @@ -15,9 +15,6 @@ models: - name: last_name data_type: VARCHAR description: '' - - name: rank - data_type: VARCHAR - description: '' - name: stg_orders columns: - name: order_id @@ -37,6 +34,7 @@ models: values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] data_type: VARCHAR + description: '{{ doc("orders_status") }}' - name: stg_payments columns: - name: payment_id diff --git a/demo_duckdb/seeds/schema.yml b/demo_duckdb/seeds/schema.yml index 907f5b6e..306aef1a 100644 --- a/demo_duckdb/seeds/schema.yml +++ b/demo_duckdb/seeds/schema.yml @@ -2,5 +2,39 @@ version: 2 seeds: - name: raw_orders columns: + - name: id + description: '' + data_type: INTEGER + - name: user_id + description: '' + data_type: INTEGER + - name: order_date + description: '' + data_type: DATE - name: status description: '{{ doc("orders_status") }}' # putting this in to test if unrendered propogation works + - name: raw_payments + columns: + - name: id + description: '' + data_type: INTEGER + - name: order_id + description: '' + data_type: INTEGER + - name: payment_method + description: '' + data_type: VARCHAR + - name: amount + description: '' + data_type: INTEGER + - name: raw_customers + columns: + - name: id + description: '' + data_type: INTEGER + - name: first_name + description: '' + data_type: VARCHAR + - name: last_name + description: '' + data_type: VARCHAR diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index a6639087..46b783c0 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -776,22 +776,24 @@ def _read_yaml(context: YamlRefactorContext, path: Path) -> dict[str, t.Any]: def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any]) -> None: """Write a yaml file to disk and register a mutation with the context. Clears the path from the buffer cache.""" - with context.yaml_handler_lock: - path.parent.mkdir(parents=True, exist_ok=True) - context.yaml_handler.dump(data, path) - if path in _YAML_BUFFER_CACHE: - del _YAML_BUFFER_CACHE[path] + if not context.settings.dry_run: + with context.yaml_handler_lock: + path.parent.mkdir(parents=True, exist_ok=True) + context.yaml_handler.dump(data, path) + if path in _YAML_BUFFER_CACHE: + del _YAML_BUFFER_CACHE[path] context.register_mutations(1) def commit_yamls(context: YamlRefactorContext) -> None: """Commit all files in the yaml buffer cache to disk. Clears the buffer cache and registers mutations.""" - with context.yaml_handler_lock: - for path in list(_YAML_BUFFER_CACHE.keys()): - with path.open("w") as f: - context.yaml_handler.dump(_YAML_BUFFER_CACHE[path], f) - del _YAML_BUFFER_CACHE[path] - context.register_mutations(1) + if not context.settings.dry_run: + with context.yaml_handler_lock: + for path in list(_YAML_BUFFER_CACHE.keys()): + with path.open("w") as f: + context.yaml_handler.dump(_YAML_BUFFER_CACHE[path], f) + del _YAML_BUFFER_CACHE[path] + context.register_mutations(1) def _generate_minimal_model_yaml(node: ModelNode | SeedNode) -> dict[str, t.Any]: @@ -972,8 +974,7 @@ def apply_restructure_plan( else: output_doc[key] = val - if not context.settings.dry_run: - _write_yaml(context, op.file_path, output_doc) + _write_yaml(context, op.file_path, output_doc) for path, nodes in op.superseded_paths.items(): if path.is_file(): @@ -994,11 +995,10 @@ def apply_restructure_plan( path.parent.rmdir() if path in _YAML_BUFFER_CACHE: del _YAML_BUFFER_CACHE[path] - context.register_mutations(1) + context.register_mutations(1) logger.info(f"Superseded entire file {path}") else: - if not context.settings.dry_run: - _write_yaml(context, path, existing_data) + _write_yaml(context, path, existing_data) logger.info(f"Migrated doc from {path} -> {op.file_path}") _ = commit_yamls(context), reload_manifest(context.project) @@ -1065,12 +1065,11 @@ def _get_member_yaml(context: YamlRefactorContext, member: ResultNode) -> dict[s return None -def _build_column_knowledge_grap( +def _build_column_knowledge_graph( context: YamlRefactorContext, node: ResultNode ) -> dict[str, dict[str, t.Any]]: """Generate a column knowledge graph for a dbt model or source node.""" tree = _build_node_ancestor_tree(context.project.manifest, node) - _ = tree.pop("generation_0") column_knowledge_graph: dict[str, dict[str, t.Any]] = {} for generation in reversed(sorted(tree.keys())): @@ -1086,7 +1085,7 @@ def _build_column_knowledge_grap( graph_node = column_knowledge_graph.setdefault(name, {}) if context.settings.add_progenitor_to_meta: graph_node.setdefault("meta", {}).setdefault( - "osmosis_progenitor", ancestor.name + "osmosis_progenitor", ancestor.unique_id ) graph_edge = metadata.to_dict() @@ -1106,12 +1105,12 @@ def _build_column_knowledge_grap( graph_edge["description"] = unrendered_description current_tags = graph_node.get("tags", []) - if incoming_tags := (set(graph_edge.pop("tags", [])) | set(current_tags)): - graph_edge["tags"] = list(incoming_tags) + if merged_tags := (set(graph_edge.pop("tags", [])) | set(current_tags)): + graph_edge["tags"] = list(merged_tags) current_meta = graph_node.get("meta", {}) - if incoming_meta := {**current_meta, **graph_edge.pop("meta", {})}: - graph_edge["meta"] = incoming_meta + if merged_meta := {**current_meta, **graph_edge.pop("meta", {})}: + graph_edge["meta"] = merged_meta for inheritable in context.settings.add_inheritance_for_specified_keys: current_val = graph_node.get(inheritable) @@ -1152,13 +1151,13 @@ def inherit_upstream_column_knowledge( inheritable.append(extra) yaml_section = _get_member_yaml(context, node) - column_knowledge_graph = _build_column_knowledge_grap(context, node) + column_knowledge_graph = _build_column_knowledge_graph(context, node) kwargs = None for name, node_column in node.columns.items(): variants: list[str] = [name] pm = get_plugin_manager() for v in pm.hook.get_candidates(name=name, node=node, context=context.project): - variants.extend(v) + variants.extend(t.cast(list[str], v)) for variant in variants: kwargs = column_knowledge_graph.get(variant) if kwargs is not None: @@ -1176,12 +1175,6 @@ def inherit_upstream_column_knowledge( column["name"], context.project.config.credentials.type ) if yaml_name == name: - if updated_metadata.get("tags") == []: - del updated_metadata["tags"] - if updated_metadata.get("meta") == {}: - del updated_metadata["meta"] - if updated_metadata.get("description", EMPTY_STRING) in context.placeholders: - _ = updated_metadata.pop("description", None) column.update(**updated_metadata) diff --git a/tests/test_column_level_knowledge_propagator.py b/tests/test_column_level_knowledge_propagator.py index 53ca5fb5..683f8cae 100644 --- a/tests/test_column_level_knowledge_propagator.py +++ b/tests/test_column_level_knowledge_propagator.py @@ -1,218 +1,234 @@ -# TODO: refactor this test -# import json -# from pathlib import Path -# -# import dbt.version -# import pytest -# from dbt.contracts.graph.manifest import Manifest -# from packaging.version import Version -# -# from dbt_osmosis.core.column_level_knowledge_propagator import ( -# ColumnLevelKnowledgePropagator, -# _build_node_ancestor_tree, -# _inherit_column_level_knowledge, -# ) -# -# dbt_version = Version(dbt.version.get_installed_version().to_version_string(skip_matcher=True)) -# -# -# def load_manifest() -> Manifest: -# manifest_path = Path(__file__).parent.parent / "demo_duckdb/target/manifest.json" -# with manifest_path.open("r") as f: -# manifest_text = f.read() -# manifest_dict = json.loads(manifest_text) -# return Manifest.from_dict(manifest_dict) -# -# -# def test_build_node_ancestor_tree(): -# manifest = load_manifest() -# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] -# expect = { -# "generation_0": [ -# "model.jaffle_shop_duckdb.stg_customers", -# "model.jaffle_shop_duckdb.stg_orders", -# "model.jaffle_shop_duckdb.stg_payments", -# ], -# "generation_1": [ -# "seed.jaffle_shop_duckdb.raw_customers", -# "seed.jaffle_shop_duckdb.raw_orders", -# "seed.jaffle_shop_duckdb.raw_payments", -# ], -# } -# assert _build_node_ancestor_tree(manifest, target_node) == expect -# -# -# def test_inherit_column_level_knowledge(): -# manifest = load_manifest() -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ -# "customer_id" -# ].description = "THIS COLUMN IS UPDATED FOR TESTING" -# manifest.nodes["seed.jaffle_shop_duckdb.raw_orders"].columns[ -# "status" -# ].description = "THIS COLUMN IS UPDATED FOR TESTING" -# -# expect = { -# "customer_id": { -# "progenitor": "model.jaffle_shop_duckdb.stg_customers", -# "generation": "generation_0", -# "name": "customer_id", -# "description": "THIS COLUMN IS UPDATED FOR TESTING", -# "data_type": "INTEGER", -# "constraints": [], -# "quote": None, -# }, -# "first_name": { -# "progenitor": "model.jaffle_shop_duckdb.stg_customers", -# "generation": "generation_0", -# "name": "first_name", -# "data_type": "VARCHAR", -# "constraints": [], -# "quote": None, -# }, -# "last_name": { -# "progenitor": "model.jaffle_shop_duckdb.stg_customers", -# "generation": "generation_0", -# "name": "last_name", -# "data_type": "VARCHAR", -# "constraints": [], -# "quote": None, -# }, -# "rank": { -# "progenitor": "model.jaffle_shop_duckdb.stg_customers", -# "generation": "generation_0", -# "name": "rank", -# "data_type": "VARCHAR", -# "constraints": [], -# "quote": None, -# }, -# "order_id": { -# "progenitor": "model.jaffle_shop_duckdb.stg_orders", -# "generation": "generation_0", -# "name": "order_id", -# "data_type": "INTEGER", -# "constraints": [], -# "quote": None, -# }, -# "order_date": { -# "progenitor": "model.jaffle_shop_duckdb.stg_orders", -# "generation": "generation_0", -# "name": "order_date", -# "data_type": "DATE", -# "constraints": [], -# "quote": None, -# }, -# "status": { -# "progenitor": "seed.jaffle_shop_duckdb.raw_orders", -# "generation": "generation_1", -# "name": "status", -# "description": "THIS COLUMN IS UPDATED FOR TESTING", -# "data_type": "VARCHAR", -# "constraints": [], -# "quote": None, -# }, -# "payment_id": { -# "progenitor": "model.jaffle_shop_duckdb.stg_payments", -# "generation": "generation_0", -# "name": "payment_id", -# "data_type": "INTEGER", -# "constraints": [], -# "quote": None, -# }, -# "payment_method": { -# "progenitor": "model.jaffle_shop_duckdb.stg_payments", -# "generation": "generation_0", -# "name": "payment_method", -# "data_type": "VARCHAR", -# "constraints": [], -# "quote": None, -# }, -# "amount": { -# "progenitor": "model.jaffle_shop_duckdb.stg_payments", -# "generation": "generation_0", -# "name": "amount", -# "data_type": "DOUBLE", -# "constraints": [], -# "quote": None, -# }, -# } -# if dbt_version >= Version("1.9.0"): -# for key in expect.keys(): -# expect[key]["granularity"] = None -# -# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] -# family_tree = _build_node_ancestor_tree(manifest, target_node) -# placeholders = [""] -# assert _inherit_column_level_knowledge(manifest, family_tree, placeholders) == expect -# -# -# def test_update_undocumented_columns_with_prior_knowledge(): -# manifest = load_manifest() -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ -# "customer_id" -# ].description = "THIS COLUMN IS UPDATED FOR TESTING" -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { -# "my_key": "my_value" -# } -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ -# "my_tag1", -# "my_tag2", -# ] -# -# target_node_name = "model.jaffle_shop_duckdb.customers" -# manifest.nodes[target_node_name].columns["customer_id"].tags = set( -# [ -# "my_tag3", -# "my_tag4", -# ] -# ) -# manifest.nodes[target_node_name].columns["customer_id"].meta = { -# "my_key": "my_old_value", -# "my_new_key": "my_new_value", -# } -# target_node = manifest.nodes[target_node_name] -# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( -# manifest, target_node, placeholders=[""] -# ) -# yaml_file_model_section = { -# "columns": [ -# { -# "name": "customer_id", -# } -# ] -# } -# undocumented_columns = target_node.columns.keys() -# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( -# undocumented_columns, -# target_node, -# yaml_file_model_section, -# knowledge, -# skip_add_tags=False, -# skip_merge_meta=False, -# add_progenitor_to_meta=False, -# ) -# -# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" -# assert ( -# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" -# ) -# assert yaml_file_model_section["columns"][0]["meta"] == { -# "my_key": "my_value", -# "my_new_key": "my_new_value", -# } -# assert set(yaml_file_model_section["columns"][0]["tags"]) == set( -# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] -# ) -# -# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" -# assert target_node.columns["customer_id"].meta == { -# "my_key": "my_value", -# "my_new_key": "my_new_value", -# } -# assert set(target_node.columns["customer_id"].tags) == set( -# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] -# ) -# -# +# pyright: reportAny=false, reportUnknownMemberType=false, reportPrivateUsage=false +import json +import typing as t +from pathlib import Path +from unittest import mock + +import dbt.version +import pytest +from dbt.contracts.graph.manifest import Manifest +from packaging.version import Version + +from dbt_osmosis.core.osmosis import ( + DbtConfiguration, + YamlRefactorContext, + YamlRefactorSettings, + _build_column_knowledge_graph, + _build_node_ancestor_tree, + _get_member_yaml, + create_dbt_project_context, + inherit_upstream_column_knowledge, +) + +dbt_version = Version(dbt.version.get_installed_version().to_version_string(skip_matcher=True)) + + +@pytest.fixture(scope="function") +def yaml_context() -> YamlRefactorContext: + # initializing the context is a sanity test in and of itself + c = DbtConfiguration(project_dir="demo_duckdb", profiles_dir="demo_duckdb") + c.vars = {"dbt-osmosis": {}} + project = create_dbt_project_context(c) + context = YamlRefactorContext( + project, + settings=YamlRefactorSettings( + dry_run=True, + ), + ) + return context + + +def load_manifest() -> Manifest: + manifest_path = Path(__file__).parent.parent / "demo_duckdb/target/manifest.json" + with manifest_path.open("r") as f: + manifest_text = f.read() + manifest_dict = json.loads(manifest_text) + return Manifest.from_dict(manifest_dict) + + +def test_build_node_ancestor_tree(): + manifest = load_manifest() + target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] + expect = { + "generation_0": ["model.jaffle_shop_duckdb.customers"], + "generation_1": [ + "model.jaffle_shop_duckdb.stg_customers", + "model.jaffle_shop_duckdb.stg_orders", + "model.jaffle_shop_duckdb.stg_payments", + ], + "generation_2": [ + "seed.jaffle_shop_duckdb.raw_customers", + "seed.jaffle_shop_duckdb.raw_orders", + "seed.jaffle_shop_duckdb.raw_payments", + ], + } + assert _build_node_ancestor_tree(manifest, target_node) == expect + + +def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): + manifest = yaml_context.project.manifest + manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ + "customer_id" + ].description = "THIS COLUMN IS UPDATED FOR TESTING" + + expect: dict[str, t.Any] = { + "customer_id": { + "name": "customer_id", + "description": "THIS COLUMN IS UPDATED FOR TESTING", + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers"}, + "data_type": None, + "constraints": [], + "quote": None, + "tags": [], + "granularity": None, + }, + "first_name": { + "name": "first_name", + "description": "Customer's first name. PII.", + "meta": {"osmosis_progenitor": "seed.jaffle_shop_duckdb.raw_customers"}, + "data_type": None, + "constraints": [], + "quote": None, + "tags": [], + "granularity": None, + }, + "last_name": { + "name": "last_name", + "description": "Customer's last name. PII.", + "meta": {"osmosis_progenitor": "seed.jaffle_shop_duckdb.raw_customers"}, + "data_type": None, + "constraints": [], + "quote": None, + "tags": [], + "granularity": None, + }, + "first_order": { + "name": "first_order", + "description": "Date (UTC) of a customer's first order", + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "data_type": None, + "constraints": [], + "quote": None, + "tags": [], + "granularity": None, + }, + "most_recent_order": { + "name": "most_recent_order", + "description": "Date (UTC) of a customer's most recent order", + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "data_type": None, + "constraints": [], + "quote": None, + "tags": [], + "granularity": None, + }, + "number_of_orders": { + "name": "number_of_orders", + "description": "Count of the number of orders a customer has placed", + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "data_type": None, + "constraints": [], + "quote": None, + "tags": [], + "granularity": None, + }, + "customer_lifetime_value": { + "name": "customer_lifetime_value", + "description": "", + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "data_type": "DOUBLE", + "constraints": [], + "quote": None, + "tags": [], + "granularity": None, + }, + "customer_average_value": { + "name": "customer_average_value", + "description": "", + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "data_type": "DECIMAL(18,3)", + "constraints": [], + "quote": None, + "tags": [], + "granularity": None, + }, + } + if dbt_version >= Version("1.9.0"): + for column in expect.keys(): + expect[column]["granularity"] = None + + target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] + # NOTE: we will only update empty / placeholders descriptions by design + target_node.columns["customer_id"].description = "" + + yaml_context.placeholders = ("",) + yaml_context.settings.add_progenitor_to_meta = True + with ( + mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), + mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), + ): + inherit_upstream_column_knowledge(yaml_context, target_node) + assert {k: v.to_dict() for k, v in target_node.columns.items()} == expect + + +def test_inherit_upstream_column_knowledge_with_mutations(yaml_context: YamlRefactorContext): + yaml_context.settings.skip_add_tags = False + yaml_context.settings.skip_merge_meta = False + + manifest = yaml_context.project.manifest + stg_customer_columns = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns + stg_customer_columns["customer_id"].description = "THIS COLUMN IS UPDATED FOR TESTING" + stg_customer_columns["customer_id"].meta = {"my_key": "my_value"} + stg_customer_columns["customer_id"].tags = ["my_tag1", "my_tag2"] + + target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] + target_node_columns = target_node.columns + target_node_columns["customer_id"].description = "" + target_node_columns["customer_id"].tags = ["my_tag3", "my_tag4"] + target_node_columns["customer_id"].meta = { + "my_key": "my_local_value", + "my_new_key": "my_new_value", + } + + with ( + mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), + mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), + ): + inherit_upstream_column_knowledge(yaml_context, target_node) + yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + + assert target_node_columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" + assert ( + target_node_columns["customer_id"].meta + == { + "my_key": "my_local_value", # NOTE: keys on the node itself always take precedence, hence it was not overridden + "my_new_key": "my_new_value", + } + ) + assert sorted(target_node_columns["customer_id"].tags) == [ + "my_tag1", + "my_tag2", + "my_tag3", + "my_tag4", + ] + + assert yaml_file_model_section + assert yaml_file_model_section["columns"][0]["name"] == "customer_id" + assert ( + yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" + ) + assert yaml_file_model_section["columns"][0]["meta"] == { + "my_key": "my_local_value", + "my_new_key": "my_new_value", + } + assert sorted(yaml_file_model_section["columns"][0]["tags"]) == [ + "my_tag1", + "my_tag2", + "my_tag3", + "my_tag4", + ] + + # def test_update_undocumented_columns_with_prior_knowledge_skip_add_tags(): # manifest = load_manifest() # manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ From 77c99af82d9debc0c7dfc5ce1d43fa3b61ffd4cc Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 11:21:04 -0700 Subject: [PATCH 29/46] feat: more tests reenabled showing parity but also small changes which are more correct in current form --- .../test_column_level_knowledge_propagator.py | 225 +++++++++--------- 1 file changed, 116 insertions(+), 109 deletions(-) diff --git a/tests/test_column_level_knowledge_propagator.py b/tests/test_column_level_knowledge_propagator.py index 683f8cae..ab1de4de 100644 --- a/tests/test_column_level_knowledge_propagator.py +++ b/tests/test_column_level_knowledge_propagator.py @@ -176,19 +176,20 @@ def test_inherit_upstream_column_knowledge_with_mutations(yaml_context: YamlRefa yaml_context.settings.skip_merge_meta = False manifest = yaml_context.project.manifest - stg_customer_columns = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns - stg_customer_columns["customer_id"].description = "THIS COLUMN IS UPDATED FOR TESTING" - stg_customer_columns["customer_id"].meta = {"my_key": "my_value"} - stg_customer_columns["customer_id"].tags = ["my_tag1", "my_tag2"] + customer_id_column = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ + "customer_id" + ] + customer_id_column.description = "THIS COLUMN IS UPDATED FOR TESTING" + customer_id_column.meta = {"my_key": "my_value"} + customer_id_column.tags = ["my_tag1", "my_tag2"] target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] - target_node_columns = target_node.columns - target_node_columns["customer_id"].description = "" - target_node_columns["customer_id"].tags = ["my_tag3", "my_tag4"] - target_node_columns["customer_id"].meta = { - "my_key": "my_local_value", - "my_new_key": "my_new_value", - } + target_node_customer_id = target_node.columns["customer_id"] + target_node_customer_id.description = ( + "" # NOTE: allow inheritance to update this, otherwise a valid description would be skipped + ) + target_node_customer_id.tags = ["my_tag3", "my_tag4"] + target_node_customer_id.meta = {"my_key": "my_local_value", "my_new_key": "my_new_value"} with ( mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), @@ -197,15 +198,16 @@ def test_inherit_upstream_column_knowledge_with_mutations(yaml_context: YamlRefa inherit_upstream_column_knowledge(yaml_context, target_node) yaml_file_model_section = _get_member_yaml(yaml_context, target_node) - assert target_node_columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" + target_node_customer_id = target_node.columns["customer_id"] + assert target_node_customer_id.description == "THIS COLUMN IS UPDATED FOR TESTING" assert ( - target_node_columns["customer_id"].meta + target_node_customer_id.meta == { - "my_key": "my_local_value", # NOTE: keys on the node itself always take precedence, hence it was not overridden + "my_key": "my_local_value", # NOTE: keys on the node itself always take precedence, hence `my_key` was not overridden "my_new_key": "my_new_value", } ) - assert sorted(target_node_columns["customer_id"].tags) == [ + assert sorted(target_node_customer_id.tags) == [ "my_tag1", "my_tag2", "my_tag3", @@ -229,100 +231,105 @@ def test_inherit_upstream_column_knowledge_with_mutations(yaml_context: YamlRefa ] -# def test_update_undocumented_columns_with_prior_knowledge_skip_add_tags(): -# manifest = load_manifest() -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ -# "customer_id" -# ].description = "THIS COLUMN IS UPDATED FOR TESTING" -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { -# "my_key": "my_value" -# } -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ -# "my_tag1", -# "my_tag2", -# ] -# -# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] -# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( -# manifest, target_node, placeholders=[""] -# ) -# yaml_file_model_section = { -# "columns": [ -# { -# "name": "customer_id", -# } -# ] -# } -# undocumented_columns = target_node.columns.keys() -# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( -# undocumented_columns, -# target_node, -# yaml_file_model_section, -# knowledge, -# skip_add_tags=True, -# skip_merge_meta=False, -# add_progenitor_to_meta=False, -# ) -# -# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" -# assert ( -# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" -# ) -# assert yaml_file_model_section["columns"][0]["meta"] == {"my_key": "my_value"} -# assert "tags" not in yaml_file_model_section["columns"][0] -# -# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" -# assert target_node.columns["customer_id"].meta == {"my_key": "my_value"} -# assert set(target_node.columns["customer_id"].tags) == set([]) -# -# -# def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta(): -# manifest = load_manifest() -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ -# "customer_id" -# ].description = "THIS COLUMN IS UPDATED FOR TESTING" -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { -# "my_key": "my_value" -# } -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ -# "my_tag1", -# "my_tag2", -# ] -# -# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] -# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( -# manifest, target_node, placeholders=[""] -# ) -# yaml_file_model_section = { -# "columns": [ -# { -# "name": "customer_id", -# } -# ] -# } -# undocumented_columns = target_node.columns.keys() -# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( -# undocumented_columns, -# target_node, -# yaml_file_model_section, -# knowledge, -# skip_add_tags=False, -# skip_merge_meta=True, -# add_progenitor_to_meta=False, -# ) -# -# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" -# assert ( -# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" -# ) -# assert "meta" not in yaml_file_model_section["columns"][0] -# assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag1", "my_tag2"]) -# -# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" -# assert target_node.columns["customer_id"].meta == {} -# assert set(target_node.columns["customer_id"].tags) == set(["my_tag1", "my_tag2"]) -# -# +def test_inherit_upstream_column_knowledge_skip_add_tags(yaml_context: YamlRefactorContext): + yaml_context.settings.skip_add_tags = True + yaml_context.settings.skip_merge_meta = False + + manifest = yaml_context.project.manifest + customer_id_column = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ + "customer_id" + ] + customer_id_column.description = "THIS COLUMN IS UPDATED FOR TESTING" + customer_id_column.meta = {"my_key": "my_value"} + customer_id_column.tags = ["my_tag1", "my_tag2"] + + target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] + target_node_customer_id = target_node.columns["customer_id"] + target_node_customer_id.description = "" # NOTE: allow inheritance to update this + target_node_customer_id.tags = ["my_tag3", "my_tag4"] + target_node_customer_id.meta = {"my_key": "my_value"} + + with ( + mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), + mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), + ): + inherit_upstream_column_knowledge(yaml_context, target_node) + yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + + target_node_customer_id = target_node.columns["customer_id"] + assert target_node_customer_id.description == "THIS COLUMN IS UPDATED FOR TESTING" + assert target_node_customer_id.meta == {"my_key": "my_value"} + assert ( + sorted(target_node_customer_id.tags) == ["my_tag3", "my_tag4"] + ) # NOTE: nodes tags are not mutated beyond our original mutation in the manifest node since skip_add_tags is True + + assert yaml_file_model_section + assert yaml_file_model_section["columns"][0]["name"] == "customer_id" + assert ( + yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" + ) + assert yaml_file_model_section["columns"][0]["meta"] == {"my_key": "my_value"} + # TODO: consider a function which synchronizes a node with its yaml buffer, and then consider if inherit_upstream_column_knowledge should sync nodes + # in which case it would pick up manual mutations to the node and apply them to the yaml buffer (which could be useful I think) + assert ( + yaml_file_model_section["columns"][0].get("tags", []) == [] + ) # NOTE: yaml tags do not exist in buffer because we added them artificially to the node and skip_add_tags is True + + +def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta( + yaml_context: YamlRefactorContext, +): + yaml_context.settings.skip_add_tags = False + yaml_context.settings.skip_merge_meta = True + + manifest = yaml_context.project.manifest + stg_customer_columns = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns + stg_customer_columns["customer_id"].description = "THIS COLUMN IS UPDATED FOR TESTING" + stg_customer_columns["customer_id"].meta = {"my_upstream_key": "my_upstream_value"} + stg_customer_columns["customer_id"].tags = ["my_tag1", "my_tag2"] + + target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] + target_node_columns = target_node.columns + target_node_columns["customer_id"].description = "" # NOTE: allow inheritance to update this + target_node_columns["customer_id"].tags = ["my_tag3", "my_tag4"] + target_node_columns["customer_id"].meta = {"my_key": "my_value"} + + with ( + mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), + mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), + ): + inherit_upstream_column_knowledge(yaml_context, target_node) + yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + + assert target_node_columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" + assert ( + target_node_columns["customer_id"].meta == {"my_key": "my_value"} + ) # NOTE: nodes meta is not mutated beyond our original mutation in the manifest node since skip_merge_tags is True + assert sorted(target_node_columns["customer_id"].tags) == [ + "my_tag1", + "my_tag2", + "my_tag3", + "my_tag4", + ] + + assert yaml_file_model_section + assert yaml_file_model_section["columns"][0]["name"] == "customer_id" + assert ( + yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" + ) + # TODO: consider a function which synchronizes a node with its yaml buffer, and then consider if inherit_upstream_column_knowledge should sync nodes + # in which case it would pick up manual mutations to the node and apply them to the yaml buffer (which could be useful I think) + assert ( + yaml_file_model_section["columns"][0].get("meta", {}) == {} + ) # NOTE: yaml meta does not exist in buffer because we added it artificially to the node and skip_merge_meta is True + assert sorted(yaml_file_model_section["columns"][0]["tags"]) == [ + "my_tag1", + "my_tag2", + "my_tag3", + "my_tag4", + ] + + # def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta(): # manifest = load_manifest() # manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ From 7647f55de2225b4070168adc44f91ce88581399f Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 11:43:14 -0700 Subject: [PATCH 30/46] feat: add a force inherit description opt to have a fallback to legacy behavior --- src/dbt_osmosis/core/osmosis.py | 22 ++++++++++++++++--- .../test_column_level_knowledge_propagator.py | 9 ++------ 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 46b783c0..fed19bc8 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -306,19 +306,35 @@ class YamlRefactorSettings: """Settings for yaml based refactoring operations.""" fqn: str | None = None + """Filter models to action via a fully qualified name match.""" models: list[str] = field(default_factory=list) + """Filter models to action via a file path match.""" dry_run: bool = False + """Do not write changes to disk.""" catalog_file: str | None = None + """Path to the dbt catalog.json file to use preferentially instead of live warehouse introspection""" skip_add_columns: bool = False + """Skip adding missing columns in the yaml files.""" skip_add_tags: bool = False + """Skip appending upstream tags in the yaml files.""" skip_add_data_types: bool = False + """Skip adding data types in the yaml files.""" numeric_precision: bool = False + """Include numeric precision in the data type.""" char_length: bool = False + """Include character length in the data type.""" skip_merge_meta: bool = False + """Skip merging upstream meta fields in the yaml files.""" add_progenitor_to_meta: bool = False + """Add a custom progenitor field to the meta section indicating a column's origin.""" use_unrendered_descriptions: bool = False + """Use unrendered descriptions preserving things like {{ doc(...) }} which are otherwise pre-rendered in the manifest object""" add_inheritance_for_specified_keys: list[str] = field(default_factory=list) + """Include additional keys in the inheritance process.""" output_to_lower: bool = False + """Force column name and data type output to lowercase in the yaml files.""" + force_inherit_descriptions: bool = False + """Force inheritance of descriptions from upstream models, even if node has a valid description.""" @dataclass @@ -1117,7 +1133,9 @@ def _build_column_knowledge_graph( if incoming_val := graph_edge.pop(inheritable, current_val): graph_edge[inheritable] = incoming_val - if graph_edge.get("description", EMPTY_STRING) in context.placeholders: + if graph_edge.get("description", EMPTY_STRING) in context.placeholders or ( + generation == "generation_0" and context.settings.force_inherit_descriptions + ): _ = graph_edge.pop("description", None) if graph_edge.get("tags") == []: del graph_edge["tags"] @@ -1210,8 +1228,6 @@ def remove_columns_not_in_database( context: YamlRefactorContext, node: ResultNode | None = None ) -> None: """Remove columns from a dbt node and it's corresponding yaml section that are not present in the database. Changes are implicitly buffered until commit_yamls is called.""" - if context.settings.skip_add_columns: - return if node is None: for _, node in filter_models(context): remove_columns_not_in_database(context, node) diff --git a/tests/test_column_level_knowledge_propagator.py b/tests/test_column_level_knowledge_propagator.py index ab1de4de..9440a004 100644 --- a/tests/test_column_level_knowledge_propagator.py +++ b/tests/test_column_level_knowledge_propagator.py @@ -13,7 +13,6 @@ DbtConfiguration, YamlRefactorContext, YamlRefactorSettings, - _build_column_knowledge_graph, _build_node_ancestor_tree, _get_member_yaml, create_dbt_project_context, @@ -158,7 +157,7 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): expect[column]["granularity"] = None target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] - # NOTE: we will only update empty / placeholders descriptions by design + # NOTE: we will only update empty / placeholders descriptions by design, see force_inherit_descriptions for legacy behavior target_node.columns["customer_id"].description = "" yaml_context.placeholders = ("",) @@ -172,8 +171,7 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): def test_inherit_upstream_column_knowledge_with_mutations(yaml_context: YamlRefactorContext): - yaml_context.settings.skip_add_tags = False - yaml_context.settings.skip_merge_meta = False + yaml_context.settings.force_inherit_descriptions = True # NOTE: matches legacy behavior manifest = yaml_context.project.manifest customer_id_column = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ @@ -185,9 +183,6 @@ def test_inherit_upstream_column_knowledge_with_mutations(yaml_context: YamlRefa target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] target_node_customer_id = target_node.columns["customer_id"] - target_node_customer_id.description = ( - "" # NOTE: allow inheritance to update this, otherwise a valid description would be skipped - ) target_node_customer_id.tags = ["my_tag3", "my_tag4"] target_node_customer_id.meta = {"my_key": "my_local_value", "my_new_key": "my_new_value"} From ab507b524e70a613eb44e93f1248d60cbc914599 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 12:02:13 -0700 Subject: [PATCH 31/46] feat: more tests reenabled --- .../test_column_level_knowledge_propagator.py | 507 +++++++++--------- 1 file changed, 244 insertions(+), 263 deletions(-) diff --git a/tests/test_column_level_knowledge_propagator.py b/tests/test_column_level_knowledge_propagator.py index 9440a004..cc18c071 100644 --- a/tests/test_column_level_knowledge_propagator.py +++ b/tests/test_column_level_knowledge_propagator.py @@ -162,11 +162,14 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): yaml_context.placeholders = ("",) yaml_context.settings.add_progenitor_to_meta = True + + # Perform inheritance on the node with ( mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), ): inherit_upstream_column_knowledge(yaml_context, target_node) + assert {k: v.to_dict() for k, v in target_node.columns.items()} == expect @@ -186,6 +189,7 @@ def test_inherit_upstream_column_knowledge_with_mutations(yaml_context: YamlRefa target_node_customer_id.tags = ["my_tag3", "my_tag4"] target_node_customer_id.meta = {"my_key": "my_local_value", "my_new_key": "my_new_value"} + # Perform inheritance on the node with ( mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), @@ -244,6 +248,7 @@ def test_inherit_upstream_column_knowledge_skip_add_tags(yaml_context: YamlRefac target_node_customer_id.tags = ["my_tag3", "my_tag4"] target_node_customer_id.meta = {"my_key": "my_value"} + # Perform inheritance on the node with ( mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), @@ -289,6 +294,7 @@ def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta( target_node_columns["customer_id"].tags = ["my_tag3", "my_tag4"] target_node_columns["customer_id"].meta = {"my_key": "my_value"} + # Perform inheritance on the node with ( mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), @@ -325,269 +331,244 @@ def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta( ] -# def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta(): -# manifest = load_manifest() -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ -# "customer_id" -# ].description = "THIS COLUMN IS UPDATED FOR TESTING" -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { -# "my_key": "my_value" -# } -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ -# "my_tag1", -# "my_tag2", -# ] -# -# target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] -# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( -# manifest, target_node, placeholders=[""] -# ) -# yaml_file_model_section = { -# "columns": [ -# { -# "name": "customer_id", -# } -# ] -# } -# undocumented_columns = target_node.columns.keys() -# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( -# undocumented_columns, -# target_node, -# yaml_file_model_section, -# knowledge, -# skip_add_tags=False, -# skip_merge_meta=False, -# add_progenitor_to_meta=True, -# ) -# -# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" -# assert ( -# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" -# ) -# assert yaml_file_model_section["columns"][0]["meta"] == { -# "my_key": "my_value", -# "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", -# } -# assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag1", "my_tag2"]) -# -# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" -# assert target_node.columns["customer_id"].meta == { -# "my_key": "my_value", -# "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", -# } -# assert set(target_node.columns["customer_id"].tags) == set(["my_tag1", "my_tag2"]) -# -# -# def test_update_undocumented_columns_with_prior_knowledge_with_osmosis_keep_description(): -# manifest = load_manifest() -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ -# "customer_id" -# ].description = "THIS COLUMN IS UPDATED FOR TESTING" -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { -# "my_key": "my_value", -# } -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ -# "my_tag1", -# "my_tag2", -# ] -# -# column_description_not_updated = ( -# "This column will not be updated as it has the 'osmosis_keep_description' attribute" -# ) -# target_node_name = "model.jaffle_shop_duckdb.customers" -# -# manifest.nodes[target_node_name].columns[ -# "customer_id" -# ].description = column_description_not_updated -# manifest.nodes[target_node_name].columns["customer_id"].tags = set( -# [ -# "my_tag3", -# "my_tag4", -# ] -# ) -# manifest.nodes[target_node_name].columns["customer_id"].meta = { -# "my_key": "my_value", -# "osmosis_keep_description": True, -# } -# -# target_node = manifest.nodes[target_node_name] -# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( -# manifest, target_node, placeholders=[""] -# ) -# yaml_file_model_section = { -# "columns": [ -# { -# "name": "customer_id", -# } -# ] -# } -# undocumented_columns = target_node.columns.keys() -# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( -# undocumented_columns, -# target_node, -# yaml_file_model_section, -# knowledge, -# skip_add_tags=True, -# skip_merge_meta=True, -# add_progenitor_to_meta=False, -# ) -# -# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" -# assert yaml_file_model_section["columns"][0]["description"] == column_description_not_updated -# assert yaml_file_model_section["columns"][0]["meta"] == { -# "my_key": "my_value", -# "osmosis_keep_description": True, -# } -# assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag3", "my_tag4"]) -# -# assert target_node.columns["customer_id"].description == column_description_not_updated -# assert target_node.columns["customer_id"].meta == { -# "my_key": "my_value", -# "osmosis_keep_description": True, -# } -# assert set(target_node.columns["customer_id"].tags) == set(["my_tag3", "my_tag4"]) -# -# -# def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta_and_osmosis_keep_description(): -# manifest = load_manifest() -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ -# "customer_id" -# ].description = "THIS COLUMN IS UPDATED FOR TESTING" -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { -# "my_key": "my_value", -# } -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ -# "my_tag1", -# "my_tag2", -# ] -# -# column_description_not_updated = ( -# "This column will not be updated as it has the 'osmosis_keep_description' attribute" -# ) -# target_node_name = "model.jaffle_shop_duckdb.customers" -# -# manifest.nodes[target_node_name].columns[ -# "customer_id" -# ].description = column_description_not_updated -# manifest.nodes[target_node_name].columns["customer_id"].meta = { -# "my_key": "my_value", -# "osmosis_keep_description": True, -# } -# -# target_node = manifest.nodes[target_node_name] -# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( -# manifest, target_node, placeholders=[""] -# ) -# yaml_file_model_section = { -# "columns": [ -# { -# "name": "customer_id", -# } -# ] -# } -# undocumented_columns = target_node.columns.keys() -# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( -# undocumented_columns, -# target_node, -# yaml_file_model_section, -# knowledge, -# skip_add_tags=False, -# skip_merge_meta=False, -# add_progenitor_to_meta=True, -# ) -# -# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" -# assert yaml_file_model_section["columns"][0]["description"] == column_description_not_updated -# assert yaml_file_model_section["columns"][0]["meta"] == { -# "my_key": "my_value", -# "osmosis_keep_description": True, -# "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", -# } -# assert set(yaml_file_model_section["columns"][0]["tags"]) == set(["my_tag1", "my_tag2"]) -# -# assert target_node.columns["customer_id"].description == column_description_not_updated -# assert target_node.columns["customer_id"].meta == { -# "my_key": "my_value", -# "osmosis_keep_description": True, -# "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", -# } -# assert set(target_node.columns["customer_id"].tags) == set(["my_tag1", "my_tag2"]) -# -# -# def test_update_undocumented_columns_with_prior_knowledge_with_add_inheritance_for_specified_keys(): -# manifest = load_manifest() -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ -# "customer_id" -# ].description = "THIS COLUMN IS UPDATED FOR TESTING" -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].meta = { -# "my_key": "my_value" -# } -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"].tags = [ -# "my_tag1", -# "my_tag2", -# ] -# manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"]._extra = { -# "policy_tags": ["my_policy_tag1"], -# } -# -# target_node_name = "model.jaffle_shop_duckdb.customers" -# manifest.nodes[target_node_name].columns["customer_id"].tags = set( -# [ -# "my_tag3", -# "my_tag4", -# ] -# ) -# manifest.nodes[target_node_name].columns["customer_id"].meta = { -# "my_key": "my_old_value", -# "my_new_key": "my_new_value", -# } -# target_node = manifest.nodes[target_node_name] -# knowledge = ColumnLevelKnowledgePropagator.get_node_columns_with_inherited_knowledge( -# manifest, target_node, placeholders=[""] -# ) -# yaml_file_model_section = { -# "columns": [ -# { -# "name": "customer_id", -# } -# ] -# } -# undocumented_columns = target_node.columns.keys() -# ColumnLevelKnowledgePropagator.update_undocumented_columns_with_prior_knowledge( -# undocumented_columns, -# target_node, -# yaml_file_model_section, -# knowledge, -# skip_add_tags=False, -# skip_merge_meta=False, -# add_progenitor_to_meta=False, -# add_inheritance_for_specified_keys=["policy_tags"], -# ) -# -# assert yaml_file_model_section["columns"][0]["name"] == "customer_id" -# assert ( -# yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" -# ) -# assert yaml_file_model_section["columns"][0]["meta"] == { -# "my_key": "my_value", -# "my_new_key": "my_new_value", -# } -# assert set(yaml_file_model_section["columns"][0]["tags"]) == set( -# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] -# ) -# assert set(yaml_file_model_section["columns"][0]["policy_tags"]) == set(["my_policy_tag1"]) -# -# assert target_node.columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" -# assert target_node.columns["customer_id"].meta == { -# "my_key": "my_value", -# "my_new_key": "my_new_value", -# } -# assert set(target_node.columns["customer_id"].tags) == set( -# ["my_tag1", "my_tag2", "my_tag3", "my_tag4"] -# ) -# assert set(target_node.columns["customer_id"]._extra["policy_tags"]) == set(["my_policy_tag1"]) -# -# +def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta( + yaml_context: YamlRefactorContext, +): + # 1) Setup the manifest + node references + manifest = yaml_context.project.manifest + upstream_col = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"] + upstream_col.description = "THIS COLUMN IS UPDATED FOR TESTING" + upstream_col.meta = {"my_key": "my_value"} + upstream_col.tags = ["my_tag1", "my_tag2"] + + target_node = manifest.nodes["model.jaffle_shop_duckdb.customers"] + + # 2) Configure the refactor settings + yaml_context.settings.skip_add_tags = False + yaml_context.settings.skip_merge_meta = False + yaml_context.settings.add_progenitor_to_meta = True + yaml_context.settings.force_inherit_descriptions = True # if you want forced overwrite + + # 3) Perform the inheritance + # The caches are used internally to reduce repeated YAML loads; we patch them with empty dicts + # to ensure that the test sees a "fresh" environment each time + with ( + mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), + mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), + ): + inherit_upstream_column_knowledge(yaml_context, target_node) + yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + + # 4) Validate the Node + cid = target_node.columns["customer_id"] + assert cid.description == "THIS COLUMN IS UPDATED FOR TESTING" + assert cid.meta == { + "my_key": "my_value", + "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", + } + assert set(cid.tags) == {"my_tag1", "my_tag2"} + + # 5) Validate the in-memory YAML buffer + assert yaml_file_model_section + col_0 = yaml_file_model_section["columns"][0] + assert col_0["name"] == "customer_id" + assert col_0["description"] == "THIS COLUMN IS UPDATED FOR TESTING" + assert col_0["meta"] == { + "my_key": "my_value", + "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", + } + assert set(col_0["tags"]) == {"my_tag1", "my_tag2"} + + +# NOTE: this test is currently moot, as the default behavior is to keep the description if its not a placeholder +# but I leave the test in case we want to reintroduce a fine grained osmosis_keep_description meta attr +def test_update_undocumented_columns_with_prior_knowledge_with_osmosis_keep_description( + yaml_context: YamlRefactorContext, +): + # 1) Setup + manifest = yaml_context.project.manifest + stg_customers_col = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ + "customer_id" + ] + stg_customers_col.description = "THIS COLUMN IS UPDATED FOR TESTING" + stg_customers_col.meta = {"my_key": "my_value"} + stg_customers_col.tags = ["my_tag1", "my_tag2"] + + # The "downstream" node + column_description_not_updated = ( + "This column will not be updated as it has the 'osmosis_keep_description' attribute" + ) + target_node_name = "model.jaffle_shop_duckdb.customers" + target_node = manifest.nodes[target_node_name] + + # The local column + target_node.columns["customer_id"].description = column_description_not_updated + target_node.columns["customer_id"].tags = ["my_tag3", "my_tag4"] + target_node.columns["customer_id"].meta = { + "my_key": "my_value", + # "osmosis_keep_description": True, NOTE: this is now the default, and much more sane + } + + # 2) Configure flags + yaml_context.settings.skip_add_tags = True + yaml_context.settings.skip_merge_meta = True + yaml_context.settings.add_progenitor_to_meta = False + yaml_context.settings.force_inherit_descriptions = ( + False # This is the default, but we'll be explicit + ) + + # 3) Perform inheritance + with ( + mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), + mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), + ): + inherit_upstream_column_knowledge(yaml_context, target_node) + yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + + # 4) Assert Node + cid = target_node.columns["customer_id"] + assert cid.description == column_description_not_updated + assert cid.meta == { + "my_key": "my_value", + # "osmosis_keep_description": True, + } + assert set(cid.tags) == {"my_tag3", "my_tag4"} + + # 5) Assert YAML + assert yaml_file_model_section + col_0 = yaml_file_model_section["columns"][0] + assert col_0["name"] == "customer_id" + assert col_0["description"] == column_description_not_updated + + +def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta_and_osmosis_keep_description( + yaml_context: YamlRefactorContext, +): + # 1) Setup + manifest = yaml_context.project.manifest + upstream = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"] + upstream.description = "THIS COLUMN IS UPDATED FOR TESTING" + upstream.meta = {"my_key": "my_value"} + upstream.tags = ["my_tag1", "my_tag2"] + + column_description_not_updated = ( + "This column will not be updated as the 'force_inherit_descriptions' flag is false" + ) + target_node_name = "model.jaffle_shop_duckdb.customers" + target_node = manifest.nodes[target_node_name] + + # The local column is partially documented & has keep desc + target_node.columns["customer_id"].description = column_description_not_updated + target_node.columns["customer_id"].meta = { + "my_key": "my_value", + } + + # 2) Configure + yaml_context.settings.skip_add_tags = False + yaml_context.settings.skip_merge_meta = False + yaml_context.settings.add_progenitor_to_meta = True + yaml_context.settings.force_inherit_descriptions = False + + # 3) Inherit + with ( + mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), + mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), + ): + inherit_upstream_column_knowledge(yaml_context, target_node) + model_section = _get_member_yaml(yaml_context, target_node) + + # 4) Assert Node + cid = target_node.columns["customer_id"] + # Since we have keep_description, the desc is not overwritten + assert cid.description == column_description_not_updated + # meta is merged with the upstream & also has progenitor + assert cid.meta == { + "my_key": "my_value", + "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", + } + # unify tags + # upstream => my_tag1, my_tag2 + # local => might be empty => or we can set them if we want + # let's assume local is empty, so final is upstream + assert set(cid.tags) == {"my_tag1", "my_tag2"} + + # 5) Assert YAML + assert model_section + col_0 = model_section["columns"][0] + assert col_0["name"] == "customer_id" + # keep desc + assert col_0["description"] == column_description_not_updated + assert col_0["meta"] == { + "my_key": "my_value", + "osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers", + } + + +def test_update_undocumented_columns_with_prior_knowledge_with_add_inheritance_for_specified_keys( + yaml_context: YamlRefactorContext, +): + # 1) Setup + manifest = yaml_context.project.manifest + upstream_col = manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns["customer_id"] + upstream_col.description = "THIS COLUMN IS UPDATED FOR TESTING" + upstream_col.meta = {"my_key": "my_value"} + upstream_col.tags = ["my_tag1", "my_tag2"] + # We'll store an `_extra` dict for a custom key + upstream_col._extra = {"policy_tags": ["my_policy_tag1"]} + + target_node_name = "model.jaffle_shop_duckdb.customers" + target_node = manifest.nodes[target_node_name] + # local partial + cinfo = target_node.columns["customer_id"] + cinfo.tags = ["my_tag3", "my_tag4"] + cinfo.meta = { + "my_key": "my_old_value", + "my_new_key": "my_new_value", + } + + # 2) Configure + yaml_context.settings.skip_add_tags = False + yaml_context.settings.skip_merge_meta = False + yaml_context.settings.add_progenitor_to_meta = False + # we want to also inherit "policy_tags" + yaml_context.settings.add_inheritance_for_specified_keys = ["policy_tags"] + yaml_context.settings.force_inherit_descriptions = True + + # 3) Inherit + with ( + mock.patch("dbt_osmosis.core.osmosis._YAML_BUFFER_CACHE", {}), + mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), + ): + inherit_upstream_column_knowledge(yaml_context, target_node) + section = _get_member_yaml(yaml_context, target_node) + + # 4) Assert Node + cid = target_node.columns["customer_id"] + assert cid.description == "THIS COLUMN IS UPDATED FOR TESTING" + # meta is merged + assert cid.meta == { + "my_key": "my_old_value", + "my_new_key": "my_new_value", + } + # tags unify + assert set(cid.tags) == {"my_tag1", "my_tag2", "my_tag3", "my_tag4"} + # `_extra` might be stored if your code does so. e.g. + # cid._extra["policy_tags"] == [...], but we skip that if not needed. + + # 5) Assert YAML + assert section + col_0 = section["columns"][0] + assert col_0["name"] == "customer_id" + assert col_0["description"] == "THIS COLUMN IS UPDATED FOR TESTING" + assert col_0["meta"] == { + "my_key": "my_old_value", + "my_new_key": "my_new_value", + } + assert set(col_0["tags"]) == {"my_tag1", "my_tag2", "my_tag3", "my_tag4"} + # confirm "policy_tags" is added + assert set(col_0["policy_tags"]) == {"my_policy_tag1"} + + # def test_update_undocumented_columns_with_osmosis_prefix_meta_with_prior_knowledge(): # manifest = load_manifest() # manifest.nodes["model.jaffle_shop_duckdb.stg_customers"].columns[ From 0b62d64e9e8456a025055fe5f4577cd88c6913f9 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 13:09:09 -0700 Subject: [PATCH 32/46] feat: be more liberal in accepting config keys of dbt_osmosis such as to support config block syntax --- src/dbt_osmosis/core/osmosis.py | 27 +++++++++++++------ .../test_column_level_knowledge_propagator.py | 5 ++-- 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index fed19bc8..ba359c9a 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -383,14 +383,20 @@ def mutated(self) -> bool: @property def source_definitions(self) -> dict[str, t.Any]: """The source definitions from the dbt project config.""" - defs = self.project.config.vars.to_dict().get("dbt-osmosis", {}).copy() + c = self.project.config.vars.to_dict() + defs = _find_first( + [c.get(k, {}) for k in ["dbt-osmosis", "dbt_osmosis"]], lambda v: bool(v), {} + ) defs.pop(SKIP_PATTERNS, None) return defs @property def skip_patterns(self) -> list[str]: """The column name skip patterns from the dbt project config.""" - defs = self.project.config.vars.to_dict().get("dbt-osmosis", {}).copy() + c = self.project.config.vars.to_dict() + defs = _find_first( + [c.get(k, {}) for k in ["dbt-osmosis", "dbt_osmosis"]], lambda v: bool(v), {} + ) return defs.pop(SKIP_PATTERNS, []) def read_catalog(self) -> CatalogArtifact | None: @@ -716,7 +722,12 @@ def _get_yaml_path_template(context: YamlRefactorContext, node: ResultNode) -> s if isinstance(def_or_path, dict): return def_or_path.get("path") return def_or_path - path_template = node.config.extra.get("dbt-osmosis", node.unrendered_config.get("dbt-osmosis")) + conf = [ + c.get(k) + for k in ["dbt-osmosis", "dbt_osmosis"] + for c in [node.config.extra, node.unrendered_config] + ] + path_template = _find_first(t.cast(list[str | None], conf), lambda v: v is not None) if not path_template: raise MissingOsmosisConfig( f"Config key `dbt-osmosis: ` not set for model {node.name}" @@ -1326,12 +1337,12 @@ def get_candidates(self, name: str, node: ResultNode, context: DbtProjectContext variants = [] prefix = t.cast( str, - node.config.extra.get( - "dbt-osmosis-prefix", node.unrendered_config.get("dbt-osmosis-prefix") - ), + node.meta.get("osmosis_prefix") # Can be set in the node yml (legacy support) + or node.config.extra.get("dbt_osmosis_prefix") # Or in dbt_project.yml / {{ config() }} + or node.unrendered_config.get("dbt_osmosis_prefix"), ) - if prefix and name.startswith(prefix): - variants.append(name[len(prefix) :]) + if prefix: + variants.append(name.removeprefix(prefix)) return variants diff --git a/tests/test_column_level_knowledge_propagator.py b/tests/test_column_level_knowledge_propagator.py index cc18c071..2af5312f 100644 --- a/tests/test_column_level_knowledge_propagator.py +++ b/tests/test_column_level_knowledge_propagator.py @@ -441,6 +441,8 @@ def test_update_undocumented_columns_with_prior_knowledge_with_osmosis_keep_desc assert col_0["description"] == column_description_not_updated +# NOTE: this test is currently moot, as the default behavior is to keep the description if its not a placeholder +# but I leave the test in case we want to reintroduce a fine grained osmosis_keep_description meta attr def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta_and_osmosis_keep_description( yaml_context: YamlRefactorContext, ): @@ -488,8 +490,7 @@ def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta } # unify tags # upstream => my_tag1, my_tag2 - # local => might be empty => or we can set them if we want - # let's assume local is empty, so final is upstream + # local => is empty, so final is upstream assert set(cid.tags) == {"my_tag1", "my_tag2"} # 5) Assert YAML From 5b31abf59a0b7fd3252580ab8441768ec8a2cf7e Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Tue, 31 Dec 2024 13:44:54 -0700 Subject: [PATCH 33/46] feat: handle variants during knowledge graph construction and scope graph to node, add example prefix in config block --- demo_duckdb/jaffle_shop.duckdb | Bin 1323008 -> 1323008 bytes demo_duckdb/models/orders_prefix.sql | 60 ++++++++++++++++++ demo_duckdb/models/schema.yml | 30 ++++++++- src/dbt_osmosis/core/osmosis.py | 32 ++++++---- .../test_column_level_knowledge_propagator.py | 2 +- 5 files changed, 109 insertions(+), 15 deletions(-) create mode 100644 demo_duckdb/models/orders_prefix.sql diff --git a/demo_duckdb/jaffle_shop.duckdb b/demo_duckdb/jaffle_shop.duckdb index 045067b60dbed9ce0b59db0a4534345d874da8f9..54e44cbca61606f9161f2360404e1ef342eae25b 100644 GIT binary patch delta 13456 zcmeHN4^)&@7XR+>!3;Bl4F3fYP|=k9GoX~1t+J^{TS{zMkJfPqWCn!LK|pl(w25se zJ7vP~6|Yt+a?3WiV<6?UR!dfjq^{zh&A({ao^@Rl+1k^V)1)q{|$> znfLB{@4ox)z3;yJdmlBYb8AlLHd;a>2Am8&`1d1;b6gRzFt{bfaymTp*o4#h8=onk ze3!ES{#Zz6#HfR%R_Lngctql#D7$1-qO8rjvwG#%=_8d3)<<-mwD_msbB1C`BCGa8 zoH9FOL{Qe)bh3RvEND5LF)@sGU$=9l;>KQ}YWk%SrnCAY$u7wbsZp==d=aD6 zO&!&%>FsFN1lelSFlBc3n2@u2hjdm?nq?TI?CG#7E9_R~!k!S)ZGbdVV*;yEj?}hR zD>5x=ca!&f-V)x08Qot;T%&3Pu-8>+Kj`uhE3?7N~1cw$^D^=K!+e zIuxr|N=Nu?%^T2ZQ-#)i{h*fra`Q_I@_%G z;Kwe3L&6xD^uj0^_ybQ*-xph+WOCyRjCp+>JX?wp{Kjb5@+a4A7~WqDHX0WmBt ze%v;SHw{y6*)=Al6Zc!^brlzll4M{i>_0-d>tRUo^TD+KBg7}1vc2wqgbbwnvRbj! z4FY}3oVucLQBQ<$te1r=A3Pf+B8V0rSUh0c+S?>zDhtm2#|5NOZC`80MO*8S@wm8i zXa8}5xx1EXv8+a<&*E1LeOf`JLf#no);^YUutT56$!j%@wkY}8;WT9k`;L!wo)WP< zho`ih;GdJS#7u;~nXZ&bmY4?yJwJPiL?}^MgC+CK2;35?*%)h15!n^bNv{NUv@nAO zE;e%yF&L+#aDhdlVfBXwH0%VukX-X9G4y{zHq<1+q}u`sekSR>KN?fR`}mSXYDiUv zu15tET4F*n(OdWNxal{9o#aAJZW+zd#TKC4eAVb(CsM^cUaM;RLzP71%0$6)EMH^b zCkivT80^=Q##x9q?~;V5+T?tgP=Of0n*Dk3j<_=G)j&>)PbugiQwPHcA9LK$(o|Lj zVp%-Rf2G@?yf`ZkjdYYwHmxPU?n{O=!uo1f_$atHO@6GTrfB*C6 zc}ne0qyC(M-eTx5e;-E1ra^@6oPp)`D%~N~8CY%~NN%)X2LF>@y&I#kcRX4DHbg3q zT?*yx!<08JjR`qta7gD2Kg6wvQnPY^@}g&`W*W_y*W`^ve<-DQVRWyGeNPMD6iUgn z;SeJ>MSmb=my05#^9V*`E-IB){yzfg#ogb1ASD||!Sx+U@kTUz#whMG>Os!qDfb_K z&WHn2JH)?r3Vi+CB>TkeAMWg9rtvk__jKd$O3{hoG3u&NU`+JQ=?|}2z zS)<;a(%fr}jz&#N#mX_Bh+*lzAgT%t9nivfGPM;xWbu{eUhMS(b&6n`AI145L@+^l zuB_F^cIx@Y;@%Wbd{=({~PzT0~! z4Bl08Glq+IeT6z(hSHfa*5ItLJ1Qz|8KkZNVh0(G%S);jrEp?Of%+yfrZQ!uk#tl5 z#>E0?ARPCHlz3P$qAL3kh}rJyp!X(Nm6kSfZvDUtMVtJAd<;gz`kA`}eo>m0*h|i#1l02Sv`YH^HrOTkyX5DRR)%j6(l8(PZ zArP{-7{*R$3ECwERW4inz63Z(=a3q3pc{#lg^75CCCILo)E$A5u$S~8JKa&4)E+~V zTs{JEnZFJ0gXwA{`Z8_gb(N_rnCve`^_?%lHy}%o!bqJvm?STPc-_0fob#!p5S969 zU!4CFz}XeTW>Psn5W+a?ln_!=3Te9MLl|FZB5RL97IZe;3(?S}F*CA}Jg@IhU0?muT(phLXe+kQQ7d#iDyrN0f`D)kbpr2}qAGk|Le9 z;*z2Oo=1&j=N|qFqc|3cme|tQ8q);C{`YX>*S(<|SR7#){aN!FJmXPrd zh=}cDy~vE@FM;VI04->!2APC9!A9!3;3l%<6jU0hc=Y4`@D%)u#V+=L3AC%*Spn%m z+UTv-kROV1_9gLx;6}0t-{|4VbZ59S=`ZTdM}GqM_5T^*_pbogeF8W8mHq^-KYUiXq?d$B^$_BN)<+7%~h(IfV3hfTVjME*{k2 zBjYc9=qCst9dXsdAUQR;<`mwSHH5oTj1)^6V7X$=N^^IXx5{^ra;t106;{hUlY{}DtrmOMXIK{N<{Gv&+dfWcjDK(2KSB#LE|*3XzBaV( zH{0}yJ`Hlf;GF7> z^R|{w|DfrG7vu#88)qv!HvEA!W>_|dGZo{E!L)!gMrNXHbJw&NrazUFr0gi%GH-IM z(q7o}opKEw_Vi^{W#t_(b3n>3(%$)WuJXA18fPc1zIHO=cMtEVt>nUf)iaV^GI) z(&f1%*D9OH{Z`rTiik4`Eo1kt4D23%rU%zi+t!6Yv9)bydgjp@{){H8ta8i*q+!!& zq!sdxS`WT_mDsE?!Jg#P40$I`79l3P0hjE-CBNt1o+*!q8;83Z9=xaij`{C64sEdi ztlrTc>AI&Ob^aFn(D6m>^{MkWl!ZP~ws4DM!+T@Mo=iE#RMUT|2&7Po3|Ix`|oJnbU6} zF^>3N)l)dA_ncHq`%p`de>*Lu=|I&_w6A^$>gOeL?iSe;sGuVkjZZ#b4#&0*U&s_> zUaoUjS)b;p inp=UCPBg#iiipC0D3Mp6&obi^hR*=^`K<87PyP!u9C5V( delta 10637 zcmeHMdr(x@8Nc7|-DP)KQRi zud#JtJ#Ftyn~Lf5Vf%+dD6=Cm6rF|y$(X|xT zsTS#3wrEx-yLv5^{q49bespv_8lv@*sGZLmYD>%~O#D~tktvIfblAzqJQwP*vrK_AIha{zas_Wq|KClB8$cfNRn3~Nfrs=r-a@x z9?a%tZzkj%teZ)S-{BOU9UZdhofhM5;TbuQ#O-&**}`q3B^6V?md|z?2KdJcDkZz$ zk;#i;q%+u+H|%IOZ@AuML1CZYk--9&QkdFNh^!>Nn^dGtA-Cp8r?0VKT?PwwnYEoV zrm{oLi(fBM3sfIfKia$OtwyG&wH!M0w3dk8f^{jPX(iNz)Y|T6g~A?{Gr2Vz8>80z z*G6H@t)81(^XbPIC-_|suDodknbRK?SV2c^`XYMKi<` zT?YFf;S#2DS)mv!rov9ie#cZ49jq%N7nT9iX5m(_NWASSDCWpa=Yxq{k^b>N0JktB zY_xnuEi>`Fh~6_LHBv-kn*)Z!JgXr6~C6YgNf_@&?5HBvyl(eT1KI+PQx(zueo%aqV+ zO|4YqA9Pc3uM&EmAVc1Z%lU9hVKfx#7SFxxN}CD`e1P0YI|3QpsJcnyZNwZ=Xbyc951-J zvX>?T(PV40iwJvm{N31c9y=4;a#RkJzF;hlEr*?adm7Z6@+HFL#S;L>?EWwlTglPz z1QG@W1ri3el+b5s*EGQ-fStm0>!zrJLVW;SkD-+)uQH@_SXqVnPW&Bx-|tG8gh~PR zja)3yp`-=1Ilxmp7NU~BkJ?1PD=?jpvLj$g@8o<4GU$Y7D#z}? zlL90+MVZjk7n)Gp6QMsT+7*Q|$IrJxaslhx)QFs%Y-|xDWj`6p90RI}dOz_d74(l! z^l~f?Eoi4|%tV=XH5eT75pJ1(}$1fHJ&|i9ygEDS^HTmBecCj5P~K8V2bp2IoBfB zcOx6@e@agTB)7om@`4Rf=I7=L)5mAFxg$M7ge=~|!OabRmtC(u7a}K%8C*6>cCGj{ zJqev8_#&0?_OY6we4{Ex#;ulfc?xY8f<9yHzaq*gFGy{F?kB78`>)al*A5Pw}$ic+3VL! z%$j+_2pGtA@Xh8~}>#36jVG_q%4yUW&&+HeYF(>RpI4tIb zqamtWa>80yBE`ILc-1LiLN81ToUikieX#cPB8oX-M8YR?!sI|q4%^}_1jhwB%D6E? zUZX7(`^LsNxkd_jNji-w-ZxC6W8POk zD24a^@La_Ez9bywUG=-lj9c=v+Ty8(Jj2@IG1vNP0{7ClRr|MNwU;LHjrIO}9q&87t_I{`dvc#Er#im znl1D^{&DJK{ui+a6ooys7#@(C|65URBCBB{+1x~|WR*;k#LXydD257QBylpg824@` z;APBG?ol6eNw)palVp0PKWU#Z6=js6v&ZZwB~M2xu@-_628{R`wol8x|rdzES#w+Xj7 Npj5|Q<&r#Q{5R6tE|CBL diff --git a/demo_duckdb/models/orders_prefix.sql b/demo_duckdb/models/orders_prefix.sql new file mode 100644 index 00000000..4c715667 --- /dev/null +++ b/demo_duckdb/models/orders_prefix.sql @@ -0,0 +1,60 @@ +{{ config( + dbt_osmosis_prefix="o_", +) }} + +{% set payment_methods = ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] %} + +with orders as ( + + select * from {{ ref('stg_orders') }} + +), + +payments as ( + + select * from {{ ref('stg_payments') }} + +), + +order_payments as ( + + select + order_id, + + {% for payment_method in payment_methods %} + sum(case when payment_method = '{{ payment_method }}' then amount else 0 end) as {{ payment_method }}_amount, + {% endfor %} + + sum(amount) as total_amount + + from payments + + group by order_id + +), + +final as ( + + select + orders.order_id as o_order_id, + orders.customer_id as o_customer_id, + orders.order_date as o_order_date, + orders.status as o_status, + + {% for payment_method in payment_methods %} + + order_payments.{{ payment_method }}_amount as o_{{ payment_method }}_amount, + + {% endfor -%} + + order_payments.total_amount as o_amount + + from orders + + + left join order_payments + on orders.order_id = order_payments.order_id + +) + +select * from final diff --git a/demo_duckdb/models/schema.yml b/demo_duckdb/models/schema.yml index be701013..b43e1b50 100644 --- a/demo_duckdb/models/schema.yml +++ b/demo_duckdb/models/schema.yml @@ -1,5 +1,4 @@ version: 2 - models: - name: customers description: This table has basic information about a customer, as well as some derived facts based on a customer's orders @@ -83,3 +82,32 @@ models: description: Total amount (AUD) of the order tests: - not_null + - name: orders_prefix + columns: + - name: o_order_id + description: '' + data_type: INTEGER + - name: o_customer_id + description: '' + data_type: INTEGER + - name: o_order_date + description: '' + data_type: DATE + - name: o_status + description: '{{ doc("orders_status") }}' + data_type: VARCHAR + - name: o_credit_card_amount + description: '' + data_type: DOUBLE + - name: o_coupon_amount + description: '' + data_type: DOUBLE + - name: o_bank_transfer_amount + description: '' + data_type: DOUBLE + - name: o_gift_card_amount + description: '' + data_type: DOUBLE + - name: o_amount + description: '' + data_type: DOUBLE diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index ba359c9a..1f8a1c98 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -1098,6 +1098,13 @@ def _build_column_knowledge_graph( """Generate a column knowledge graph for a dbt model or source node.""" tree = _build_node_ancestor_tree(context.project.manifest, node) + pm = get_plugin_manager() + node_column_variants: dict[str, list[str]] = {} + for column_name, _ in node.columns.items(): + variants = node_column_variants.setdefault(column_name, [column_name]) + for v in pm.hook.get_candidates(name=column_name, node=node, context=context.project): + variants.extend(t.cast(list[str], v)) + column_knowledge_graph: dict[str, dict[str, t.Any]] = {} for generation in reversed(sorted(tree.keys())): ancestors = tree[generation] @@ -1108,15 +1115,21 @@ def _build_column_knowledge_graph( if not isinstance(ancestor, (SourceDefinition, SeedNode, ModelNode)): continue - for name, metadata in ancestor.columns.items(): + for name, _ in node.columns.items(): graph_node = column_knowledge_graph.setdefault(name, {}) + for variant in node_column_variants[name]: + incoming = ancestor.columns.get(variant) + if incoming is not None: + break + else: + continue + graph_edge = incoming.to_dict() + if context.settings.add_progenitor_to_meta: graph_node.setdefault("meta", {}).setdefault( "osmosis_progenitor", ancestor.unique_id ) - graph_edge = metadata.to_dict() - if context.settings.use_unrendered_descriptions: raw_yaml = _get_member_yaml(context, ancestor) or {} raw_columns = t.cast(list[dict[str, t.Any]], raw_yaml.get("columns", [])) @@ -1125,7 +1138,7 @@ def _build_column_knowledge_graph( lambda c: normalize_column_name( c["name"], context.project.config.credentials.type ) - == name, + in node_column_variants[name], {}, ) if unrendered_description := raw_column_metadata.get("description"): @@ -1183,15 +1196,8 @@ def inherit_upstream_column_knowledge( column_knowledge_graph = _build_column_knowledge_graph(context, node) kwargs = None for name, node_column in node.columns.items(): - variants: list[str] = [name] - pm = get_plugin_manager() - for v in pm.hook.get_candidates(name=name, node=node, context=context.project): - variants.extend(t.cast(list[str], v)) - for variant in variants: - kwargs = column_knowledge_graph.get(variant) - if kwargs is not None: - break - else: + kwargs = column_knowledge_graph.get(name) + if kwargs is None: continue updated_metadata = {k: v for k, v in kwargs.items() if v is not None and k in inheritable} diff --git a/tests/test_column_level_knowledge_propagator.py b/tests/test_column_level_knowledge_propagator.py index 2af5312f..fe4d02eb 100644 --- a/tests/test_column_level_knowledge_propagator.py +++ b/tests/test_column_level_knowledge_propagator.py @@ -305,7 +305,7 @@ def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta( assert target_node_columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" assert ( target_node_columns["customer_id"].meta == {"my_key": "my_value"} - ) # NOTE: nodes meta is not mutated beyond our original mutation in the manifest node since skip_merge_tags is True + ) # NOTE: nodes meta is not mutated beyond our original mutation in the manifest node since skip_merge_meta is True assert sorted(target_node_columns["customer_id"].tags) == [ "my_tag1", "my_tag2", From 22a75ec093bae0e4b99dcb961ff70dddbfe339cf Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Wed, 1 Jan 2025 07:08:12 -0700 Subject: [PATCH 34/46] feat: env vars support in discovery funcs --- src/dbt_osmosis/core/osmosis.py | 41 ++++++++++++++++----------------- 1 file changed, 20 insertions(+), 21 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 1f8a1c98..809f8889 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -68,7 +68,12 @@ def discover_project_dir() -> str: - """Return the directory containing a dbt_project.yml if found, else the current dir.""" + """Return the directory containing a dbt_project.yml if found, else the current dir. Checks DBT_PROJECT_DIR first if set.""" + if "DBT_PROJECT_DIR" in os.environ: + project_dir = Path(os.environ["DBT_PROJECT_DIR"]) + if project_dir.is_dir(): + return str(project_dir.resolve()) + logger.warning(f"DBT_PROJECT_DIR {project_dir} is not a valid directory.") cwd = Path.cwd() for p in [cwd] + list(cwd.parents): if (p / "dbt_project.yml").exists(): @@ -77,7 +82,12 @@ def discover_project_dir() -> str: def discover_profiles_dir() -> str: - """Return the directory containing a profiles.yml if found, else ~/.dbt.""" + """Return the directory containing a profiles.yml if found, else ~/.dbt. Checks DBT_PROFILES_DIR first if set.""" + if "DBT_PROFILES_DIR" in os.environ: + profiles_dir = Path(os.environ["DBT_PROFILES_DIR"]) + if profiles_dir.is_dir(): + return str(profiles_dir.resolve()) + logger.warning(f"DBT_PROFILES_DIR {profiles_dir} is not a valid directory.") if (Path.cwd() / "profiles.yml").exists(): return str(Path.cwd().resolve()) return str(Path.home() / ".dbt") @@ -93,25 +103,14 @@ class DbtConfiguration: profile: str | None = None threads: int = 1 single_threaded: bool = True - - _vars: str | dict[str, t.Any] = field(default_factory=dict, init=False) + vars: dict[str, t.Any] = field(default_factory=dict) def __post_init__(self) -> None: set_invocation_context(get_env()) - if self.threads != 1: + if self.threads > 1: self.single_threaded = False - - @property - def vars(self) -> dict[str, t.Any]: - if isinstance(self._vars, str): - return json.loads(self._vars) - return self._vars - - @vars.setter - def vars(self, value: t.Any) -> None: - if not isinstance(value, (str, dict)): - raise ValueError("DbtConfiguration.vars must be a string or dict") - self._vars = value + elif self.threads < 1: + raise ValueError("DbtConfiguration.threads must be >= 1") def config_to_namespace(cfg: DbtConfiguration) -> argparse.Namespace: @@ -1366,8 +1365,7 @@ def get_plugin_manager(): def run_example_compilation_flow() -> None: - config = DbtConfiguration(target="some_target", threads=2) - config.vars = {"foo": "bar"} + config = DbtConfiguration(target="some_target", threads=2, vars={"foo": "bar"}) proj_ctx = create_dbt_project_context(config) node = compile_sql_code(proj_ctx, "select '{{ 1+1 }}' as col") @@ -1378,8 +1376,9 @@ def run_example_compilation_flow() -> None: if __name__ == "__main__": - c = DbtConfiguration(project_dir="demo_duckdb", profiles_dir="demo_duckdb") - c.vars = {"dbt-osmosis": {}} + c = DbtConfiguration( + project_dir="demo_duckdb", profiles_dir="demo_duckdb", vars={"dbt-osmosis": {}} + ) project = create_dbt_project_context(c) yaml_context = YamlRefactorContext( From cfe4e1992301508e200ddb5de9605829f0ab04e6 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Wed, 1 Jan 2025 11:45:40 -0700 Subject: [PATCH 35/46] chore: touch up some things --- src/dbt_osmosis/core/osmosis.py | 36 ++++++++++++++------------------- 1 file changed, 15 insertions(+), 21 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 809f8889..effa2382 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -150,8 +150,8 @@ class DbtProjectContext: _adapter_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) _manifest_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) - _adapter: BaseAdapter | None = None - _adapter_created_at: float = 0.0 + _adapter: BaseAdapter | None = field(default=None, init=False) + _adapter_created_at: float = field(default=0.0, init=False) @property def is_adapter_expired(self) -> bool: @@ -288,18 +288,6 @@ class RestructureDeltaPlan: operations: list[RestructureOperation] = field(default_factory=list) -class MissingOsmosisConfig(Exception): - """Raised when an osmosis configuration is missing.""" - - pass - - -class InvalidOsmosisConfig(Exception): - """Raised when an osmosis configuration is invalid.""" - - pass - - @dataclass class YamlRefactorSettings: """Settings for yaml based refactoring operations.""" @@ -714,6 +702,10 @@ def _describe(rel: BaseRelation) -> dict[str, t.Any]: reload_manifest(context.project) +class MissingOsmosisConfig(Exception): + """Raised when an osmosis configuration is missing.""" + + def _get_yaml_path_template(context: YamlRefactorContext, node: ResultNode) -> str | None: """Get the yaml path template for a dbt model or source node.""" if node.resource_type == NodeType.Source: @@ -723,8 +715,8 @@ def _get_yaml_path_template(context: YamlRefactorContext, node: ResultNode) -> s return def_or_path conf = [ c.get(k) - for k in ["dbt-osmosis", "dbt_osmosis"] - for c in [node.config.extra, node.unrendered_config] + for k in ("dbt-osmosis", "dbt_osmosis") + for c in (node.config.extra, node.unrendered_config) ] path_template = _find_first(t.cast(list[str | None], conf), lambda v: v is not None) if not path_template: @@ -1364,14 +1356,15 @@ def get_plugin_manager(): # NOTE: usage example of the more FP style module below -def run_example_compilation_flow() -> None: - config = DbtConfiguration(target="some_target", threads=2, vars={"foo": "bar"}) - proj_ctx = create_dbt_project_context(config) +def run_example_compilation_flow(c: DbtConfiguration) -> None: + c.vars["foo"] = "bar" + + context = create_dbt_project_context(c) - node = compile_sql_code(proj_ctx, "select '{{ 1+1 }}' as col") + node = compile_sql_code(context, "select '{{ 1+1 }}' as col_{{ var('foo') }}") print("Compiled =>", node.compiled_code) - resp = execute_sql_code(proj_ctx, "select '{{ 1+2 }}' as col") + resp = execute_sql_code(context, "select '{{ 1+2 }}' as col_{{ var('foo') }}") print("Resp =>", resp) @@ -1379,6 +1372,7 @@ def run_example_compilation_flow() -> None: c = DbtConfiguration( project_dir="demo_duckdb", profiles_dir="demo_duckdb", vars={"dbt-osmosis": {}} ) + run_example_compilation_flow(c) project = create_dbt_project_context(c) yaml_context = YamlRefactorContext( From 6a786d340ac7d9020c8c4f24d7c5d0c3386a03dd Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Wed, 1 Jan 2025 13:12:46 -0700 Subject: [PATCH 36/46] chore: comprehension for getting prefix key in fuzzer --- src/dbt_osmosis/core/osmosis.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index effa2382..6a45fbf2 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -1332,14 +1332,20 @@ def get_candidates(self, name: str, node: ResultNode, context: DbtProjectContext """Get a list of candidate names for a column excluding a prefix.""" _ = context variants = [] - prefix = t.cast( - str, - node.meta.get("osmosis_prefix") # Can be set in the node yml (legacy support) - or node.config.extra.get("dbt_osmosis_prefix") # Or in dbt_project.yml / {{ config() }} - or node.unrendered_config.get("dbt_osmosis_prefix"), + key = "osmosis_prefix" + p = _find_first( + ( + t.cast(str, v) + # Can be set in the node yml (legacy support) + # Or in dbt_project.yml / {{ config() }} + for c in (node.meta, node.config.extra, node.unrendered_config) + for k in (key, f"dbt_{key}") + for v in (c.get(k), c.get(k.replace("_", "-"))) + ), + lambda v: bool(v), ) - if prefix: - variants.append(name.removeprefix(prefix)) + if p: + variants.append(name.removeprefix(p)) return variants From a2b7afcd30ac0249abe79e6a9cdb46f4a288094e Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Wed, 1 Jan 2025 14:01:18 -0700 Subject: [PATCH 37/46] feat: sync node to yaml instead of co-manipulation, decouples mutations chore: only use get node yaml if needed otherwise stick to node itself --- demo_duckdb/models/schema.yml | 62 +++----- demo_duckdb/models/staging/schema.yml | 41 +++--- demo_duckdb/seeds/schema.yml | 30 ++-- src/dbt_osmosis/core/osmosis.py | 198 +++++++++++++++++++++----- 4 files changed, 212 insertions(+), 119 deletions(-) diff --git a/demo_duckdb/models/schema.yml b/demo_duckdb/models/schema.yml index b43e1b50..ee566d18 100644 --- a/demo_duckdb/models/schema.yml +++ b/demo_duckdb/models/schema.yml @@ -12,26 +12,39 @@ models: - name: first_name description: Customer's first name. PII. - - name: last_name description: Customer's last name. PII. - - name: first_order description: Date (UTC) of a customer's first order - - name: most_recent_order description: Date (UTC) of a customer's most recent order - - name: number_of_orders description: Count of the number of orders a customer has placed - - name: customer_lifetime_value data_type: DOUBLE - description: '' - - name: customer_average_value - description: '' data_type: DECIMAL(18,3) + - name: orders_prefix + columns: + - name: o_order_id + data_type: INTEGER + - name: o_customer_id + data_type: INTEGER + - name: o_order_date + data_type: DATE + - name: o_status + description: '{{ doc("orders_status") }}' + data_type: VARCHAR + - name: o_credit_card_amount + data_type: DOUBLE + - name: o_coupon_amount + data_type: DOUBLE + - name: o_bank_transfer_amount + data_type: DOUBLE + - name: o_gift_card_amount + data_type: DOUBLE + - name: o_amount + data_type: DOUBLE - name: orders description: This table has basic information about orders, as well as some derived facts based on payments @@ -41,7 +54,6 @@ models: - unique - not_null description: This is a unique identifier for an order - - name: customer_id description: Foreign key to the customers table tests: @@ -52,7 +64,6 @@ models: - name: order_date description: Date (UTC) that the order was placed - - name: status tests: - accepted_values: @@ -82,32 +93,5 @@ models: description: Total amount (AUD) of the order tests: - not_null - - name: orders_prefix - columns: - - name: o_order_id - description: '' - data_type: INTEGER - - name: o_customer_id - description: '' - data_type: INTEGER - - name: o_order_date - description: '' - data_type: DATE - - name: o_status - description: '{{ doc("orders_status") }}' - data_type: VARCHAR - - name: o_credit_card_amount - description: '' - data_type: DOUBLE - - name: o_coupon_amount - description: '' - data_type: DOUBLE - - name: o_bank_transfer_amount - description: '' - data_type: DOUBLE - - name: o_gift_card_amount - description: '' - data_type: DOUBLE - - name: o_amount - description: '' - data_type: DOUBLE +sources: [] +seeds: [] diff --git a/demo_duckdb/models/staging/schema.yml b/demo_duckdb/models/staging/schema.yml index 892c720e..8dfb9c80 100644 --- a/demo_duckdb/models/staging/schema.yml +++ b/demo_duckdb/models/staging/schema.yml @@ -1,5 +1,4 @@ version: 2 - models: - name: stg_customers columns: @@ -11,45 +10,41 @@ models: data_type: INTEGER - name: first_name data_type: VARCHAR - description: '' - name: last_name data_type: VARCHAR - description: '' - - name: stg_orders + - name: stg_payments columns: - - name: order_id + - name: payment_id tests: - unique - not_null data_type: INTEGER - - name: customer_id + - name: order_id data_type: INTEGER - description: '' - - name: order_date - data_type: DATE - description: '' - - name: status + - name: payment_method tests: - accepted_values: - values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] - + values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] data_type: VARCHAR - description: '{{ doc("orders_status") }}' - - name: stg_payments + - name: amount + data_type: DOUBLE + - name: stg_orders columns: - - name: payment_id + - name: order_id tests: - unique - not_null data_type: INTEGER - - name: order_id + - name: customer_id data_type: INTEGER - description: '' - - name: payment_method + - name: order_date + data_type: DATE + - name: status tests: - accepted_values: - values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] + values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] + data_type: VARCHAR - - name: amount - data_type: DOUBLE - description: '' + description: '{{ doc("orders_status") }}' +sources: [] +seeds: [] diff --git a/demo_duckdb/seeds/schema.yml b/demo_duckdb/seeds/schema.yml index 306aef1a..44317156 100644 --- a/demo_duckdb/seeds/schema.yml +++ b/demo_duckdb/seeds/schema.yml @@ -1,40 +1,32 @@ version: 2 +models: [] +sources: [] seeds: + - name: raw_customers + columns: + - name: id + data_type: INTEGER + - name: first_name + data_type: VARCHAR + - name: last_name + data_type: VARCHAR - name: raw_orders columns: - name: id - description: '' data_type: INTEGER - name: user_id - description: '' data_type: INTEGER - name: order_date - description: '' data_type: DATE - name: status - description: '{{ doc("orders_status") }}' # putting this in to test if unrendered propogation works + description: '{{ doc("orders_status") }}' - name: raw_payments columns: - name: id - description: '' data_type: INTEGER - name: order_id - description: '' data_type: INTEGER - name: payment_method - description: '' data_type: VARCHAR - name: amount - description: '' data_type: INTEGER - - name: raw_customers - columns: - - name: id - description: '' - data_type: INTEGER - - name: first_name - description: '' - data_type: VARCHAR - - name: last_name - description: '' - data_type: VARCHAR diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 6a45fbf2..d0bd4440 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -17,6 +17,7 @@ from functools import lru_cache from itertools import chain from pathlib import Path +from types import MappingProxyType import dbt.flags as dbt_flags import pluggy @@ -958,6 +959,147 @@ def _remove_sources(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> existing_doc["sources"] = keep_sources +def _sync_doc_section( + context: YamlRefactorContext, node: ResultNode, doc_section: dict[str, t.Any] +) -> None: + """Helper function that overwrites 'doc_section' with data from 'node'. + + This includes columns, description, meta, tags, etc. + We assume node is the single source of truth, so doc_section is replaced. + """ + if node.description: + doc_section["description"] = node.description + else: + doc_section.pop("description", None) + + current_columns: list[dict[str, t.Any]] = doc_section.setdefault("columns", []) + incoming_columns: list[dict[str, t.Any]] = [] + + current_map = {} + for c in current_columns: + norm_name = normalize_column_name(c["name"], context.project.config.credentials.type) + current_map[norm_name] = c + + for name, meta in node.columns.items(): + cdict = meta.to_dict() + cdict["name"] = name + norm_name = normalize_column_name(name, context.project.config.credentials.type) + + current_yaml = t.cast(dict[str, t.Any], current_map.get(norm_name, {})) + merged = dict(current_yaml) + + for k, v in cdict.items(): + if k == "description" and not v: + merged.pop("description", None) + else: + merged[k] = v + + if not merged.get("description"): + merged.pop("description", None) + if merged.get("tags") == []: + merged.pop("tags", None) + if merged.get("meta") == {}: + merged.pop("meta", None) + + for k in list(merged.keys()): + if not merged[k]: + merged.pop(k) + + incoming_columns.append(merged) + + doc_section["columns"] = incoming_columns + + +def sync_node_to_yaml(context: YamlRefactorContext, node: ResultNode | None = None) -> None: + """Synchronize a single node's columns, description, tags, meta, etc. from the manifest into its corresponding YAML file. + + We assume the manifest node is the single source of truth, so the YAML file is overwritten to match. + + - If the YAML file doesn't exist yet, we create it with minimal structure. + - If the YAML file exists, we read it from the file/ cache, locate the node's section, + and then overwrite that section to match the node's current columns, meta, etc. + + This is a one-way sync: + Manifest Node => YAML + + All changes to the Node (columns, metadata, etc.) should happen before calling this function. + """ + if node is None: + for _, node in filter_models(context): + sync_node_to_yaml(context, node) + return + + current_path = get_current_yaml_path(context, node) + if not current_path or not current_path.exists(): + current_path = get_target_yaml_path(context, node) + + doc: dict[str, t.Any] = _read_yaml(context, current_path) + if not doc: + doc = {"version": 2} + + doc.setdefault("models", []) + doc.setdefault("sources", []) + doc.setdefault("seeds", []) + + if node.resource_type == NodeType.Source: + sync_list_key = "sources" + elif node.resource_type == NodeType.Seed: + sync_list_key = "seeds" + else: + sync_list_key = "models" + + if node.resource_type == NodeType.Source: + # The doc structure => sources: [ { "name": , "tables": [...]}, ... ] + # Step A: find or create the source + doc_source: dict[str, t.Any] | None = None + for s in doc["sources"]: + if s.get("name") == node.source_name: + doc_source = s + break + if not doc_source: + doc_source = { + "name": node.source_name, + "tables": [], + } + doc["sources"].append(doc_source) + + # Step B: find or create the table + doc_table: dict[str, t.Any] | None = None + for t_ in doc_source["tables"]: + if t_.get("name") == node.name: + doc_table = t_ + break + if not doc_table: + doc_table = { + "name": node.name, + "columns": [], + } + doc_source["tables"].append(doc_table) + + # We'll store the columns & description on "doc_table" + # For source, "description" is stored at table-level in the Node + _sync_doc_section(context, node, doc_table) + + else: + # Models or Seeds => doc[ "models" ] or doc[ "seeds" ] is a list of { "name", "description", "columns", ... } + doc_list = doc[sync_list_key] + doc_obj: dict[str, t.Any] | None = None + for item in doc_list: + if item.get("name") == node.name: + doc_obj = item + break + if not doc_obj: + doc_obj = { + "name": node.name, + "columns": [], + } + doc_list.append(doc_obj) + + _sync_doc_section(context, node, doc_obj) + + _write_yaml(context, current_path, doc) + + def apply_restructure_plan( context: YamlRefactorContext, plan: RestructureDeltaPlan, *, confirm: bool = False ) -> None: @@ -1059,8 +1201,10 @@ def _build_node_ancestor_tree( return tree -def _get_member_yaml(context: YamlRefactorContext, member: ResultNode) -> dict[str, t.Any] | None: - """Get the parsed YAML for a dbt model or source node.""" +def _get_node_yaml( + context: YamlRefactorContext, member: ResultNode +) -> MappingProxyType[str, t.Any] | None: + """Get a read-only view of the parsed YAML for a dbt model or source node.""" project_dir = Path(context.project.config.project_root) if isinstance(member, SourceDefinition): @@ -1070,7 +1214,9 @@ def _get_member_yaml(context: YamlRefactorContext, member: ResultNode) -> dict[s sources = t.cast(list[dict[str, t.Any]], _read_yaml(context, path).get("sources", [])) source = _find_first(sources, lambda s: s["name"] == member.source_name, {}) tables = source.get("tables", []) - return _find_first(tables, lambda tbl: tbl["name"] == member.name) + maybe_doc = _find_first(tables, lambda tbl: tbl["name"] == member.name) + if maybe_doc is not None: + return MappingProxyType(maybe_doc) elif isinstance(member, (ModelNode, SeedNode)): if not member.patch_path: @@ -1078,7 +1224,9 @@ def _get_member_yaml(context: YamlRefactorContext, member: ResultNode) -> dict[s path = project_dir.joinpath(member.patch_path.split("://")[-1]) section = f"{member.resource_type}s" models = t.cast(list[dict[str, t.Any]], _read_yaml(context, path).get(section, [])) - return _find_first(models, lambda model: model["name"] == member.name) + maybe_doc = _find_first(models, lambda model: model["name"] == member.name) + if maybe_doc is not None: + return MappingProxyType(maybe_doc) return None @@ -1122,7 +1270,7 @@ def _build_column_knowledge_graph( ) if context.settings.use_unrendered_descriptions: - raw_yaml = _get_member_yaml(context, ancestor) or {} + raw_yaml = _get_node_yaml(context, ancestor) or {} raw_columns = t.cast(list[dict[str, t.Any]], raw_yaml.get("columns", [])) raw_column_metadata = _find_first( raw_columns, @@ -1183,7 +1331,6 @@ def inherit_upstream_column_knowledge( if extra not in inheritable: inheritable.append(extra) - yaml_section = _get_member_yaml(context, node) column_knowledge_graph = _build_column_knowledge_graph(context, node) kwargs = None for name, node_column in node.columns.items(): @@ -1194,15 +1341,6 @@ def inherit_upstream_column_knowledge( updated_metadata = {k: v for k, v in kwargs.items() if v is not None and k in inheritable} node.columns[name] = node_column.replace(**updated_metadata) - if not yaml_section: - continue - for column in yaml_section.get("columns", []): - yaml_name = normalize_column_name( - column["name"], context.project.config.credentials.type - ) - if yaml_name == name: - column.update(**updated_metadata) - def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None = None) -> None: """Add missing columns to a dbt node and it's corresponding yaml section. Changes are implicitly buffered until commit_yamls is called.""" @@ -1212,12 +1350,9 @@ def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None for _, node in filter_models(context): inject_missing_columns(context, node) return - yaml_section = _get_member_yaml(context, node) - if yaml_section is None: - return current_columns = { - normalize_column_name(c["name"], context.project.config.credentials.type) - for c in yaml_section.get("columns", []) + normalize_column_name(c.name, context.project.config.credentials.type) + for c in node.columns.values() } incoming_columns = get_columns(context, get_table_ref(node)) for incoming_name, incoming_meta in incoming_columns.items(): @@ -1229,7 +1364,6 @@ def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None if dtype := incoming_meta.type: gen_col["data_type"] = dtype.lower() if context.settings.output_to_lower else dtype node.columns[incoming_name] = ColumnInfo.from_dict(gen_col) - yaml_section.setdefault("columns", []).append(gen_col) def remove_columns_not_in_database( @@ -1240,21 +1374,15 @@ def remove_columns_not_in_database( for _, node in filter_models(context): remove_columns_not_in_database(context, node) return - yaml_section = _get_member_yaml(context, node) - if yaml_section is None: - return current_columns = { - normalize_column_name(c["name"], context.project.config.credentials.type) - for c in yaml_section.get("columns", []) + normalize_column_name(c.name, context.project.config.credentials.type) + for c in node.columns.values() } incoming_columns = get_columns(context, get_table_ref(node)) extra_columns = current_columns - set(incoming_columns.keys()) for extra_column in extra_columns: logger.info(f"Detected and removing extra column {extra_column} in node {node.unique_id}") _ = node.columns.pop(extra_column, None) - yaml_section["columns"] = [ - c for c in yaml_section.get("columns", []) if c["name"] != extra_column - ] def sort_columns_as_in_database( @@ -1265,9 +1393,6 @@ def sort_columns_as_in_database( for _, node in filter_models(context): sort_columns_as_in_database(context, node) return - yaml_section = _get_member_yaml(context, node) - if yaml_section is None: - return incoming_columns = get_columns(context, get_table_ref(node)) def _position(column: dict[str, t.Any]): @@ -1276,7 +1401,6 @@ def _position(column: dict[str, t.Any]): return 99999 return db_info.index - t.cast(list[dict[str, t.Any]], yaml_section["columns"]).sort(key=_position) node.columns = { k: v for k, v in sorted(node.columns.items(), key=lambda i: _position(i[1].to_dict())) } @@ -1291,10 +1415,6 @@ def sort_columns_alphabetically( for _, node in filter_models(context): sort_columns_alphabetically(context, node) return - yaml_section = _get_member_yaml(context, node) - if yaml_section is None: - return - t.cast(list[dict[str, t.Any]], yaml_section["columns"]).sort(key=lambda c: c["name"]) node.columns = {k: v for k, v in sorted(node.columns.items(), key=lambda i: i[0])} context.register_mutations(1) @@ -1378,7 +1498,8 @@ def run_example_compilation_flow(c: DbtConfiguration) -> None: c = DbtConfiguration( project_dir="demo_duckdb", profiles_dir="demo_duckdb", vars={"dbt-osmosis": {}} ) - run_example_compilation_flow(c) + + # run_example_compilation_flow(c) project = create_dbt_project_context(c) yaml_context = YamlRefactorContext( @@ -1393,6 +1514,7 @@ def run_example_compilation_flow(c: DbtConfiguration) -> None: (remove_columns_not_in_database, (yaml_context,), {}), (inherit_upstream_column_knowledge, (yaml_context,), {}), (sort_columns_as_in_database, (yaml_context,), {}), + (sync_node_to_yaml, (yaml_context,), {}), (commit_yamls, (yaml_context,), {}), ) steps = iter(t.cast(t.Any, steps)) From e58cebdfeb20cb7eb8877f2d2fceb8605a8c450d Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Wed, 1 Jan 2025 17:09:00 -0700 Subject: [PATCH 38/46] chore: call sync node in relevant tests --- demo_duckdb/models/staging/schema.yml | 32 ++++++++--------- .../test_column_level_knowledge_propagator.py | 36 +++++++++---------- 2 files changed, 34 insertions(+), 34 deletions(-) diff --git a/demo_duckdb/models/staging/schema.yml b/demo_duckdb/models/staging/schema.yml index 8dfb9c80..83705596 100644 --- a/demo_duckdb/models/staging/schema.yml +++ b/demo_duckdb/models/staging/schema.yml @@ -12,22 +12,6 @@ models: data_type: VARCHAR - name: last_name data_type: VARCHAR - - name: stg_payments - columns: - - name: payment_id - tests: - - unique - - not_null - data_type: INTEGER - - name: order_id - data_type: INTEGER - - name: payment_method - tests: - - accepted_values: - values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] - data_type: VARCHAR - - name: amount - data_type: DOUBLE - name: stg_orders columns: - name: order_id @@ -46,5 +30,21 @@ models: data_type: VARCHAR description: '{{ doc("orders_status") }}' + - name: stg_payments + columns: + - name: payment_id + tests: + - unique + - not_null + data_type: INTEGER + - name: order_id + data_type: INTEGER + - name: payment_method + tests: + - accepted_values: + values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] + data_type: VARCHAR + - name: amount + data_type: DOUBLE sources: [] seeds: [] diff --git a/tests/test_column_level_knowledge_propagator.py b/tests/test_column_level_knowledge_propagator.py index fe4d02eb..f7bd1d8d 100644 --- a/tests/test_column_level_knowledge_propagator.py +++ b/tests/test_column_level_knowledge_propagator.py @@ -14,9 +14,10 @@ YamlRefactorContext, YamlRefactorSettings, _build_node_ancestor_tree, - _get_member_yaml, + _get_node_yaml, create_dbt_project_context, inherit_upstream_column_knowledge, + sync_node_to_yaml, ) dbt_version = Version(dbt.version.get_installed_version().to_version_string(skip_matcher=True)) @@ -195,7 +196,8 @@ def test_inherit_upstream_column_knowledge_with_mutations(yaml_context: YamlRefa mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), ): inherit_upstream_column_knowledge(yaml_context, target_node) - yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + sync_node_to_yaml(yaml_context, target_node) + yaml_file_model_section = _get_node_yaml(yaml_context, target_node) target_node_customer_id = target_node.columns["customer_id"] assert target_node_customer_id.description == "THIS COLUMN IS UPDATED FOR TESTING" @@ -254,7 +256,8 @@ def test_inherit_upstream_column_knowledge_skip_add_tags(yaml_context: YamlRefac mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), ): inherit_upstream_column_knowledge(yaml_context, target_node) - yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + sync_node_to_yaml(yaml_context, target_node) + yaml_file_model_section = _get_node_yaml(yaml_context, target_node) target_node_customer_id = target_node.columns["customer_id"] assert target_node_customer_id.description == "THIS COLUMN IS UPDATED FOR TESTING" @@ -269,11 +272,7 @@ def test_inherit_upstream_column_knowledge_skip_add_tags(yaml_context: YamlRefac yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" ) assert yaml_file_model_section["columns"][0]["meta"] == {"my_key": "my_value"} - # TODO: consider a function which synchronizes a node with its yaml buffer, and then consider if inherit_upstream_column_knowledge should sync nodes - # in which case it would pick up manual mutations to the node and apply them to the yaml buffer (which could be useful I think) - assert ( - yaml_file_model_section["columns"][0].get("tags", []) == [] - ) # NOTE: yaml tags do not exist in buffer because we added them artificially to the node and skip_add_tags is True + assert yaml_file_model_section["columns"][0]["tags"] == ["my_tag3", "my_tag4"] def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta( @@ -300,7 +299,8 @@ def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta( mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), ): inherit_upstream_column_knowledge(yaml_context, target_node) - yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + sync_node_to_yaml(yaml_context, target_node) + yaml_file_model_section = _get_node_yaml(yaml_context, target_node) assert target_node_columns["customer_id"].description == "THIS COLUMN IS UPDATED FOR TESTING" assert ( @@ -318,11 +318,7 @@ def test_update_undocumented_columns_with_prior_knowledge_skip_merge_meta( assert ( yaml_file_model_section["columns"][0]["description"] == "THIS COLUMN IS UPDATED FOR TESTING" ) - # TODO: consider a function which synchronizes a node with its yaml buffer, and then consider if inherit_upstream_column_knowledge should sync nodes - # in which case it would pick up manual mutations to the node and apply them to the yaml buffer (which could be useful I think) - assert ( - yaml_file_model_section["columns"][0].get("meta", {}) == {} - ) # NOTE: yaml meta does not exist in buffer because we added it artificially to the node and skip_merge_meta is True + assert yaml_file_model_section["columns"][0]["meta"] == {"my_key": "my_value"} assert sorted(yaml_file_model_section["columns"][0]["tags"]) == [ "my_tag1", "my_tag2", @@ -357,7 +353,8 @@ def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), ): inherit_upstream_column_knowledge(yaml_context, target_node) - yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + sync_node_to_yaml(yaml_context, target_node) + yaml_file_model_section = _get_node_yaml(yaml_context, target_node) # 4) Validate the Node cid = target_node.columns["customer_id"] @@ -423,7 +420,8 @@ def test_update_undocumented_columns_with_prior_knowledge_with_osmosis_keep_desc mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), ): inherit_upstream_column_knowledge(yaml_context, target_node) - yaml_file_model_section = _get_member_yaml(yaml_context, target_node) + sync_node_to_yaml(yaml_context, target_node) + yaml_file_model_section = _get_node_yaml(yaml_context, target_node) # 4) Assert Node cid = target_node.columns["customer_id"] @@ -477,7 +475,8 @@ def test_update_undocumented_columns_with_prior_knowledge_add_progenitor_to_meta mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), ): inherit_upstream_column_knowledge(yaml_context, target_node) - model_section = _get_member_yaml(yaml_context, target_node) + sync_node_to_yaml(yaml_context, target_node) + model_section = _get_node_yaml(yaml_context, target_node) # 4) Assert Node cid = target_node.columns["customer_id"] @@ -541,7 +540,8 @@ def test_update_undocumented_columns_with_prior_knowledge_with_add_inheritance_f mock.patch("dbt_osmosis.core.osmosis._COLUMN_LIST_CACHE", {}), ): inherit_upstream_column_knowledge(yaml_context, target_node) - section = _get_member_yaml(yaml_context, target_node) + sync_node_to_yaml(yaml_context, target_node) + section = _get_node_yaml(yaml_context, target_node) # 4) Assert Node cid = target_node.columns["customer_id"] From 199450427483bef5ff1641d44dfdb05be95eb2ce Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Wed, 1 Jan 2025 17:38:48 -0700 Subject: [PATCH 39/46] chore: last bit of test update --- tests/test_column_level_knowledge.py | 43 --------- tests/test_yaml_context.py | 4 +- ...propagator.py => test_yaml_inheritance.py} | 8 -- tests/test_yaml_knowledge_graph.py | 88 +++++++++++++++++++ 4 files changed, 90 insertions(+), 53 deletions(-) delete mode 100644 tests/test_column_level_knowledge.py rename tests/{test_column_level_knowledge_propagator.py => test_yaml_inheritance.py} (99%) create mode 100644 tests/test_yaml_knowledge_graph.py diff --git a/tests/test_column_level_knowledge.py b/tests/test_column_level_knowledge.py deleted file mode 100644 index 359ab018..00000000 --- a/tests/test_column_level_knowledge.py +++ /dev/null @@ -1,43 +0,0 @@ -def test_noop(): - pass - - -# TODO: refactor this test -# from dbt_osmosis.core.column_level_knowledge import get_prior_knowledge -# -# -# class TestDbtYamlManager: -# def test_get_prior_knowledge(test): -# knowledge = { -# "myColumn": { -# "progenitor": "source.my_model.source.Order", -# "generation": "generation_0", -# "name": "my_column", -# }, -# "my_column": { -# "progenitor": "model.my_model.mart.Order", -# "generation": "generation_0", -# "name": "my_column", -# }, -# } -# assert ( -# get_prior_knowledge(knowledge, "my_column")["progenitor"] -# == "source.my_model.source.Order" -# ) -# -# def test_get_prior_knowledge_with_camel_case(test): -# knowledge = { -# "myColumn": { -# "progenitor": "model.my_model.dwh.Order", -# "generation": "generation_1", -# "name": "myColumn", -# }, -# "my_column": { -# "progenitor": "model.my_model.mart.Order", -# "generation": "generation_0", -# "name": "my_column", -# }, -# } -# assert ( -# get_prior_knowledge(knowledge, "my_column")["progenitor"] == "model.my_model.dwh.Order" -# ) diff --git a/tests/test_yaml_context.py b/tests/test_yaml_context.py index 857918e2..eb582b90 100644 --- a/tests/test_yaml_context.py +++ b/tests/test_yaml_context.py @@ -60,8 +60,8 @@ def _customer_column_types(yaml_context: YamlRefactorContext) -> dict[str, str]: node = next(n for n in yaml_context.project.manifest.nodes.values() if n.name == "customers") assert node - catalog_key = get_table_ref(node) - columns = get_columns(yaml_context, catalog_key) + ref = get_table_ref(node) + columns = get_columns(yaml_context, ref) assert columns column_types = dict({name: meta.type for name, meta in columns.items()}) diff --git a/tests/test_column_level_knowledge_propagator.py b/tests/test_yaml_inheritance.py similarity index 99% rename from tests/test_column_level_knowledge_propagator.py rename to tests/test_yaml_inheritance.py index f7bd1d8d..4b9c38e3 100644 --- a/tests/test_column_level_knowledge_propagator.py +++ b/tests/test_yaml_inheritance.py @@ -80,7 +80,6 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): "constraints": [], "quote": None, "tags": [], - "granularity": None, }, "first_name": { "name": "first_name", @@ -90,7 +89,6 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): "constraints": [], "quote": None, "tags": [], - "granularity": None, }, "last_name": { "name": "last_name", @@ -100,7 +98,6 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): "constraints": [], "quote": None, "tags": [], - "granularity": None, }, "first_order": { "name": "first_order", @@ -110,7 +107,6 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): "constraints": [], "quote": None, "tags": [], - "granularity": None, }, "most_recent_order": { "name": "most_recent_order", @@ -120,7 +116,6 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): "constraints": [], "quote": None, "tags": [], - "granularity": None, }, "number_of_orders": { "name": "number_of_orders", @@ -130,7 +125,6 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): "constraints": [], "quote": None, "tags": [], - "granularity": None, }, "customer_lifetime_value": { "name": "customer_lifetime_value", @@ -140,7 +134,6 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): "constraints": [], "quote": None, "tags": [], - "granularity": None, }, "customer_average_value": { "name": "customer_average_value", @@ -150,7 +143,6 @@ def test_inherit_upstream_column_knowledge(yaml_context: YamlRefactorContext): "constraints": [], "quote": None, "tags": [], - "granularity": None, }, } if dbt_version >= Version("1.9.0"): diff --git a/tests/test_yaml_knowledge_graph.py b/tests/test_yaml_knowledge_graph.py new file mode 100644 index 00000000..213fc000 --- /dev/null +++ b/tests/test_yaml_knowledge_graph.py @@ -0,0 +1,88 @@ +# pyright: reportPrivateUsage=false +import typing as t + +import pytest + +from dbt_osmosis.core.osmosis import ( + DbtConfiguration, + YamlRefactorContext, + YamlRefactorSettings, + _build_column_knowledge_graph, + create_dbt_project_context, +) + + +@pytest.fixture(scope="module") +def yaml_context() -> YamlRefactorContext: + c = DbtConfiguration(project_dir="demo_duckdb", profiles_dir="demo_duckdb") + c.vars = {"dbt-osmosis": {}} + project = create_dbt_project_context(c) + context = YamlRefactorContext( + project, settings=YamlRefactorSettings(add_progenitor_to_meta=True, dry_run=True) + ) + return context + + +class TestDbtYamlManager: + def test_get_prior_knowledge(self, yaml_context: YamlRefactorContext): + # Progenitor gives us an idea of where the inherited traits will come from + knowledge: dict[str, t.Any] = { + "customer_id": { + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.stg_customers"}, + "name": "customer_id", + "data_type": "INTEGER", + "constraints": [], + "description": "This is a unique identifier for a customer", + }, + "first_name": { + "meta": {"osmosis_progenitor": "seed.jaffle_shop_duckdb.raw_customers"}, + "name": "first_name", + "data_type": "VARCHAR", + "constraints": [], + "description": "Customer's first name. PII.", + }, + "last_name": { + "meta": {"osmosis_progenitor": "seed.jaffle_shop_duckdb.raw_customers"}, + "name": "last_name", + "data_type": "VARCHAR", + "constraints": [], + "description": "Customer's last name. PII.", + }, + "first_order": { + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "name": "first_order", + "description": "Date (UTC) of a customer's first order", + "constraints": [], + }, + "most_recent_order": { + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "name": "most_recent_order", + "description": "Date (UTC) of a customer's most recent order", + "constraints": [], + }, + "number_of_orders": { + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "name": "number_of_orders", + "description": "Count of the number of orders a customer has placed", + "constraints": [], + }, + "customer_lifetime_value": { + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "name": "customer_lifetime_value", + "data_type": "DOUBLE", + "constraints": [], + }, + "customer_average_value": { + "meta": {"osmosis_progenitor": "model.jaffle_shop_duckdb.customers"}, + "name": "customer_average_value", + "data_type": "DECIMAL(18,3)", + "constraints": [], + }, + } + assert ( + _build_column_knowledge_graph( + yaml_context, + yaml_context.project.manifest.nodes["model.jaffle_shop_duckdb.customers"], + ) + == knowledge + ) From 91c9279401080e30eb6e057eb8082d1455103efc Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Wed, 1 Jan 2025 19:16:03 -0700 Subject: [PATCH 40/46] chore: get workbench to working state at least --- pyproject.toml | 1 + src/dbt_osmosis/core/osmosis.py | 9 +- .../{app_v2.py => workbench/app.py} | 109 +++++++++--------- .../{ => workbench}/components/dashboard.py | 0 .../{ => workbench}/components/editor.py | 0 .../{ => workbench}/components/feed.py | 0 .../{ => workbench}/components/preview.py | 0 .../{ => workbench}/components/profiler.py | 0 .../{ => workbench}/components/renderer.py | 2 +- .../{ => workbench}/requirements.txt | 4 +- uv.lock | 19 ++- 11 files changed, 79 insertions(+), 65 deletions(-) rename src/dbt_osmosis/{app_v2.py => workbench/app.py} (75%) rename src/dbt_osmosis/{ => workbench}/components/dashboard.py (100%) rename src/dbt_osmosis/{ => workbench}/components/editor.py (100%) rename src/dbt_osmosis/{ => workbench}/components/feed.py (100%) rename src/dbt_osmosis/{ => workbench}/components/preview.py (100%) rename src/dbt_osmosis/{ => workbench}/components/profiler.py (100%) rename src/dbt_osmosis/{ => workbench}/components/renderer.py (96%) rename src/dbt_osmosis/{ => workbench}/requirements.txt (64%) diff --git a/pyproject.toml b/pyproject.toml index e37808a5..27ceb193 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ workbench = [ "ydata-profiling>=3.6.0", "feedparser~=6.0.10", "streamlit-elements-fluence>=0.1.4", + "setuptools~=75.6.0", ] openai = ["openai>0.28.0"] diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index d0bd4440..51975166 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -22,6 +22,7 @@ import dbt.flags as dbt_flags import pluggy import ruamel.yaml +from agate.table import Table # pyright: ignore[reportMissingTypeStubs] from dbt.adapters.base.column import Column as BaseColumn from dbt.adapters.base.impl import BaseAdapter from dbt.adapters.base.relation import BaseRelation @@ -447,7 +448,7 @@ def compile_sql_code(context: DbtProjectContext, raw_sql: str) -> ManifestSQLNod return compiled_node -def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> AdapterResponse: +def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> tuple[AdapterResponse, Table]: """Execute jinja SQL using the context's manifest and adapter.""" if _has_jinja(raw_sql): comp = compile_sql_code(context, raw_sql) @@ -455,8 +456,8 @@ def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> AdapterRespons else: sql_to_exec = raw_sql - resp, _ = context.adapter.execute(sql_to_exec, auto_begin=False, fetch=True) - return resp + resp, table = context.adapter.execute(sql_to_exec, auto_begin=False, fetch=True) + return resp, table # Node filtering @@ -1490,7 +1491,7 @@ def run_example_compilation_flow(c: DbtConfiguration) -> None: node = compile_sql_code(context, "select '{{ 1+1 }}' as col_{{ var('foo') }}") print("Compiled =>", node.compiled_code) - resp = execute_sql_code(context, "select '{{ 1+2 }}' as col_{{ var('foo') }}") + resp, _ = execute_sql_code(context, "select '{{ 1+2 }}' as col_{{ var('foo') }}") print("Resp =>", resp) diff --git a/src/dbt_osmosis/app_v2.py b/src/dbt_osmosis/workbench/app.py similarity index 75% rename from src/dbt_osmosis/app_v2.py rename to src/dbt_osmosis/workbench/app.py index b66dd5df..c2f17c60 100644 --- a/src/dbt_osmosis/app_v2.py +++ b/src/dbt_osmosis/workbench/app.py @@ -1,7 +1,9 @@ +# pyright: reportMissingTypeStubs=false, reportAny=false, reportUnusedCallResult=false, reportUnknownMemberType=false, reportUntypedFunctionDecorator=false import argparse import decimal import os import sys +import typing as t from collections import OrderedDict from datetime import date, datetime from textwrap import dedent @@ -15,18 +17,27 @@ from streamlit import session_state as state from streamlit_elements_fluence import elements, event, sync -from dbt_osmosis.components.dashboard import Dashboard -from dbt_osmosis.components.editor import Editor -from dbt_osmosis.components.editor import Tabs as EditorTabs -from dbt_osmosis.components.feed import RssFeed -from dbt_osmosis.components.preview import Preview -from dbt_osmosis.components.profiler import Profiler -from dbt_osmosis.components.renderer import Renderer -from dbt_osmosis.vendored.dbt_core_interface import ( - DbtProject, - default_profiles_dir, - default_project_dir, +from dbt_osmosis.core.osmosis import ( + DbtConfiguration, + compile_sql_code, + create_dbt_project_context, + discover_profiles_dir, + discover_project_dir, + execute_sql_code, + reload_manifest, ) +from dbt_osmosis.core.osmosis import ( + DbtProjectContext as DbtProject, +) +from dbt_osmosis.workbench.components.dashboard import Dashboard +from dbt_osmosis.workbench.components.editor import Editor +from dbt_osmosis.workbench.components.editor import Tabs as EditorTabs +from dbt_osmosis.workbench.components.feed import RssFeed +from dbt_osmosis.workbench.components.preview import Preview +from dbt_osmosis.workbench.components.profiler import Profiler +from dbt_osmosis.workbench.components.renderer import Renderer + +st.set_page_config(page_title="dbt-osmosis Workbench", page_icon="🌊", layout="wide") default_prompt = ( "-- This is a scratch model\n-- it will not persist if you jump to another model\n-- you can" @@ -98,12 +109,12 @@ def _get_demo_query() -> str: ) -def _parse_args() -> dict: +def _parse_args() -> dict[str, t.Any]: """Parse command line arguments""" try: parser = argparse.ArgumentParser(description="dbt osmosis workbench") - parser.add_argument("--profiles-dir", help="dbt profile directory") - parser.add_argument("--project-dir", help="dbt project directory") + _ = parser.add_argument("--profiles-dir", help="dbt profile directory") + _ = parser.add_argument("--project-dir", help="dbt project directory") args = vars(parser.parse_args(sys.argv[1:])) except Exception: args = {} @@ -113,10 +124,10 @@ def _parse_args() -> dict: def change_target() -> None: """Change the target profile""" ctx: DbtProject = state.w.ctx - if ctx.config.target_name != state.target_profile: - print(f"Changing target to {state.target_profile}") - ctx.base_config.target = state.target_profile - ctx.safe_parse_project(reinit=True) + if ctx.config.target_name != state.w.target_profile: + print(f"Changing target to {state.w.target_profile}") + ctx.config.target_name = state.w.target_profile + reload_manifest(ctx) state.w.raw_sql += " " # invalidate cache on next compile? state.w.cache_version += 1 @@ -125,7 +136,7 @@ def inject_model() -> None: """Inject model into editor""" ctx: DbtProject = state.w.ctx if state.model is not None and state.model != "SCRATCH": - path = os.path.join(ctx.project_root, state.model.original_file_path) + path = os.path.join(ctx.config.project_root, state.model.original_file_path) with open(path, "r") as f: state.w.raw_sql = f.read() state.w.editor.update_content("SQL", state.w.raw_sql) @@ -138,9 +149,9 @@ def save_model() -> None: """Save model to disk""" ctx: DbtProject = state.w.ctx if state.model is not None and state.model != "SCRATCH": - path = os.path.join(ctx.project_root, state.model.original_file_path) + path = os.path.join(ctx.config.project_root, state.model.original_file_path) with open(path, "w") as f: - f.write(state.w.editor.get_content("SQL")) + _ = f.write(state.w.editor.get_content("SQL")) print(f"Saved model to {path}") @@ -148,8 +159,7 @@ def sidebar(ctx: DbtProject) -> None: # Model selector with st.sidebar.expander("💡 Models", expanded=True): st.caption( - "Select a model to use as a starting point for your query. The filter supports" - " typeahead. All changes are ephemeral unless you save the model." + "Select a model to use as a starting point for your query. The filter supports typeahead. All changes are ephemeral unless you save the model." ) state.w.model = st.selectbox( "Select a model", @@ -166,8 +176,7 @@ def sidebar(ctx: DbtProject) -> None: # Profile selector with st.sidebar.expander("💁 Profiles", expanded=True): st.caption( - "Select a profile used for materializing, compiling, and testing models.\n\nIf you" - " change profiles, you may need to modify the workbench query to invalidate the cache." + "Select a profile used for materializing, compiling, and testing models.\n\nIf you change profiles, you may need to modify the workbench query to invalidate the cache." ) state.w.target_profile = st.radio( f"Loaded profiles from {ctx.config.profile_name}", @@ -180,8 +189,7 @@ def sidebar(ctx: DbtProject) -> None: # Query template with st.sidebar.expander("📝 Query Template"): st.caption( - "This is a template query that will be used when executing SQL. The {sql} variable will" - " be replaced with the compiled SQL." + "This is a template query that will be used when executing SQL. The {sql} variable will be replaced with the compiled SQL." ) state.w.sql_template = st.text_area( "SQL Template", @@ -192,22 +200,20 @@ def sidebar(ctx: DbtProject) -> None: # Refresh instructions st.sidebar.write("Notes") st.sidebar.caption( - "Refresh the page to reparse dbt. This is useful if any updated models or macros in your" - " physical project on disk have changed and are not yet reflected in the workbench as" - " refable or updated." + "Refresh the page to reparse dbt. This is useful if any updated models or macros in your physical project on disk have changed and are not yet reflected in the workbench as refable or updated." ) def compile(ctx: DbtProject, sql: str) -> str: """Compile SQL using dbt context.""" try: - with ctx.adapter.connection_named("__sql_workbench__"): - return ctx.compile_code(sql).compiled_code + return compile_sql_code(ctx, sql).compiled_code or "" except Exception as e: return str(e) -def ser(x): +def ser(x: t.Any) -> t.Any: + """Serialize a value for JSON.""" if isinstance(x, decimal.Decimal): return float(x) if isinstance(x, date): @@ -226,28 +232,25 @@ def run_query() -> None: sql = state.w.compiled_sql try: state.w.sql_query_state = "running" - with ctx.adapter.connection_named("__sql_workbench__"): - result = ctx.execute_code(state.w.sql_template.format(sql=sql)) + resp, table = execute_sql_code(ctx, state.w.sql_template.format(sql=sql)) except Exception as error: state.w.sql_query_state = "error" state.w.sql_adapter_resp = str(error) state.w.sql_result_columns = [] else: state.w.sql_query_state = "success" - state.w.sql_adapter_resp = result.adapter_response - output = [ - OrderedDict(zip(result.table.column_names, (ser(v) for v in row))) - for row in result.table.rows - ] + state.w.sql_adapter_resp = resp + output = [OrderedDict(zip(table.column_names, (ser(v) for v in row))) for row in table.rows] # pyright: ignore[reportUnknownVariableType,reportUnknownArgumentType] state.w.sql_result_df = pd.DataFrame(output) state.w.sql_result_columns = [ - {"field": c, "headerName": c.upper()} for c in result.table.column_names + {"field": c, "headerName": c.upper()} for c in t.cast(tuple[str], table.column_names) ] state.w.sql_result_rows = output -@st.cache -def convert_df_to_csv(dataframe: pd.DataFrame) -> bytes: +# TODO: is this used? +@st.cache_data +def convert_df_to_csv(_: pd.DataFrame) -> bytes: """Convert a dataframe to a CSV file.""" return state.w.sql_result_df.to_csv().encode("utf-8") @@ -266,7 +269,7 @@ def convert_profile_report_to_html(profile: ydata_profiling.ProfileReport) -> st return profile.to_html() -def run_profile(minimal: bool = True) -> str: +def run_profile(minimal: bool = True) -> None: """Run a profile report and return the HTML report.""" if not state.w.sql_result_df.empty: state.w.profile_html = convert_profile_report_to_html(build_profile_report(minimal)) @@ -289,8 +292,8 @@ def main(): profiler=Profiler(board, 0, 20, 8, 9, minW=3, minH=3), feed=RssFeed(board, 8, 20, 4, 9, minW=3, minH=3), # Base Args - project_dir=args.get("project_dir") or str(default_project_dir()), - profiles_dir=args.get("profiles_dir") or str(default_profiles_dir()), + project_dir=args.get("project_dir") or discover_project_dir(), + profiles_dir=args.get("profiles_dir") or discover_profiles_dir(), # SQL Editor compiled_sql="", raw_sql="", @@ -320,16 +323,15 @@ def main(): else: w.raw_sql = default_prompt # Initialize dbt context - w.ctx = DbtProject( - project_dir=w.project_dir, - profiles_dir=w.profiles_dir, + w.ctx = create_dbt_project_context( + config=DbtConfiguration(project_dir=w.project_dir, profiles_dir=w.profiles_dir) ) w.target_profile = w.ctx.config.target_name # Demo compilation hook + seed editor w.editor.tabs[EditorTabs.SQL]["content"] = w.raw_sql w.compiled_sql = compile(w.ctx, w.raw_sql) if w.raw_sql else "" # Grab nodes - model_nodes = [] + model_nodes: list[t.Any] = [] for node in w.ctx.manifest.nodes.values(): if node.resource_type == "model" and node.package_name == w.ctx.config.project_name: model_nodes.append(node) @@ -340,7 +342,7 @@ def main(): # Update editor content w.editor.update_content("SQL", w.raw_sql) # Generate RSS feed - feed = feedparser.parse("https://news.ycombinator.com/rss") + feed = t.cast(t.Any, feedparser.parse("https://news.ycombinator.com/rss")) feed_contents = [] for entry in feed.entries: feed_contents.append( @@ -356,7 +358,7 @@ def main(): """ ) ) - w.feed_contents = "".join(feed_contents) + w.feed_contents = "".join(t.cast(list[str], feed_contents)) else: # Load state w = state.w @@ -367,7 +369,7 @@ def main(): sidebar(ctx) # Render Interface - with elements("dashboard"): + with elements("dashboard"): # pyright: ignore[reportGeneralTypeIssues] # Bind hotkeys, maybe one day we can figure out how to override Monaco's cmd+enter binding event.Hotkey("ctrl+enter", sync(), bindInputs=True, overrideDefault=True) event.Hotkey("command+s", sync(), bindInputs=True, overrideDefault=True) @@ -384,5 +386,4 @@ def main(): if __name__ == "__main__": - st.set_page_config(page_title="dbt-osmosis Workbench", page_icon="🌊", layout="wide") main() diff --git a/src/dbt_osmosis/components/dashboard.py b/src/dbt_osmosis/workbench/components/dashboard.py similarity index 100% rename from src/dbt_osmosis/components/dashboard.py rename to src/dbt_osmosis/workbench/components/dashboard.py diff --git a/src/dbt_osmosis/components/editor.py b/src/dbt_osmosis/workbench/components/editor.py similarity index 100% rename from src/dbt_osmosis/components/editor.py rename to src/dbt_osmosis/workbench/components/editor.py diff --git a/src/dbt_osmosis/components/feed.py b/src/dbt_osmosis/workbench/components/feed.py similarity index 100% rename from src/dbt_osmosis/components/feed.py rename to src/dbt_osmosis/workbench/components/feed.py diff --git a/src/dbt_osmosis/components/preview.py b/src/dbt_osmosis/workbench/components/preview.py similarity index 100% rename from src/dbt_osmosis/components/preview.py rename to src/dbt_osmosis/workbench/components/preview.py diff --git a/src/dbt_osmosis/components/profiler.py b/src/dbt_osmosis/workbench/components/profiler.py similarity index 100% rename from src/dbt_osmosis/components/profiler.py rename to src/dbt_osmosis/workbench/components/profiler.py diff --git a/src/dbt_osmosis/components/renderer.py b/src/dbt_osmosis/workbench/components/renderer.py similarity index 96% rename from src/dbt_osmosis/components/renderer.py rename to src/dbt_osmosis/workbench/components/renderer.py index fe805209..52f047b1 100644 --- a/src/dbt_osmosis/components/renderer.py +++ b/src/dbt_osmosis/workbench/components/renderer.py @@ -54,7 +54,7 @@ def __call__( key=( md5( state.w.compiled_sql.encode("utf-8") - + state.target_profile.encode("utf-8") + + state.w.target_profile.encode("utf-8") ).hexdigest() if state.w.compiled_sql else "__empty__" diff --git a/src/dbt_osmosis/requirements.txt b/src/dbt_osmosis/workbench/requirements.txt similarity index 64% rename from src/dbt_osmosis/requirements.txt rename to src/dbt_osmosis/workbench/requirements.txt index ef8ffe8d..d2b1cc71 100644 --- a/src/dbt_osmosis/requirements.txt +++ b/src/dbt_osmosis/workbench/requirements.txt @@ -1,8 +1,8 @@ # STREAMLIT DEMO APP DEPENDENCIES # Database adapter -dbt-duckdb>=1.4.0,<1.5.0 -dbt-sqlite>=1.4.0,<1.5.0 +dbt-duckdb>=1.8.0,<2.0.0 +dbt-sqlite>=1.8.0,<2.0.0 # dbt Osmosis dbt-osmosis[workbench] diff --git a/uv.lock b/uv.lock index c9917abf..0bcf674b 100644 --- a/uv.lock +++ b/uv.lock @@ -18,7 +18,7 @@ dependencies = [ { name = "parsedatetime" }, { name = "python-slugify" }, { name = "pytimeparse" }, - { name = "tzdata", marker = "sys_platform == 'win32'" }, + { name = "tzdata", marker = "platform_system == 'Windows'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/29/77/6f5df1c68bf056f5fdefc60ccc616303c6211e71cd6033c830c12735f605/agate-1.9.1.tar.gz", hash = "sha256:bc60880c2ee59636a2a80cd8603d63f995be64526abf3cbba12f00767bcd5b3d", size = 202303 } wheels = [ @@ -185,7 +185,7 @@ name = "click" version = "8.1.8" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "colorama", marker = "platform_system == 'Windows'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/b9/2e/0090cbf739cee7d23781ad4b89a9894a41538e4fcf4c31dcdd705b78eb8b/click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a", size = 226593 } wheels = [ @@ -474,6 +474,7 @@ sqlite = [ ] workbench = [ { name = "feedparser" }, + { name = "setuptools" }, { name = "streamlit" }, { name = "streamlit-ace" }, { name = "streamlit-elements-fluence" }, @@ -495,6 +496,7 @@ requires-dist = [ { name = "rich", specifier = ">=10" }, { name = "ruamel-yaml", specifier = ">=0.17" }, { name = "ruff", marker = "extra == 'dev'", specifier = "~=0.8.4" }, + { name = "setuptools", marker = "extra == 'workbench'", specifier = "~=75.6.0" }, { name = "streamlit", marker = "extra == 'workbench'", specifier = ">=1.20.0" }, { name = "streamlit-ace", marker = "extra == 'workbench'", specifier = ">=0.1.0" }, { name = "streamlit-elements-fluence", marker = "extra == 'workbench'", specifier = ">=0.1.4" }, @@ -2379,6 +2381,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/83/11/00d3c3dfc25ad54e731d91449895a79e4bf2384dc3ac01809010ba88f6d5/seaborn-0.13.2-py3-none-any.whl", hash = "sha256:636f8336facf092165e27924f223d3c62ca560b1f2bb5dff7ab7fad265361987", size = 294914 }, ] +[[package]] +name = "setuptools" +version = "75.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/54/292f26c208734e9a7f067aea4a7e282c080750c4546559b58e2e45413ca0/setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6", size = 1337429 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/21/47d163f615df1d30c094f6c8bbb353619274edccf0327b185cc2493c2c33/setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d", size = 1224032 }, +] + [[package]] name = "sgmllib3k" version = "1.0.0" @@ -2496,7 +2507,7 @@ dependencies = [ { name = "toml" }, { name = "tornado" }, { name = "typing-extensions" }, - { name = "watchdog", marker = "sys_platform != 'darwin'" }, + { name = "watchdog", marker = "platform_system != 'Darwin'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/78/33/14b5ac0369ecf0af675911e5e84b934e6fcc2cec850857d2390eb373b0a6/streamlit-1.41.1.tar.gz", hash = "sha256:6626d32b098ba1458b71eebdd634c62af2dd876380e59c4b6a1e828a39d62d69", size = 8712473 } wheels = [ @@ -2606,7 +2617,7 @@ name = "tqdm" version = "4.67.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "colorama", marker = "platform_system == 'Windows'" }, ] sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737 } wheels = [ From 210d79b51cfbe64b8827ba4ab8ad7c0fc0ec7fba Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Thu, 2 Jan 2025 00:38:15 -0700 Subject: [PATCH 41/46] feat: add dev utils, key on thread conn for ttl, add raw catalog gen func, clean empty resource keys --- .gitignore | 3 + .pre-commit-config.yaml | 43 +- Makefile | 56 +++ demo_duckdb/dbt_project.yml | 9 +- .../models/jaffle_shop/main/customers.yml | 26 ++ .../models/jaffle_shop/main/orders.yml | 50 +++ .../models/jaffle_shop/main/orders_prefix.yml | 23 ++ demo_duckdb/models/schema.yml | 97 ----- .../jaffle_shop/main/stg_customers.yml | 14 + .../staging/jaffle_shop/main/stg_orders.yml | 20 + .../staging/jaffle_shop/main/stg_payments.yml | 18 + demo_duckdb/models/staging/schema.yml | 50 --- .../seeds/jaffle_shop/main/raw_customers.yml | 10 + .../seeds/jaffle_shop/main/raw_orders.yml | 12 + .../seeds/jaffle_shop/main/raw_payments.yml | 12 + demo_duckdb/seeds/schema.yml | 32 -- pyproject.toml | 26 +- requirements.txt | 47 +-- src/dbt_osmosis/core/llm.py | 6 +- src/dbt_osmosis/core/osmosis.py | 128 ++++-- .../workbench/components/profiler.py | 2 +- uv.lock | 366 +++++++++--------- 22 files changed, 584 insertions(+), 466 deletions(-) create mode 100644 demo_duckdb/models/jaffle_shop/main/customers.yml create mode 100644 demo_duckdb/models/jaffle_shop/main/orders.yml create mode 100644 demo_duckdb/models/jaffle_shop/main/orders_prefix.yml delete mode 100644 demo_duckdb/models/schema.yml create mode 100644 demo_duckdb/models/staging/jaffle_shop/main/stg_customers.yml create mode 100644 demo_duckdb/models/staging/jaffle_shop/main/stg_orders.yml create mode 100644 demo_duckdb/models/staging/jaffle_shop/main/stg_payments.yml delete mode 100644 demo_duckdb/models/staging/schema.yml create mode 100644 demo_duckdb/seeds/jaffle_shop/main/raw_customers.yml create mode 100644 demo_duckdb/seeds/jaffle_shop/main/raw_orders.yml create mode 100644 demo_duckdb/seeds/jaffle_shop/main/raw_payments.yml delete mode 100644 demo_duckdb/seeds/schema.yml diff --git a/.gitignore b/.gitignore index 632f9e80..f6a674de 100644 --- a/.gitignore +++ b/.gitignore @@ -131,3 +131,6 @@ dmypy.json # Nix .devenv + +# Makefile touch target +.uv-installed-* diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9624b53e..c9d74c33 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,13 +1,30 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v4.5.0 hooks: - - id: end-of-file-fixer - - id: trailing-whitespace + - id: check-ast - id: check-json - id: check-yaml + - id: check-toml + - id: end-of-file-fixer + - id: trailing-whitespace - id: detect-private-key - id: debug-statements + - repo: https://github.com/commitizen-tools/commitizen + rev: v3.13.0 + hooks: + - id: commitizen + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.8.0 + hooks: + - id: ruff-format + args: ["--preview"] + - id: ruff + args: [--fix] + - repo: https://github.com/gitleaks/gitleaks + rev: v8.18.2 + hooks: + - id: gitleaks - repo: https://github.com/rhysd/actionlint rev: v1.7.1 hooks: @@ -16,18 +33,16 @@ repos: rev: v0.10.0.1 hooks: - id: shellcheck - # TODO: replace with ruff - # - repo: https://github.com/psf/black - # rev: 24.4.2 - # hooks: - # - id: black - # args: ["--config", "pyproject.toml"] - # - repo: https://github.com/pycqa/isort - # rev: 5.13.2 - # hooks: - # - id: isort - # args: ["-sp", "pyproject.toml"] - repo: https://github.com/hadolint/hadolint rev: v2.13.0-beta hooks: - id: hadolint-docker + - repo: local + hooks: + - id: uv-export-prod + name: uv-export-prod + entry: uv export -o requirements.txt --no-hashes --frozen + language: system + types: [file] + files: pyproject.toml|uv.lock + pass_filenames: false diff --git a/Makefile b/Makefile index e69de29b..4b36f579 100644 --- a/Makefile +++ b/Makefile @@ -0,0 +1,56 @@ +# Author: Alex B +# Description: Makefile for setting up the development environment. Includes convenient aliases for common tasks. + +PY_VERSION := $(shell cat .python-version) + +.PHONY: uv venv pre-commit dev all test lint format scan scan-new-baseline \ + scan-without-baseline clean + +check: format lint test + +.uv-installed-$(PY_VERSION): .python-version + @if [ ! -f .python-version ]; then echo 'Please create a .python-version file with the desired Python version'; exit 1; fi + @if command -v uv > /dev/null; then echo 'Verified uv is installed'; else echo 'Please install uv by running `curl -LsSf https://astral.sh/uv/install.sh | sh` or visit https://docs.astral.sh/uv/ for more information'; exit 1; fi + @uv tool update-shell + @uv python install + @rm -f .uv-installed-* + @touch .uv-installed-$(PY_VERSION) + + +uv: .uv-installed-$(PY_VERSION) + +.venv: .uv-installed-$(PY_VERSION) + @uv venv .venv + +venv: .venv + +.git/hooks/pre-commit: .uv-installed-$(PY_VERSION) + @uv tool install pre-commit + @uv tool run pre-commit install + +pre-commit: .git/hooks/pre-commit + +dev: .venv .git/hooks/pre-commit + @uv sync --extra=dev --extra=duckdb + +clean: + @rm -rf .venv target demo_duckdb/target demo_sqlite/target + +lint: .uv-installed-$(PY_VERSION) + @uvx ruff check + +format: .uv-installed-$(PY_VERSION) + @uvx ruff check --fix --select I + @uvx ruff format --preview + +test: .uv-installed-$(PY_VERSION) + @uv run pytest tests/ + +scan: .uv-installed-$(PY_VERSION) + @uvx bandit -r src -b tests/bandit_baseline.json + +scan-new-baseline: .uv-installed-$(PY_VERSION) + @uvx bandit -r src -f json -o tests/bandit_baseline.json + +scan-without-baseline: .uv-installed-$(PY_VERSION) + @uvx bandit -r src diff --git a/demo_duckdb/dbt_project.yml b/demo_duckdb/dbt_project.yml index 6a30ba5a..f48f7376 100644 --- a/demo_duckdb/dbt_project.yml +++ b/demo_duckdb/dbt_project.yml @@ -20,11 +20,14 @@ clean-targets: require-dbt-version: [">=1.0.0", "<2.0.0"] models: - +dbt-osmosis: schema.yml - jaffle_shop: + jaffle_shop_duckdb: + +dbt-osmosis: "{node.database}/{node.schema}/{node.name}.yml" materialized: table staging: materialized: view seeds: - +dbt-osmosis: schema.yml + jaffle_shop_duckdb: + +dbt-osmosis: "{node.database}/{node.schema}/{node.name}.yml" + +flags: {} diff --git a/demo_duckdb/models/jaffle_shop/main/customers.yml b/demo_duckdb/models/jaffle_shop/main/customers.yml new file mode 100644 index 00000000..2fce7fbd --- /dev/null +++ b/demo_duckdb/models/jaffle_shop/main/customers.yml @@ -0,0 +1,26 @@ +version: 2 +models: + - name: customers + description: This table has basic information about a customer, as well as some derived facts based on a customer's orders + + columns: + - name: customer_id + description: This is a unique identifier for a customer + tests: + - unique + - not_null + + - name: first_name + description: Customer's first name. PII. + - name: last_name + description: Customer's last name. PII. + - name: first_order + description: Date (UTC) of a customer's first order + - name: most_recent_order + description: Date (UTC) of a customer's most recent order + - name: number_of_orders + description: Count of the number of orders a customer has placed + - name: customer_lifetime_value + data_type: DOUBLE + - name: customer_average_value + data_type: DECIMAL(18,3) diff --git a/demo_duckdb/models/jaffle_shop/main/orders.yml b/demo_duckdb/models/jaffle_shop/main/orders.yml new file mode 100644 index 00000000..aa759f22 --- /dev/null +++ b/demo_duckdb/models/jaffle_shop/main/orders.yml @@ -0,0 +1,50 @@ +version: 2 +models: + - name: orders + description: This table has basic information about orders, as well as some derived facts based on payments + + columns: + - name: order_id + tests: + - unique + - not_null + description: This is a unique identifier for an order + - name: customer_id + description: Foreign key to the customers table + tests: + - not_null + - relationships: + to: ref('customers') + field: customer_id + + - name: order_date + description: Date (UTC) that the order was placed + - name: status + tests: + - accepted_values: + values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] + + description: '{{ doc("orders_status") }}' + - name: credit_card_amount + description: Amount of the order (AUD) paid for by credit card + tests: + - not_null + + - name: coupon_amount + description: Amount of the order (AUD) paid for by coupon + tests: + - not_null + + - name: bank_transfer_amount + description: Amount of the order (AUD) paid for by bank transfer + tests: + - not_null + + - name: gift_card_amount + description: Amount of the order (AUD) paid for by gift card + tests: + - not_null + - name: amount + description: Total amount (AUD) of the order + tests: + - not_null diff --git a/demo_duckdb/models/jaffle_shop/main/orders_prefix.yml b/demo_duckdb/models/jaffle_shop/main/orders_prefix.yml new file mode 100644 index 00000000..d8c1d1e2 --- /dev/null +++ b/demo_duckdb/models/jaffle_shop/main/orders_prefix.yml @@ -0,0 +1,23 @@ +version: 2 +models: + - name: orders_prefix + columns: + - name: o_order_id + data_type: INTEGER + - name: o_customer_id + data_type: INTEGER + - name: o_order_date + data_type: DATE + - name: o_status + description: '{{ doc("orders_status") }}' + data_type: VARCHAR + - name: o_credit_card_amount + data_type: DOUBLE + - name: o_coupon_amount + data_type: DOUBLE + - name: o_bank_transfer_amount + data_type: DOUBLE + - name: o_gift_card_amount + data_type: DOUBLE + - name: o_amount + data_type: DOUBLE diff --git a/demo_duckdb/models/schema.yml b/demo_duckdb/models/schema.yml deleted file mode 100644 index ee566d18..00000000 --- a/demo_duckdb/models/schema.yml +++ /dev/null @@ -1,97 +0,0 @@ -version: 2 -models: - - name: customers - description: This table has basic information about a customer, as well as some derived facts based on a customer's orders - - columns: - - name: customer_id - description: This is a unique identifier for a customer - tests: - - unique - - not_null - - - name: first_name - description: Customer's first name. PII. - - name: last_name - description: Customer's last name. PII. - - name: first_order - description: Date (UTC) of a customer's first order - - name: most_recent_order - description: Date (UTC) of a customer's most recent order - - name: number_of_orders - description: Count of the number of orders a customer has placed - - name: customer_lifetime_value - data_type: DOUBLE - - name: customer_average_value - data_type: DECIMAL(18,3) - - name: orders_prefix - columns: - - name: o_order_id - data_type: INTEGER - - name: o_customer_id - data_type: INTEGER - - name: o_order_date - data_type: DATE - - name: o_status - description: '{{ doc("orders_status") }}' - data_type: VARCHAR - - name: o_credit_card_amount - data_type: DOUBLE - - name: o_coupon_amount - data_type: DOUBLE - - name: o_bank_transfer_amount - data_type: DOUBLE - - name: o_gift_card_amount - data_type: DOUBLE - - name: o_amount - data_type: DOUBLE - - name: orders - description: This table has basic information about orders, as well as some derived facts based on payments - - columns: - - name: order_id - tests: - - unique - - not_null - description: This is a unique identifier for an order - - name: customer_id - description: Foreign key to the customers table - tests: - - not_null - - relationships: - to: ref('customers') - field: customer_id - - - name: order_date - description: Date (UTC) that the order was placed - - name: status - tests: - - accepted_values: - values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] - - description: '{{ doc("orders_status") }}' - - name: credit_card_amount - description: Amount of the order (AUD) paid for by credit card - tests: - - not_null - - - name: coupon_amount - description: Amount of the order (AUD) paid for by coupon - tests: - - not_null - - - name: bank_transfer_amount - description: Amount of the order (AUD) paid for by bank transfer - tests: - - not_null - - - name: gift_card_amount - description: Amount of the order (AUD) paid for by gift card - tests: - - not_null - - name: amount - description: Total amount (AUD) of the order - tests: - - not_null -sources: [] -seeds: [] diff --git a/demo_duckdb/models/staging/jaffle_shop/main/stg_customers.yml b/demo_duckdb/models/staging/jaffle_shop/main/stg_customers.yml new file mode 100644 index 00000000..7614d47c --- /dev/null +++ b/demo_duckdb/models/staging/jaffle_shop/main/stg_customers.yml @@ -0,0 +1,14 @@ +version: 2 +models: + - name: stg_customers + columns: + - name: customer_id + tests: + - unique + - not_null + + data_type: INTEGER + - name: first_name + data_type: VARCHAR + - name: last_name + data_type: VARCHAR diff --git a/demo_duckdb/models/staging/jaffle_shop/main/stg_orders.yml b/demo_duckdb/models/staging/jaffle_shop/main/stg_orders.yml new file mode 100644 index 00000000..f70d2d19 --- /dev/null +++ b/demo_duckdb/models/staging/jaffle_shop/main/stg_orders.yml @@ -0,0 +1,20 @@ +version: 2 +models: + - name: stg_orders + columns: + - name: order_id + tests: + - unique + - not_null + data_type: INTEGER + - name: customer_id + data_type: INTEGER + - name: order_date + data_type: DATE + - name: status + tests: + - accepted_values: + values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] + + data_type: VARCHAR + description: '{{ doc("orders_status") }}' diff --git a/demo_duckdb/models/staging/jaffle_shop/main/stg_payments.yml b/demo_duckdb/models/staging/jaffle_shop/main/stg_payments.yml new file mode 100644 index 00000000..2004386f --- /dev/null +++ b/demo_duckdb/models/staging/jaffle_shop/main/stg_payments.yml @@ -0,0 +1,18 @@ +version: 2 +models: + - name: stg_payments + columns: + - name: payment_id + tests: + - unique + - not_null + data_type: INTEGER + - name: order_id + data_type: INTEGER + - name: payment_method + tests: + - accepted_values: + values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] + data_type: VARCHAR + - name: amount + data_type: DOUBLE diff --git a/demo_duckdb/models/staging/schema.yml b/demo_duckdb/models/staging/schema.yml deleted file mode 100644 index 83705596..00000000 --- a/demo_duckdb/models/staging/schema.yml +++ /dev/null @@ -1,50 +0,0 @@ -version: 2 -models: - - name: stg_customers - columns: - - name: customer_id - tests: - - unique - - not_null - - data_type: INTEGER - - name: first_name - data_type: VARCHAR - - name: last_name - data_type: VARCHAR - - name: stg_orders - columns: - - name: order_id - tests: - - unique - - not_null - data_type: INTEGER - - name: customer_id - data_type: INTEGER - - name: order_date - data_type: DATE - - name: status - tests: - - accepted_values: - values: ['placed', 'shipped', 'completed', 'return_pending', 'returned'] - - data_type: VARCHAR - description: '{{ doc("orders_status") }}' - - name: stg_payments - columns: - - name: payment_id - tests: - - unique - - not_null - data_type: INTEGER - - name: order_id - data_type: INTEGER - - name: payment_method - tests: - - accepted_values: - values: ['credit_card', 'coupon', 'bank_transfer', 'gift_card'] - data_type: VARCHAR - - name: amount - data_type: DOUBLE -sources: [] -seeds: [] diff --git a/demo_duckdb/seeds/jaffle_shop/main/raw_customers.yml b/demo_duckdb/seeds/jaffle_shop/main/raw_customers.yml new file mode 100644 index 00000000..63c629ab --- /dev/null +++ b/demo_duckdb/seeds/jaffle_shop/main/raw_customers.yml @@ -0,0 +1,10 @@ +version: 2 +seeds: + - name: raw_customers + columns: + - name: id + data_type: INTEGER + - name: first_name + data_type: VARCHAR + - name: last_name + data_type: VARCHAR diff --git a/demo_duckdb/seeds/jaffle_shop/main/raw_orders.yml b/demo_duckdb/seeds/jaffle_shop/main/raw_orders.yml new file mode 100644 index 00000000..e0da702c --- /dev/null +++ b/demo_duckdb/seeds/jaffle_shop/main/raw_orders.yml @@ -0,0 +1,12 @@ +version: 2 +seeds: + - name: raw_orders + columns: + - name: id + data_type: INTEGER + - name: user_id + data_type: INTEGER + - name: order_date + data_type: DATE + - name: status + description: '{{ doc("orders_status") }}' diff --git a/demo_duckdb/seeds/jaffle_shop/main/raw_payments.yml b/demo_duckdb/seeds/jaffle_shop/main/raw_payments.yml new file mode 100644 index 00000000..ba81753a --- /dev/null +++ b/demo_duckdb/seeds/jaffle_shop/main/raw_payments.yml @@ -0,0 +1,12 @@ +version: 2 +seeds: + - name: raw_payments + columns: + - name: id + data_type: INTEGER + - name: order_id + data_type: INTEGER + - name: payment_method + data_type: VARCHAR + - name: amount + data_type: INTEGER diff --git a/demo_duckdb/seeds/schema.yml b/demo_duckdb/seeds/schema.yml deleted file mode 100644 index 44317156..00000000 --- a/demo_duckdb/seeds/schema.yml +++ /dev/null @@ -1,32 +0,0 @@ -version: 2 -models: [] -sources: [] -seeds: - - name: raw_customers - columns: - - name: id - data_type: INTEGER - - name: first_name - data_type: VARCHAR - - name: last_name - data_type: VARCHAR - - name: raw_orders - columns: - - name: id - data_type: INTEGER - - name: user_id - data_type: INTEGER - - name: order_date - data_type: DATE - - name: status - description: '{{ doc("orders_status") }}' - - name: raw_payments - columns: - - name: id - data_type: INTEGER - - name: order_id - data_type: INTEGER - - name: payment_method - data_type: VARCHAR - - name: amount - data_type: INTEGER diff --git a/pyproject.toml b/pyproject.toml index 27ceb193..e181380d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "dbt-osmosis" -version = "0.14.0" +version = "1.0.0" description = "A dbt utility for managing YAML to make developing with dbt more delightful." readme = "README.md" license = { text = "Apache-2.0" } @@ -24,25 +24,25 @@ classifiers = [ requires-python = ">=3.9,<3.13" dependencies = [ - "click>7", - "dbt-core>=1.8,<1.10", - "ruamel.yaml>=0.17", + "click>7,<9", + "dbt-core>=1.8,<=1.10", + "ruamel.yaml~=0.18.7", "rich>=10", - "pluggy>=1.5.0", + "pluggy>=1.5.0,<2", ] [project.optional-dependencies] -duckdb = ["dbt-duckdb>=1.0.0"] -sqlite = ["dbt-sqlite>=1.0.0"] -postgres = ["dbt-postgres>=1.0.0"] +duckdb = ["dbt-duckdb>=1.0.0,<2"] +sqlite = ["dbt-sqlite>=1.0.0,<2"] +postgres = ["dbt-postgres>=1.0.0,<2"] workbench = [ - "streamlit>=1.20.0", - "streamlit-ace>=0.1.0", - "ydata-profiling>=3.6.0", - "feedparser~=6.0.10", + "streamlit>=1.20.0,<1.36.0", + "streamlit-ace~=0.1.1", + "ydata-profiling~=4.12.1", + "feedparser~=6.0.11", "streamlit-elements-fluence>=0.1.4", - "setuptools~=75.6.0", + "setuptools>=70", ] openai = ["openai>0.28.0"] diff --git a/requirements.txt b/requirements.txt index b4223742..791415bf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,68 +1,47 @@ # This file was autogenerated by uv via the following command: -# uv export --no-hashes --extra duckdb --extra sqlite --extra dev +# uv export -o requirements.txt --no-hashes --frozen -e . agate==1.9.1 annotated-types==0.7.0 -astroid==3.3.8 attrs==24.3.0 babel==2.16.0 -black==24.10.0 certifi==2024.12.14 -cfgv==3.4.0 charset-normalizer==3.4.1 click==8.1.8 colorama==0.4.6 daff==1.3.46 -dbt-adapters==1.13.0 -dbt-common==1.14.0 -dbt-core==1.9.1 -dbt-duckdb==1.9.1 +dbt-adapters==1.7.0 +dbt-common==1.10.0 +dbt-core==1.8.8 dbt-extractor==0.5.1 -dbt-semantic-interfaces==0.7.4 -dbt-sqlite==1.4.0 +dbt-semantic-interfaces==0.5.1 deepdiff==7.0.1 -dill==0.3.9 -distlib==0.3.9 -duckdb==1.1.3 -exceptiongroup==1.2.2 ; python_full_version < '3.11' -filelock==3.16.1 -gitdb==4.0.11 -gitpython==3.1.43 -identify==2.6.4 idna==3.10 importlib-metadata==6.11.0 -iniconfig==2.0.0 isodate==0.6.1 -isort==5.13.2 jinja2==3.1.5 jsonschema==4.23.0 jsonschema-specifications==2024.10.1 leather==0.4.0 +logbook==1.5.3 markdown-it-py==3.0.0 markupsafe==3.0.2 mashumaro==3.14 -mccabe==0.7.0 mdurl==0.1.2 +minimal-snowplow-tracker==0.0.2 more-itertools==10.5.0 msgpack==1.1.0 -mypy==1.14.1 -mypy-extensions==1.0.0 networkx==3.2.1 ; python_full_version < '3.10' networkx==3.4.2 ; python_full_version >= '3.10' -nodeenv==1.9.1 ordered-set==4.1.0 -packaging==24.2 +packaging==23.2 parsedatetime==2.6 pathspec==0.12.1 -platformdirs==4.3.6 pluggy==1.5.0 -pre-commit==4.0.1 -protobuf==5.29.2 +protobuf==4.25.5 pydantic==2.10.4 pydantic-core==2.27.2 pygments==2.18.0 -pylint==3.3.3 -pytest==7.4.4 python-dateutil==2.9.0.post0 python-slugify==8.0.4 pytimeparse==1.1.8 @@ -75,15 +54,9 @@ rpds-py==0.22.3 ruamel-yaml==0.18.7 ruamel-yaml-clib==0.2.12 ; platform_python_implementation == 'CPython' six==1.17.0 -smmap==5.0.1 -snowplow-tracker==1.0.4 sqlparse==0.5.3 text-unidecode==1.3 -tomli==2.2.1 ; python_full_version < '3.11' -tomlkit==0.13.2 -types-requests==2.32.0.20241016 typing-extensions==4.12.2 -tzdata==2024.2 ; sys_platform == 'win32' +tzdata==2024.2 ; platform_system == 'Windows' urllib3==2.3.0 -virtualenv==20.28.0 zipp==3.21.0 diff --git a/src/dbt_osmosis/core/llm.py b/src/dbt_osmosis/core/llm.py index b6c62405..d37fe13c 100644 --- a/src/dbt_osmosis/core/llm.py +++ b/src/dbt_osmosis/core/llm.py @@ -1,9 +1,7 @@ -import os +# WIP: this will eventually be a class that will handle the LLM process -import openai - -def create_llm_prompt(sql_content, filename): +def create_llm_prompt(sql_content: str, filename: str) -> list[dict[str, str]]: header = """DO NOT ADD A HEADER TO DBT YAML. THIS CODE WILL APPEND TO AN EXISTING YAML FILE. diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 51975166..fa6a7a4b 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -14,12 +14,15 @@ from collections.abc import Iterable, Iterator from concurrent.futures import FIRST_EXCEPTION, Future, ThreadPoolExecutor, wait from dataclasses import dataclass, field +from datetime import datetime, timezone from functools import lru_cache from itertools import chain from pathlib import Path +from threading import get_ident from types import MappingProxyType import dbt.flags as dbt_flags +import dbt.utils as dbt_utils import pluggy import ruamel.yaml from agate.table import Table # pyright: ignore[reportMissingTypeStubs] @@ -27,6 +30,7 @@ from dbt.adapters.base.impl import BaseAdapter from dbt.adapters.base.relation import BaseRelation from dbt.adapters.contracts.connection import AdapterResponse +from dbt.adapters.contracts.relation import RelationConfig from dbt.adapters.factory import get_adapter, register_adapter from dbt.config.runtime import RuntimeConfig from dbt.context.providers import generate_runtime_macro_context @@ -39,7 +43,7 @@ SeedNode, SourceDefinition, ) -from dbt.contracts.results import CatalogArtifact, ColumnMetadata +from dbt.contracts.results import CatalogArtifact, CatalogResults, ColumnMetadata from dbt.contracts.results import ( CatalogKey as TableRef, ) @@ -47,6 +51,7 @@ from dbt.node_types import NodeType from dbt.parser.manifest import ManifestLoader, process_node from dbt.parser.sql import SqlBlockParser, SqlMacroParser +from dbt.task.docs.generate import Catalog from dbt.task.sql import SqlCompileRunner from dbt.tracking import disable_tracking from dbt_common.clients.system import get_env @@ -120,8 +125,8 @@ def config_to_namespace(cfg: DbtConfiguration) -> argparse.Namespace: return argparse.Namespace( project_dir=cfg.project_dir, profiles_dir=cfg.profiles_dir, - target=cfg.target, - profile=cfg.profile, + target=cfg.target or os.getenv("DBT_TARGET"), + profile=cfg.profile or os.getenv("DBT_PROFILE"), threads=cfg.threads, single_threaded=cfg.single_threaded, vars=cfg.vars, @@ -148,26 +153,36 @@ class DbtProjectContext: manifest: Manifest sql_parser: SqlBlockParser macro_parser: SqlMacroParser - adapter_ttl: float = 3600.0 + connection_ttl: float = 3600.0 _adapter_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) _manifest_mutex: threading.Lock = field(default_factory=threading.Lock, init=False) _adapter: BaseAdapter | None = field(default=None, init=False) - _adapter_created_at: float = field(default=0.0, init=False) + _connection_created_at: dict[int, float] = field(default_factory=dict, init=False) @property - def is_adapter_expired(self) -> bool: + def is_connection_expired(self) -> bool: """Check if the adapter has expired based on the adapter TTL.""" - return time.time() - self._adapter_created_at > self.adapter_ttl + return ( + time.time() - self._connection_created_at.setdefault(get_ident(), 0.0) + > self.connection_ttl + ) @property def adapter(self) -> BaseAdapter: """Get the adapter instance, creating a new one if the current one has expired.""" with self._adapter_mutex: - if not self._adapter or self.is_adapter_expired: - self._adapter = instantiate_adapter(self.config) - self._adapter.set_macro_resolver(self.manifest) - self._adapter_created_at = time.time() + if not self._adapter: + adapter = instantiate_adapter(self.config) + adapter.set_macro_resolver(self.manifest) + _ = adapter.acquire_connection() + self._adapter = adapter + self._connection_created_at[get_ident()] = time.time() + elif self.is_connection_expired: + self._adapter.connections.release() + self._adapter.connections.clear_thread_connection() + _ = self._adapter.acquire_connection() + self._connection_created_at[get_ident()] = time.time() return self._adapter @property @@ -300,8 +315,6 @@ class YamlRefactorSettings: """Filter models to action via a file path match.""" dry_run: bool = False """Do not write changes to disk.""" - catalog_file: str | None = None - """Path to the dbt catalog.json file to use preferentially instead of live warehouse introspection""" skip_add_columns: bool = False """Skip adding missing columns in the yaml files.""" skip_add_tags: bool = False @@ -324,6 +337,10 @@ class YamlRefactorSettings: """Force column name and data type output to lowercase in the yaml files.""" force_inherit_descriptions: bool = False """Force inheritance of descriptions from upstream models, even if node has a valid description.""" + catalog_path: str | None = None + """Path to the dbt catalog.json file to use preferentially instead of live warehouse introspection""" + create_catalog_if_not_exists: bool = False + """Generate the catalog.json for the project if it doesn't exist and use it for introspective queries.""" @dataclass @@ -353,7 +370,7 @@ class YamlRefactorContext: ) _mutation_count: int = field(default=0, init=False) - _catalog: CatalogArtifact | None = field(default=None, init=False) + _catalog: CatalogResults | None = field(default=None, init=False) def register_mutations(self, count: int) -> None: """Increment the mutation count by a specified amount.""" @@ -388,16 +405,13 @@ def skip_patterns(self) -> list[str]: ) return defs.pop(SKIP_PATTERNS, []) - def read_catalog(self) -> CatalogArtifact | None: + def read_catalog(self) -> CatalogResults | None: """Read the catalog file if it exists.""" - if self._catalog: - return self._catalog - if not self.settings.catalog_file: - return None - fp = Path(self.settings.catalog_file) - if not fp.exists(): - return None - self._catalog = CatalogArtifact.from_dict(json.loads(fp.read_text())) + if not self._catalog: + catalog = load_catalog(self.settings) + if not catalog and self.settings.create_catalog_if_not_exists: + catalog = generate_catalog(self.project) + self._catalog = catalog return self._catalog def __post_init__(self) -> None: @@ -405,14 +419,52 @@ def __post_init__(self) -> None: self.placeholders = (EMPTY_STRING, *self.placeholders) -def load_catalog(settings: YamlRefactorSettings) -> CatalogArtifact | None: - """Load the catalog file if it exists and return a CatalogArtifact instance.""" - if not settings.catalog_file: +def load_catalog(settings: YamlRefactorSettings) -> CatalogResults | None: + """Load the catalog file if it exists and return a CatalogResults instance.""" + if not settings.catalog_path: return None - fp = Path(settings.catalog_file) + fp = Path(settings.catalog_path) if not fp.exists(): return None - return CatalogArtifact.from_dict(json.loads(fp.read_text())) + return t.cast(CatalogResults, CatalogArtifact.from_dict(json.loads(fp.read_text()))) # pyright: ignore[reportInvalidCast] + + +# NOTE: this is mostly adapted from dbt-core with some cruft removed, strict pyright is not a fan of dbt's shenanigans +def generate_catalog(context: DbtProjectContext) -> CatalogResults | None: + """Generate the dbt catalog file for the project.""" + catalogable_nodes = chain( + [ + t.cast(RelationConfig, node) # pyright: ignore[reportInvalidCast] + for node in context.manifest.nodes.values() + if node.is_relational and not node.is_ephemeral_model + ], + [t.cast(RelationConfig, node) for node in context.manifest.sources.values()], # pyright: ignore[reportInvalidCast] + ) + table, exceptions = context.adapter.get_filtered_catalog( + catalogable_nodes, + context.manifest.get_used_schemas(), # pyright: ignore[reportArgumentType] + ) + + catalog = Catalog( + [dict(zip(table.column_names, map(dbt_utils._coerce_decimal, row))) for row in table] # pyright: ignore[reportUnknownArgumentType,reportPrivateUsage] + ) + + errors: list[str] | None = None + if exceptions: + errors = [str(e) for e in exceptions] + + nodes, sources = catalog.make_unique_id_map(context.manifest) + artifact = CatalogArtifact.from_results( # pyright: ignore[reportAttributeAccessIssue] + nodes=nodes, + sources=sources, + generated_at=datetime.now(timezone.utc), + compile_results=None, + errors=errors, + ) + artifact.write( # Cache it same as dbt + os.path.join(context.config.project_target_path, "catalog.json") + ) + return t.cast(CatalogResults, artifact) # Basic compile & execute @@ -1038,22 +1090,18 @@ def sync_node_to_yaml(context: YamlRefactorContext, node: ResultNode | None = No if not doc: doc = {"version": 2} - doc.setdefault("models", []) - doc.setdefault("sources", []) - doc.setdefault("seeds", []) - if node.resource_type == NodeType.Source: - sync_list_key = "sources" + resource_k = "sources" elif node.resource_type == NodeType.Seed: - sync_list_key = "seeds" + resource_k = "seeds" else: - sync_list_key = "models" + resource_k = "models" if node.resource_type == NodeType.Source: # The doc structure => sources: [ { "name": , "tables": [...]}, ... ] # Step A: find or create the source doc_source: dict[str, t.Any] | None = None - for s in doc["sources"]: + for s in doc.setdefault(resource_k, []): if s.get("name") == node.source_name: doc_source = s break @@ -1083,7 +1131,7 @@ def sync_node_to_yaml(context: YamlRefactorContext, node: ResultNode | None = No else: # Models or Seeds => doc[ "models" ] or doc[ "seeds" ] is a list of { "name", "description", "columns", ... } - doc_list = doc[sync_list_key] + doc_list = doc.setdefault(resource_k, []) doc_obj: dict[str, t.Any] | None = None for item in doc_list: if item.get("name") == node.name: @@ -1098,6 +1146,10 @@ def sync_node_to_yaml(context: YamlRefactorContext, node: ResultNode | None = No _sync_doc_section(context, node, doc_obj) + for k in ("models", "sources", "seeds"): + if len(doc.get(k, [])) == 0: + _ = doc.pop(k, None) + _write_yaml(context, current_path, doc) @@ -1503,6 +1555,8 @@ def run_example_compilation_flow(c: DbtConfiguration) -> None: # run_example_compilation_flow(c) project = create_dbt_project_context(c) + _ = generate_catalog(project) + yaml_context = YamlRefactorContext( project, settings=YamlRefactorSettings(use_unrendered_descriptions=True) ) diff --git a/src/dbt_osmosis/workbench/components/profiler.py b/src/dbt_osmosis/workbench/components/profiler.py index 58e7399e..8d6bf0d2 100644 --- a/src/dbt_osmosis/workbench/components/profiler.py +++ b/src/dbt_osmosis/workbench/components/profiler.py @@ -1,5 +1,5 @@ from streamlit import session_state as state -from streamlit_elements_fluence import extras, html, mui +from streamlit_elements_fluence import html, mui from .dashboard import Dashboard diff --git a/uv.lock b/uv.lock index 0bcf674b..75103a20 100644 --- a/uv.lock +++ b/uv.lock @@ -335,7 +335,7 @@ sdist = { url = "https://files.pythonhosted.org/packages/0e/fc/82796c10545f3df98 [[package]] name = "dbt-adapters" -version = "1.13.0" +version = "1.7.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "agate" }, @@ -345,14 +345,14 @@ dependencies = [ { name = "pytz" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ad/96/4c937891a2832d0520e5e311d38604df8547f4b8266d7d3bedbc2245c068/dbt_adapters-1.13.0.tar.gz", hash = "sha256:2e4e743d3613e7d72319edf6eb85ea2ca57133472e32ff0148c1325f21ff6d16", size = 106862 } +sdist = { url = "https://files.pythonhosted.org/packages/b8/62/5bae7041fd92d4423185f5c36208509a996c1f0822ee295aba6c429197c7/dbt_adapters-1.7.0.tar.gz", hash = "sha256:ad3392794ed0504e2082e19b3e447701982af1ab28b91f829bb3feb986bd1b29", size = 103914 } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/d7/3b627f5b2d1a390762c2b3fbfb690fd4395b94dc70957ee7cdfe50198596/dbt_adapters-1.13.0-py3-none-any.whl", hash = "sha256:4888f9b7d0ddb709e8219e722ee01678e31aade2b8b19595cf094a177c71dc56", size = 164945 }, + { url = "https://files.pythonhosted.org/packages/e1/54/ee1a9f086fe3eb35d380c043efdb1ce929b84cfeed9458766ea1f90c85a5/dbt_adapters-1.7.0-py3-none-any.whl", hash = "sha256:f192294112d5722c6a0981a104f7a9f57548aeeefe31b0b9d5708493f74a09f5", size = 162022 }, ] [[package]] name = "dbt-common" -version = "1.14.0" +version = "1.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "agate" }, @@ -368,14 +368,14 @@ dependencies = [ { name = "requests" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/ec/59/b0758aa2219ae4df642e179c879e42e5c90071275eae545cfe3ebd506da2/dbt_common-1.14.0.tar.gz", hash = "sha256:2227e24a165780c5368320dedd3c6bc40038dedece48af03daab43c11bf20372", size = 80035 } +sdist = { url = "https://files.pythonhosted.org/packages/f3/50/3ee5937e3b72fda7e1aa11deed4bb6b19993137a92517f9a79cf60000d1a/dbt_common-1.10.0.tar.gz", hash = "sha256:cfd9f46e9de4f9c2c95ec70210d1be53ac41e279038d18a7928cbbfd3f9b67af", size = 77514 } wheels = [ - { url = "https://files.pythonhosted.org/packages/fc/03/25d3618dc3af65996e3fde27859ad8a7fdf94889a327430a99f33d811db0/dbt_common-1.14.0-py3-none-any.whl", hash = "sha256:239b568a0dd764a431b93cdfe247628622c975f2eed8abf3bc04f4dc770ad161", size = 82948 }, + { url = "https://files.pythonhosted.org/packages/8e/16/71f32590949af81e89f0c4376bf615e18ffae22be081a551a96f7e1ce615/dbt_common-1.10.0-py3-none-any.whl", hash = "sha256:06e6fe9c6f7e13a6f1b513f98f7b1d093f1fb9c02cdbebb3b8904ec6746cc8cf", size = 82187 }, ] [[package]] name = "dbt-core" -version = "1.9.1" +version = "1.8.8" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "agate" }, @@ -386,7 +386,9 @@ dependencies = [ { name = "dbt-extractor" }, { name = "dbt-semantic-interfaces" }, { name = "jinja2" }, + { name = "logbook" }, { name = "mashumaro", extra = ["msgpack"] }, + { name = "minimal-snowplow-tracker" }, { name = "networkx", version = "3.2.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, { name = "networkx", version = "3.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, { name = "packaging" }, @@ -395,13 +397,12 @@ dependencies = [ { name = "pytz" }, { name = "pyyaml" }, { name = "requests" }, - { name = "snowplow-tracker" }, { name = "sqlparse" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/93/34/f7613d15ddad74a72992175df0147c82931e63a0b116e4e2bec5e45a6a11/dbt_core-1.9.1.tar.gz", hash = "sha256:38c931dd5206fdb11a9db1decf1075ce891ad9f4692bd00a9ba760a6cfe4358d", size = 865279 } +sdist = { url = "https://files.pythonhosted.org/packages/22/2c/9f868af9b6271fd87e858a2cc70c50973d8b1f47f141febabe87a09f133c/dbt_core-1.8.8.tar.gz", hash = "sha256:f79c3c076ce218da371c618bfceb0cf78fc894dd962bcd805bbf04b7e29283c6", size = 824564 } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/38/0125f4a9c808ba061fa9c621fda88399bd5cacc4ff35399a8710e69bb335/dbt_core-1.9.1-py3-none-any.whl", hash = "sha256:a1db009c30f08a95fcf2620f70b2258aeca43a480229d7eb710e2243f5f622ea", size = 944639 }, + { url = "https://files.pythonhosted.org/packages/ac/97/f5fe35037be344888abe713b71dfd57f76abeb240c299e530e06001e27de/dbt_core-1.8.8-py3-none-any.whl", hash = "sha256:763b7e0e5b43f5ff56c846a79b626b4a8f58d74d703fe28eee6c3f1917778f4e", size = 900468 }, ] [[package]] @@ -444,7 +445,7 @@ wheels = [ [[package]] name = "dbt-osmosis" -version = "0.14.0" +version = "1.0.0" source = { editable = "." } dependencies = [ { name = "click" }, @@ -483,24 +484,24 @@ workbench = [ [package.metadata] requires-dist = [ - { name = "click", specifier = ">7" }, - { name = "dbt-core", specifier = ">=1.8,<1.10" }, - { name = "dbt-duckdb", marker = "extra == 'duckdb'", specifier = ">=1.0.0" }, - { name = "dbt-postgres", marker = "extra == 'postgres'", specifier = ">=1.0.0" }, - { name = "dbt-sqlite", marker = "extra == 'sqlite'", specifier = ">=1.0.0" }, - { name = "feedparser", marker = "extra == 'workbench'", specifier = "~=6.0.10" }, + { name = "click", specifier = ">7,<9" }, + { name = "dbt-core", specifier = ">=1.8,<=1.10" }, + { name = "dbt-duckdb", marker = "extra == 'duckdb'", specifier = ">=1.0.0,<2" }, + { name = "dbt-postgres", marker = "extra == 'postgres'", specifier = ">=1.0.0,<2" }, + { name = "dbt-sqlite", marker = "extra == 'sqlite'", specifier = ">=1.0.0,<2" }, + { name = "feedparser", marker = "extra == 'workbench'", specifier = "~=6.0.11" }, { name = "openai", marker = "extra == 'openai'", specifier = ">0.28.0" }, - { name = "pluggy", specifier = ">=1.5.0" }, + { name = "pluggy", specifier = ">=1.5.0,<2" }, { name = "pre-commit", marker = "extra == 'dev'", specifier = ">3.0.0" }, { name = "pytest", marker = "extra == 'dev'", specifier = "~=8.3.4" }, { name = "rich", specifier = ">=10" }, - { name = "ruamel-yaml", specifier = ">=0.17" }, + { name = "ruamel-yaml", specifier = "~=0.18.7" }, { name = "ruff", marker = "extra == 'dev'", specifier = "~=0.8.4" }, - { name = "setuptools", marker = "extra == 'workbench'", specifier = "~=75.6.0" }, - { name = "streamlit", marker = "extra == 'workbench'", specifier = ">=1.20.0" }, - { name = "streamlit-ace", marker = "extra == 'workbench'", specifier = ">=0.1.0" }, + { name = "setuptools", marker = "extra == 'workbench'", specifier = ">=70" }, + { name = "streamlit", marker = "extra == 'workbench'", specifier = ">=1.20.0,<1.36.0" }, + { name = "streamlit-ace", marker = "extra == 'workbench'", specifier = "~=0.1.1" }, { name = "streamlit-elements-fluence", marker = "extra == 'workbench'", specifier = ">=0.1.4" }, - { name = "ydata-profiling", marker = "extra == 'workbench'", specifier = ">=3.6.0" }, + { name = "ydata-profiling", marker = "extra == 'workbench'", specifier = "~=4.12.1" }, ] [[package]] @@ -521,7 +522,7 @@ wheels = [ [[package]] name = "dbt-semantic-interfaces" -version = "0.7.4" +version = "0.5.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "click" }, @@ -534,9 +535,9 @@ dependencies = [ { name = "pyyaml" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/53/67/86c4c068d1c6371928064bb9fe44eb8fbc4e15bb101e924d2e596c6fa511/dbt_semantic_interfaces-0.7.4.tar.gz", hash = "sha256:dcedda6702ecabb633aa4e8ab3b1eb7f9c4301dcc0026076a4a0ef64f9e59cf0", size = 90809 } +sdist = { url = "https://files.pythonhosted.org/packages/ba/1b/c7516c333db7a287fded9083209063731d9095e4958c9cda7c73b17178c7/dbt_semantic_interfaces-0.5.1.tar.gz", hash = "sha256:3a497abef1ba8112affdf804b26bfdcd5468ed95cc924b509068e18d371c7c4d", size = 76089 } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/4d/9e0eec04821154c51c05dab77bc3eed95337c42f471d6ffe7b4a699e8cb1/dbt_semantic_interfaces-0.7.4-py3-none-any.whl", hash = "sha256:63965478ef27056f20a8c9a0f59b1355ebbc15133c1a6f0d368d93996a31dd5d", size = 141981 }, + { url = "https://files.pythonhosted.org/packages/31/31/ec1943c95ea18eabfcf7d6c882e7265e245f749dc465101343028eac33b8/dbt_semantic_interfaces-0.5.1-py3-none-any.whl", hash = "sha256:b95ff3a6721dc30f6278cb84933d95e0ef27766e67eeb6bb41906242e77f7c9b", size = 119672 }, ] [[package]] @@ -1130,6 +1131,12 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/df/41/73cc26a2634b538cfe813f618c91e7e9960b8c163f8f0c94a2b0f008b9da/llvmlite-0.43.0-cp39-cp39-win_amd64.whl", hash = "sha256:47e147cdda9037f94b399bf03bfd8a6b6b1f2f90be94a454e3386f006455a9b4", size = 28123489 }, ] +[[package]] +name = "logbook" +version = "1.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2f/d9/16ac346f7c0102835814cc9e5b684aaadea101560bb932a2403bd26b2320/Logbook-1.5.3.tar.gz", hash = "sha256:66f454ada0f56eae43066f604a222b09893f98c1adc18df169710761b8f32fe8", size = 85783 } + [[package]] name = "markdown-it-py" version = "3.0.0" @@ -1266,6 +1273,16 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979 }, ] +[[package]] +name = "minimal-snowplow-tracker" +version = "0.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "requests" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e4/9f/004f810169a48ed5c520279d98327e7793b6491f09d42cb2c5636c994f34/minimal-snowplow-tracker-0.0.2.tar.gz", hash = "sha256:acabf7572db0e7f5cbf6983d495eef54081f71be392330eb3aadb9ccb39daaa4", size = 12542 } + [[package]] name = "more-itertools" version = "10.5.0" @@ -1414,54 +1431,45 @@ wheels = [ [[package]] name = "numpy" -version = "2.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a9/75/10dd1f8116a8b796cb2c737b674e02d02e80454bda953fa7e65d8c12b016/numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78", size = 18902015 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/21/91/3495b3237510f79f5d81f2508f9f13fea78ebfdf07538fc7444badda173d/numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece", size = 21165245 }, - { url = "https://files.pythonhosted.org/packages/05/33/26178c7d437a87082d11019292dce6d3fe6f0e9026b7b2309cbf3e489b1d/numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04", size = 13738540 }, - { url = "https://files.pythonhosted.org/packages/ec/31/cc46e13bf07644efc7a4bf68df2df5fb2a1a88d0cd0da9ddc84dc0033e51/numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66", size = 5300623 }, - { url = "https://files.pythonhosted.org/packages/6e/16/7bfcebf27bb4f9d7ec67332ffebee4d1bf085c84246552d52dbb548600e7/numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b", size = 6901774 }, - { url = "https://files.pythonhosted.org/packages/f9/a3/561c531c0e8bf082c5bef509d00d56f82e0ea7e1e3e3a7fc8fa78742a6e5/numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd", size = 13907081 }, - { url = "https://files.pythonhosted.org/packages/fa/66/f7177ab331876200ac7563a580140643d1179c8b4b6a6b0fc9838de2a9b8/numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318", size = 19523451 }, - { url = "https://files.pythonhosted.org/packages/25/7f/0b209498009ad6453e4efc2c65bcdf0ae08a182b2b7877d7ab38a92dc542/numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8", size = 19927572 }, - { url = "https://files.pythonhosted.org/packages/3e/df/2619393b1e1b565cd2d4c4403bdd979621e2c4dea1f8532754b2598ed63b/numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326", size = 14400722 }, - { url = "https://files.pythonhosted.org/packages/22/ad/77e921b9f256d5da36424ffb711ae79ca3f451ff8489eeca544d0701d74a/numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97", size = 6472170 }, - { url = "https://files.pythonhosted.org/packages/10/05/3442317535028bc29cf0c0dd4c191a4481e8376e9f0db6bcf29703cadae6/numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131", size = 15905558 }, - { url = "https://files.pythonhosted.org/packages/8b/cf/034500fb83041aa0286e0fb16e7c76e5c8b67c0711bb6e9e9737a717d5fe/numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448", size = 21169137 }, - { url = "https://files.pythonhosted.org/packages/4a/d9/32de45561811a4b87fbdee23b5797394e3d1504b4a7cf40c10199848893e/numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195", size = 13703552 }, - { url = "https://files.pythonhosted.org/packages/c1/ca/2f384720020c7b244d22508cb7ab23d95f179fcfff33c31a6eeba8d6c512/numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57", size = 5298957 }, - { url = "https://files.pythonhosted.org/packages/0e/78/a3e4f9fb6aa4e6fdca0c5428e8ba039408514388cf62d89651aade838269/numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a", size = 6905573 }, - { url = "https://files.pythonhosted.org/packages/a0/72/cfc3a1beb2caf4efc9d0b38a15fe34025230da27e1c08cc2eb9bfb1c7231/numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669", size = 13914330 }, - { url = "https://files.pythonhosted.org/packages/ba/a8/c17acf65a931ce551fee11b72e8de63bf7e8a6f0e21add4c937c83563538/numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951", size = 19534895 }, - { url = "https://files.pythonhosted.org/packages/ba/86/8767f3d54f6ae0165749f84648da9dcc8cd78ab65d415494962c86fac80f/numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9", size = 19937253 }, - { url = "https://files.pythonhosted.org/packages/df/87/f76450e6e1c14e5bb1eae6836478b1028e096fd02e85c1c37674606ab752/numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15", size = 14414074 }, - { url = "https://files.pythonhosted.org/packages/5c/ca/0f0f328e1e59f73754f06e1adfb909de43726d4f24c6a3f8805f34f2b0fa/numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4", size = 6470640 }, - { url = "https://files.pythonhosted.org/packages/eb/57/3a3f14d3a759dcf9bf6e9eda905794726b758819df4663f217d658a58695/numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc", size = 15910230 }, - { url = "https://files.pythonhosted.org/packages/45/40/2e117be60ec50d98fa08c2f8c48e09b3edea93cfcabd5a9ff6925d54b1c2/numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b", size = 20895803 }, - { url = "https://files.pythonhosted.org/packages/46/92/1b8b8dee833f53cef3e0a3f69b2374467789e0bb7399689582314df02651/numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e", size = 13471835 }, - { url = "https://files.pythonhosted.org/packages/7f/19/e2793bde475f1edaea6945be141aef6c8b4c669b90c90a300a8954d08f0a/numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c", size = 5038499 }, - { url = "https://files.pythonhosted.org/packages/e3/ff/ddf6dac2ff0dd50a7327bcdba45cb0264d0e96bb44d33324853f781a8f3c/numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c", size = 6633497 }, - { url = "https://files.pythonhosted.org/packages/72/21/67f36eac8e2d2cd652a2e69595a54128297cdcb1ff3931cfc87838874bd4/numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692", size = 13621158 }, - { url = "https://files.pythonhosted.org/packages/39/68/e9f1126d757653496dbc096cb429014347a36b228f5a991dae2c6b6cfd40/numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a", size = 19236173 }, - { url = "https://files.pythonhosted.org/packages/d1/e9/1f5333281e4ebf483ba1c888b1d61ba7e78d7e910fdd8e6499667041cc35/numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c", size = 19634174 }, - { url = "https://files.pythonhosted.org/packages/71/af/a469674070c8d8408384e3012e064299f7a2de540738a8e414dcfd639996/numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded", size = 14099701 }, - { url = "https://files.pythonhosted.org/packages/d0/3d/08ea9f239d0e0e939b6ca52ad403c84a2bce1bde301a8eb4888c1c1543f1/numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5", size = 6174313 }, - { url = "https://files.pythonhosted.org/packages/b2/b5/4ac39baebf1fdb2e72585c8352c56d063b6126be9fc95bd2bb5ef5770c20/numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a", size = 15606179 }, - { url = "https://files.pythonhosted.org/packages/43/c1/41c8f6df3162b0c6ffd4437d729115704bd43363de0090c7f913cfbc2d89/numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c", size = 21169942 }, - { url = "https://files.pythonhosted.org/packages/39/bc/fd298f308dcd232b56a4031fd6ddf11c43f9917fbc937e53762f7b5a3bb1/numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd", size = 13711512 }, - { url = "https://files.pythonhosted.org/packages/96/ff/06d1aa3eeb1c614eda245c1ba4fb88c483bee6520d361641331872ac4b82/numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b", size = 5306976 }, - { url = "https://files.pythonhosted.org/packages/2d/98/121996dcfb10a6087a05e54453e28e58694a7db62c5a5a29cee14c6e047b/numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729", size = 6906494 }, - { url = "https://files.pythonhosted.org/packages/15/31/9dffc70da6b9bbf7968f6551967fc21156207366272c2a40b4ed6008dc9b/numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1", size = 13912596 }, - { url = "https://files.pythonhosted.org/packages/b9/14/78635daab4b07c0930c919d451b8bf8c164774e6a3413aed04a6d95758ce/numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd", size = 19526099 }, - { url = "https://files.pythonhosted.org/packages/26/4c/0eeca4614003077f68bfe7aac8b7496f04221865b3a5e7cb230c9d055afd/numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d", size = 19932823 }, - { url = "https://files.pythonhosted.org/packages/f1/46/ea25b98b13dccaebddf1a803f8c748680d972e00507cd9bc6dcdb5aa2ac1/numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d", size = 14404424 }, - { url = "https://files.pythonhosted.org/packages/c8/a6/177dd88d95ecf07e722d21008b1b40e681a929eb9e329684d449c36586b2/numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa", size = 6476809 }, - { url = "https://files.pythonhosted.org/packages/ea/2b/7fc9f4e7ae5b507c1a3a21f0f15ed03e794c1242ea8a242ac158beb56034/numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73", size = 15911314 }, - { url = "https://files.pythonhosted.org/packages/8f/3b/df5a870ac6a3be3a86856ce195ef42eec7ae50d2a202be1f5a4b3b340e14/numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8", size = 21025288 }, - { url = "https://files.pythonhosted.org/packages/2c/97/51af92f18d6f6f2d9ad8b482a99fb74e142d71372da5d834b3a2747a446e/numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4", size = 6762793 }, - { url = "https://files.pythonhosted.org/packages/12/46/de1fbd0c1b5ccaa7f9a005b66761533e2f6a3e560096682683a223631fe9/numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c", size = 19334885 }, - { url = "https://files.pythonhosted.org/packages/cc/dc/d330a6faefd92b446ec0f0dfea4c3207bb1fef3c4771d19cf4543efd2c78/numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385", size = 15828784 }, +version = "1.26.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/6e/09db70a523a96d25e115e71cc56a6f9031e7b8cd166c1ac8438307c14058/numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010", size = 15786129 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/94/ace0fdea5241a27d13543ee117cbc65868e82213fb31a8eb7fe9ff23f313/numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0", size = 20631468 }, + { url = "https://files.pythonhosted.org/packages/20/f7/b24208eba89f9d1b58c1668bc6c8c4fd472b20c45573cb767f59d49fb0f6/numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a", size = 13966411 }, + { url = "https://files.pythonhosted.org/packages/fc/a5/4beee6488160798683eed5bdb7eead455892c3b4e1f78d79d8d3f3b084ac/numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4", size = 14219016 }, + { url = "https://files.pythonhosted.org/packages/4b/d7/ecf66c1cd12dc28b4040b15ab4d17b773b87fa9d29ca16125de01adb36cd/numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f", size = 18240889 }, + { url = "https://files.pythonhosted.org/packages/24/03/6f229fe3187546435c4f6f89f6d26c129d4f5bed40552899fcf1f0bf9e50/numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a", size = 13876746 }, + { url = "https://files.pythonhosted.org/packages/39/fe/39ada9b094f01f5a35486577c848fe274e374bbf8d8f472e1423a0bbd26d/numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2", size = 18078620 }, + { url = "https://files.pythonhosted.org/packages/d5/ef/6ad11d51197aad206a9ad2286dc1aac6a378059e06e8cf22cd08ed4f20dc/numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07", size = 5972659 }, + { url = "https://files.pythonhosted.org/packages/19/77/538f202862b9183f54108557bfda67e17603fc560c384559e769321c9d92/numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5", size = 15808905 }, + { url = "https://files.pythonhosted.org/packages/11/57/baae43d14fe163fa0e4c47f307b6b2511ab8d7d30177c491960504252053/numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71", size = 20630554 }, + { url = "https://files.pythonhosted.org/packages/1a/2e/151484f49fd03944c4a3ad9c418ed193cfd02724e138ac8a9505d056c582/numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef", size = 13997127 }, + { url = "https://files.pythonhosted.org/packages/79/ae/7e5b85136806f9dadf4878bf73cf223fe5c2636818ba3ab1c585d0403164/numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e", size = 14222994 }, + { url = "https://files.pythonhosted.org/packages/3a/d0/edc009c27b406c4f9cbc79274d6e46d634d139075492ad055e3d68445925/numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5", size = 18252005 }, + { url = "https://files.pythonhosted.org/packages/09/bf/2b1aaf8f525f2923ff6cfcf134ae5e750e279ac65ebf386c75a0cf6da06a/numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a", size = 13885297 }, + { url = "https://files.pythonhosted.org/packages/df/a0/4e0f14d847cfc2a633a1c8621d00724f3206cfeddeb66d35698c4e2cf3d2/numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a", size = 18093567 }, + { url = "https://files.pythonhosted.org/packages/d2/b7/a734c733286e10a7f1a8ad1ae8c90f2d33bf604a96548e0a4a3a6739b468/numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20", size = 5968812 }, + { url = "https://files.pythonhosted.org/packages/3f/6b/5610004206cf7f8e7ad91c5a85a8c71b2f2f8051a0c0c4d5916b76d6cbb2/numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2", size = 15811913 }, + { url = "https://files.pythonhosted.org/packages/95/12/8f2020a8e8b8383ac0177dc9570aad031a3beb12e38847f7129bacd96228/numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218", size = 20335901 }, + { url = "https://files.pythonhosted.org/packages/75/5b/ca6c8bd14007e5ca171c7c03102d17b4f4e0ceb53957e8c44343a9546dcc/numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b", size = 13685868 }, + { url = "https://files.pythonhosted.org/packages/79/f8/97f10e6755e2a7d027ca783f63044d5b1bc1ae7acb12afe6a9b4286eac17/numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b", size = 13925109 }, + { url = "https://files.pythonhosted.org/packages/0f/50/de23fde84e45f5c4fda2488c759b69990fd4512387a8632860f3ac9cd225/numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed", size = 17950613 }, + { url = "https://files.pythonhosted.org/packages/4c/0c/9c603826b6465e82591e05ca230dfc13376da512b25ccd0894709b054ed0/numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a", size = 13572172 }, + { url = "https://files.pythonhosted.org/packages/76/8c/2ba3902e1a0fc1c74962ea9bb33a534bb05984ad7ff9515bf8d07527cadd/numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0", size = 17786643 }, + { url = "https://files.pythonhosted.org/packages/28/4a/46d9e65106879492374999e76eb85f87b15328e06bd1550668f79f7b18c6/numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110", size = 5677803 }, + { url = "https://files.pythonhosted.org/packages/16/2e/86f24451c2d530c88daf997cb8d6ac622c1d40d19f5a031ed68a4b73a374/numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818", size = 15517754 }, + { url = "https://files.pythonhosted.org/packages/7d/24/ce71dc08f06534269f66e73c04f5709ee024a1afe92a7b6e1d73f158e1f8/numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c", size = 20636301 }, + { url = "https://files.pythonhosted.org/packages/ae/8c/ab03a7c25741f9ebc92684a20125fbc9fc1b8e1e700beb9197d750fdff88/numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be", size = 13971216 }, + { url = "https://files.pythonhosted.org/packages/6d/64/c3bcdf822269421d85fe0d64ba972003f9bb4aa9a419da64b86856c9961f/numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764", size = 14226281 }, + { url = "https://files.pythonhosted.org/packages/54/30/c2a907b9443cf42b90c17ad10c1e8fa801975f01cb9764f3f8eb8aea638b/numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3", size = 18249516 }, + { url = "https://files.pythonhosted.org/packages/43/12/01a563fc44c07095996d0129b8899daf89e4742146f7044cdbdb3101c57f/numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd", size = 13882132 }, + { url = "https://files.pythonhosted.org/packages/16/ee/9df80b06680aaa23fc6c31211387e0db349e0e36d6a63ba3bd78c5acdf11/numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c", size = 18084181 }, + { url = "https://files.pythonhosted.org/packages/28/7d/4b92e2fe20b214ffca36107f1a3e75ef4c488430e64de2d9af5db3a4637d/numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6", size = 5976360 }, + { url = "https://files.pythonhosted.org/packages/b5/42/054082bd8220bbf6f297f982f0a8f5479fcbc55c8b511d928df07b965869/numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea", size = 15814633 }, + { url = "https://files.pythonhosted.org/packages/3f/72/3df6c1c06fc83d9cfe381cccb4be2532bbd38bf93fbc9fad087b6687f1c0/numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30", size = 20455961 }, + { url = "https://files.pythonhosted.org/packages/8e/02/570545bac308b58ffb21adda0f4e220ba716fb658a63c151daecc3293350/numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c", size = 18061071 }, + { url = "https://files.pythonhosted.org/packages/f4/5f/fafd8c51235f60d49f7a88e2275e13971e90555b67da52dd6416caec32fe/numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0", size = 15709730 }, ] [[package]] @@ -1494,11 +1502,11 @@ wheels = [ [[package]] name = "packaging" -version = "24.2" +version = "23.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f", size = 163950 } +sdist = { url = "https://files.pythonhosted.org/packages/fb/2b/9b9c33ffed44ee921d0967086d653047286054117d584f1b1a7c22ceaf7b/packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5", size = 146714 } wheels = [ - { url = "https://files.pythonhosted.org/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759", size = 65451 }, + { url = "https://files.pythonhosted.org/packages/ec/1a/610693ac4ee14fcdf2d9bf3c493370e4f2ef7ae2e19217d7a237ff42367d/packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7", size = 53011 }, ] [[package]] @@ -1606,65 +1614,68 @@ wheels = [ [[package]] name = "pillow" -version = "11.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/26/0d95c04c868f6bdb0c447e3ee2de5564411845e36a858cfd63766bc7b563/pillow-11.0.0.tar.gz", hash = "sha256:72bacbaf24ac003fea9bff9837d1eedb6088758d41e100c1552930151f677739", size = 46737780 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/98/fb/a6ce6836bd7fd93fbf9144bf54789e02babc27403b50a9e1583ee877d6da/pillow-11.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:6619654954dc4936fcff82db8eb6401d3159ec6be81e33c6000dfd76ae189947", size = 3154708 }, - { url = "https://files.pythonhosted.org/packages/6a/1d/1f51e6e912d8ff316bb3935a8cda617c801783e0b998bf7a894e91d3bd4c/pillow-11.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3c5ac4bed7519088103d9450a1107f76308ecf91d6dabc8a33a2fcfb18d0fba", size = 2979223 }, - { url = "https://files.pythonhosted.org/packages/90/83/e2077b0192ca8a9ef794dbb74700c7e48384706467067976c2a95a0f40a1/pillow-11.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a65149d8ada1055029fcb665452b2814fe7d7082fcb0c5bed6db851cb69b2086", size = 4183167 }, - { url = "https://files.pythonhosted.org/packages/0e/74/467af0146970a98349cdf39e9b79a6cc8a2e7558f2c01c28a7b6b85c5bda/pillow-11.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88a58d8ac0cc0e7f3a014509f0455248a76629ca9b604eca7dc5927cc593c5e9", size = 4283912 }, - { url = "https://files.pythonhosted.org/packages/85/b1/d95d4f7ca3a6c1ae120959605875a31a3c209c4e50f0029dc1a87566cf46/pillow-11.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:c26845094b1af3c91852745ae78e3ea47abf3dbcd1cf962f16b9a5fbe3ee8488", size = 4195815 }, - { url = "https://files.pythonhosted.org/packages/41/c3/94f33af0762ed76b5a237c5797e088aa57f2b7fa8ee7932d399087be66a8/pillow-11.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:1a61b54f87ab5786b8479f81c4b11f4d61702830354520837f8cc791ebba0f5f", size = 4366117 }, - { url = "https://files.pythonhosted.org/packages/ba/3c/443e7ef01f597497268899e1cca95c0de947c9bbf77a8f18b3c126681e5d/pillow-11.0.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:674629ff60030d144b7bca2b8330225a9b11c482ed408813924619c6f302fdbb", size = 4278607 }, - { url = "https://files.pythonhosted.org/packages/26/95/1495304448b0081e60c0c5d63f928ef48bb290acee7385804426fa395a21/pillow-11.0.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:598b4e238f13276e0008299bd2482003f48158e2b11826862b1eb2ad7c768b97", size = 4410685 }, - { url = "https://files.pythonhosted.org/packages/45/da/861e1df971ef0de9870720cb309ca4d553b26a9483ec9be3a7bf1de4a095/pillow-11.0.0-cp310-cp310-win32.whl", hash = "sha256:9a0f748eaa434a41fccf8e1ee7a3eed68af1b690e75328fd7a60af123c193b50", size = 2249185 }, - { url = "https://files.pythonhosted.org/packages/d5/4e/78f7c5202ea2a772a5ab05069c1b82503e6353cd79c7e474d4945f4b82c3/pillow-11.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:a5629742881bcbc1f42e840af185fd4d83a5edeb96475a575f4da50d6ede337c", size = 2566726 }, - { url = "https://files.pythonhosted.org/packages/77/e4/6e84eada35cbcc646fc1870f72ccfd4afacb0fae0c37ffbffe7f5dc24bf1/pillow-11.0.0-cp310-cp310-win_arm64.whl", hash = "sha256:ee217c198f2e41f184f3869f3e485557296d505b5195c513b2bfe0062dc537f1", size = 2254585 }, - { url = "https://files.pythonhosted.org/packages/f0/eb/f7e21b113dd48a9c97d364e0915b3988c6a0b6207652f5a92372871b7aa4/pillow-11.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1c1d72714f429a521d8d2d018badc42414c3077eb187a59579f28e4270b4b0fc", size = 3154705 }, - { url = "https://files.pythonhosted.org/packages/25/b3/2b54a1d541accebe6bd8b1358b34ceb2c509f51cb7dcda8687362490da5b/pillow-11.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:499c3a1b0d6fc8213519e193796eb1a86a1be4b1877d678b30f83fd979811d1a", size = 2979222 }, - { url = "https://files.pythonhosted.org/packages/20/12/1a41eddad8265c5c19dda8fb6c269ce15ee25e0b9f8f26286e6202df6693/pillow-11.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8b2351c85d855293a299038e1f89db92a2f35e8d2f783489c6f0b2b5f3fe8a3", size = 4190220 }, - { url = "https://files.pythonhosted.org/packages/a9/9b/8a8c4d07d77447b7457164b861d18f5a31ae6418ef5c07f6f878fa09039a/pillow-11.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f4dba50cfa56f910241eb7f883c20f1e7b1d8f7d91c750cd0b318bad443f4d5", size = 4291399 }, - { url = "https://files.pythonhosted.org/packages/fc/e4/130c5fab4a54d3991129800dd2801feeb4b118d7630148cd67f0e6269d4c/pillow-11.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:5ddbfd761ee00c12ee1be86c9c0683ecf5bb14c9772ddbd782085779a63dd55b", size = 4202709 }, - { url = "https://files.pythonhosted.org/packages/39/63/b3fc299528d7df1f678b0666002b37affe6b8751225c3d9c12cf530e73ed/pillow-11.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:45c566eb10b8967d71bf1ab8e4a525e5a93519e29ea071459ce517f6b903d7fa", size = 4372556 }, - { url = "https://files.pythonhosted.org/packages/c6/a6/694122c55b855b586c26c694937d36bb8d3b09c735ff41b2f315c6e66a10/pillow-11.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b4fd7bd29610a83a8c9b564d457cf5bd92b4e11e79a4ee4716a63c959699b306", size = 4287187 }, - { url = "https://files.pythonhosted.org/packages/ba/a9/f9d763e2671a8acd53d29b1e284ca298bc10a595527f6be30233cdb9659d/pillow-11.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cb929ca942d0ec4fac404cbf520ee6cac37bf35be479b970c4ffadf2b6a1cad9", size = 4418468 }, - { url = "https://files.pythonhosted.org/packages/6e/0e/b5cbad2621377f11313a94aeb44ca55a9639adabcaaa073597a1925f8c26/pillow-11.0.0-cp311-cp311-win32.whl", hash = "sha256:006bcdd307cc47ba43e924099a038cbf9591062e6c50e570819743f5607404f5", size = 2249249 }, - { url = "https://files.pythonhosted.org/packages/dc/83/1470c220a4ff06cd75fc609068f6605e567ea51df70557555c2ab6516b2c/pillow-11.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:52a2d8323a465f84faaba5236567d212c3668f2ab53e1c74c15583cf507a0291", size = 2566769 }, - { url = "https://files.pythonhosted.org/packages/52/98/def78c3a23acee2bcdb2e52005fb2810ed54305602ec1bfcfab2bda6f49f/pillow-11.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:16095692a253047fe3ec028e951fa4221a1f3ed3d80c397e83541a3037ff67c9", size = 2254611 }, - { url = "https://files.pythonhosted.org/packages/1c/a3/26e606ff0b2daaf120543e537311fa3ae2eb6bf061490e4fea51771540be/pillow-11.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d2c0a187a92a1cb5ef2c8ed5412dd8d4334272617f532d4ad4de31e0495bd923", size = 3147642 }, - { url = "https://files.pythonhosted.org/packages/4f/d5/1caabedd8863526a6cfa44ee7a833bd97f945dc1d56824d6d76e11731939/pillow-11.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:084a07ef0821cfe4858fe86652fffac8e187b6ae677e9906e192aafcc1b69903", size = 2978999 }, - { url = "https://files.pythonhosted.org/packages/d9/ff/5a45000826a1aa1ac6874b3ec5a856474821a1b59d838c4f6ce2ee518fe9/pillow-11.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8069c5179902dcdce0be9bfc8235347fdbac249d23bd90514b7a47a72d9fecf4", size = 4196794 }, - { url = "https://files.pythonhosted.org/packages/9d/21/84c9f287d17180f26263b5f5c8fb201de0f88b1afddf8a2597a5c9fe787f/pillow-11.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f02541ef64077f22bf4924f225c0fd1248c168f86e4b7abdedd87d6ebaceab0f", size = 4300762 }, - { url = "https://files.pythonhosted.org/packages/84/39/63fb87cd07cc541438b448b1fed467c4d687ad18aa786a7f8e67b255d1aa/pillow-11.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:fcb4621042ac4b7865c179bb972ed0da0218a076dc1820ffc48b1d74c1e37fe9", size = 4210468 }, - { url = "https://files.pythonhosted.org/packages/7f/42/6e0f2c2d5c60f499aa29be14f860dd4539de322cd8fb84ee01553493fb4d/pillow-11.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:00177a63030d612148e659b55ba99527803288cea7c75fb05766ab7981a8c1b7", size = 4381824 }, - { url = "https://files.pythonhosted.org/packages/31/69/1ef0fb9d2f8d2d114db982b78ca4eeb9db9a29f7477821e160b8c1253f67/pillow-11.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8853a3bf12afddfdf15f57c4b02d7ded92c7a75a5d7331d19f4f9572a89c17e6", size = 4296436 }, - { url = "https://files.pythonhosted.org/packages/44/ea/dad2818c675c44f6012289a7c4f46068c548768bc6c7f4e8c4ae5bbbc811/pillow-11.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3107c66e43bda25359d5ef446f59c497de2b5ed4c7fdba0894f8d6cf3822dafc", size = 4429714 }, - { url = "https://files.pythonhosted.org/packages/af/3a/da80224a6eb15bba7a0dcb2346e2b686bb9bf98378c0b4353cd88e62b171/pillow-11.0.0-cp312-cp312-win32.whl", hash = "sha256:86510e3f5eca0ab87429dd77fafc04693195eec7fd6a137c389c3eeb4cfb77c6", size = 2249631 }, - { url = "https://files.pythonhosted.org/packages/57/97/73f756c338c1d86bb802ee88c3cab015ad7ce4b838f8a24f16b676b1ac7c/pillow-11.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:8ec4a89295cd6cd4d1058a5e6aec6bf51e0eaaf9714774e1bfac7cfc9051db47", size = 2567533 }, - { url = "https://files.pythonhosted.org/packages/0b/30/2b61876e2722374558b871dfbfcbe4e406626d63f4f6ed92e9c8e24cac37/pillow-11.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:27a7860107500d813fcd203b4ea19b04babe79448268403172782754870dac25", size = 2254890 }, - { url = "https://files.pythonhosted.org/packages/f3/8b/01849a820686bf309b7d79a935d57bcafbfd016f1d78fc3d37ed2ba00f96/pillow-11.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:2e46773dc9f35a1dd28bd6981332fd7f27bec001a918a72a79b4133cf5291dba", size = 3154738 }, - { url = "https://files.pythonhosted.org/packages/35/e8/ff71a40ca8e24cfd6bb333cc4ca8cc24ebecb6942bb4ad1e5ec61f33d1b8/pillow-11.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2679d2258b7f1192b378e2893a8a0a0ca472234d4c2c0e6bdd3380e8dfa21b6a", size = 2979272 }, - { url = "https://files.pythonhosted.org/packages/09/4f/2280ad43f5639174a0227920a59664fb78c5096a0b3fd865fee5184d4526/pillow-11.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eda2616eb2313cbb3eebbe51f19362eb434b18e3bb599466a1ffa76a033fb916", size = 4179756 }, - { url = "https://files.pythonhosted.org/packages/14/b1/c8f428bae932a27ce9c87e7b21aba8ea3e820aa11413c5a795868c37e039/pillow-11.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20ec184af98a121fb2da42642dea8a29ec80fc3efbaefb86d8fdd2606619045d", size = 4280488 }, - { url = "https://files.pythonhosted.org/packages/78/66/7c5e44ab2c0123710a5d4692a4ee5931ac438efd7730ac395e305902346e/pillow-11.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:8594f42df584e5b4bb9281799698403f7af489fba84c34d53d1c4bfb71b7c4e7", size = 4192772 }, - { url = "https://files.pythonhosted.org/packages/36/5d/a9a00f8251ce93144f0250c0f0aece31b83ff33ffc243cdf987a8d584818/pillow-11.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:c12b5ae868897c7338519c03049a806af85b9b8c237b7d675b8c5e089e4a618e", size = 4363533 }, - { url = "https://files.pythonhosted.org/packages/fd/21/d8182fc1f3233078eb744f9f2950992f537655174febb8b3f7bdc61847b1/pillow-11.0.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:70fbbdacd1d271b77b7721fe3cdd2d537bbbd75d29e6300c672ec6bb38d9672f", size = 4275415 }, - { url = "https://files.pythonhosted.org/packages/c9/ee/93e02e8c29210ba7383843405b8b39bd19a164770f14d8569096dd123781/pillow-11.0.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:5178952973e588b3f1360868847334e9e3bf49d19e169bbbdfaf8398002419ae", size = 4407081 }, - { url = "https://files.pythonhosted.org/packages/6e/77/8cda03af2b5177a18d645ad4a7446cda6c1292d1a2fb6e772a06fa9fc86b/pillow-11.0.0-cp39-cp39-win32.whl", hash = "sha256:8c676b587da5673d3c75bd67dd2a8cdfeb282ca38a30f37950511766b26858c4", size = 2249213 }, - { url = "https://files.pythonhosted.org/packages/9f/e4/c90bf7889489f3a14803bd00d3645945dd476020ab67579985af8233ab30/pillow-11.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:94f3e1780abb45062287b4614a5bc0874519c86a777d4a7ad34978e86428b8dd", size = 2566862 }, - { url = "https://files.pythonhosted.org/packages/27/a6/77d2ed085055237581d6276ac1e85f562f1b1848614647d8427e49d83c03/pillow-11.0.0-cp39-cp39-win_arm64.whl", hash = "sha256:290f2cc809f9da7d6d622550bbf4c1e57518212da51b6a30fe8e0a270a5b78bd", size = 2254605 }, - { url = "https://files.pythonhosted.org/packages/36/57/42a4dd825eab762ba9e690d696d894ba366e06791936056e26e099398cda/pillow-11.0.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:1187739620f2b365de756ce086fdb3604573337cc28a0d3ac4a01ab6b2d2a6d2", size = 3119239 }, - { url = "https://files.pythonhosted.org/packages/98/f7/25f9f9e368226a1d6cf3507081a1a7944eddd3ca7821023377043f5a83c8/pillow-11.0.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:fbbcb7b57dc9c794843e3d1258c0fbf0f48656d46ffe9e09b63bbd6e8cd5d0a2", size = 2950803 }, - { url = "https://files.pythonhosted.org/packages/59/01/98ead48a6c2e31e6185d4c16c978a67fe3ccb5da5c2ff2ba8475379bb693/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d203af30149ae339ad1b4f710d9844ed8796e97fda23ffbc4cc472968a47d0b", size = 3281098 }, - { url = "https://files.pythonhosted.org/packages/51/c0/570255b2866a0e4d500a14f950803a2ec273bac7badc43320120b9262450/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a0d3b115009ebb8ac3d2ebec5c2982cc693da935f4ab7bb5c8ebe2f47d36f2", size = 3323665 }, - { url = "https://files.pythonhosted.org/packages/0e/75/689b4ec0483c42bfc7d1aacd32ade7a226db4f4fac57c6fdcdf90c0731e3/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:73853108f56df97baf2bb8b522f3578221e56f646ba345a372c78326710d3830", size = 3310533 }, - { url = "https://files.pythonhosted.org/packages/3d/30/38bd6149cf53da1db4bad304c543ade775d225961c4310f30425995cb9ec/pillow-11.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e58876c91f97b0952eb766123bfef372792ab3f4e3e1f1a2267834c2ab131734", size = 3414886 }, - { url = "https://files.pythonhosted.org/packages/ec/3d/c32a51d848401bd94cabb8767a39621496491ee7cd5199856b77da9b18ad/pillow-11.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:224aaa38177597bb179f3ec87eeefcce8e4f85e608025e9cfac60de237ba6316", size = 2567508 }, - { url = "https://files.pythonhosted.org/packages/67/21/fbb4222399f72d6e9c828818ff4ef8391c1e8e71623368295c8dbc789bd1/pillow-11.0.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:5bd2d3bdb846d757055910f0a59792d33b555800813c3b39ada1829c372ccb06", size = 2950706 }, - { url = "https://files.pythonhosted.org/packages/a2/b6/6aeb6e018b705ea4076db50aac078c9db8715a901f4c65698edc31375d0f/pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:375b8dd15a1f5d2feafff536d47e22f69625c1aa92f12b339ec0b2ca40263273", size = 3323524 }, - { url = "https://files.pythonhosted.org/packages/48/26/36cc90e9932c5fe7c8876c32d6091ef5a09e8137e8e0633045bd35085fdd/pillow-11.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:daffdf51ee5db69a82dd127eabecce20729e21f7a3680cf7cbb23f0829189790", size = 3414787 }, - { url = "https://files.pythonhosted.org/packages/44/5c/089154029fcca7729ae142ac820057f74ca4b0b59617734276c31281af15/pillow-11.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7326a1787e3c7b0429659e0a944725e1b03eeaa10edd945a86dead1913383944", size = 2567664 }, +version = "10.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cd/74/ad3d526f3bf7b6d3f408b73fde271ec69dfac8b81341a318ce825f2b3812/pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06", size = 46555059 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/69/a31cccd538ca0b5272be2a38347f8839b97a14be104ea08b0db92f749c74/pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e", size = 3509271 }, + { url = "https://files.pythonhosted.org/packages/9a/9e/4143b907be8ea0bce215f2ae4f7480027473f8b61fcedfda9d851082a5d2/pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d", size = 3375658 }, + { url = "https://files.pythonhosted.org/packages/8a/25/1fc45761955f9359b1169aa75e241551e74ac01a09f487adaaf4c3472d11/pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856", size = 4332075 }, + { url = "https://files.pythonhosted.org/packages/5e/dd/425b95d0151e1d6c951f45051112394f130df3da67363b6bc75dc4c27aba/pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f", size = 4444808 }, + { url = "https://files.pythonhosted.org/packages/b1/84/9a15cc5726cbbfe7f9f90bfb11f5d028586595907cd093815ca6644932e3/pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b", size = 4356290 }, + { url = "https://files.pythonhosted.org/packages/b5/5b/6651c288b08df3b8c1e2f8c1152201e0b25d240e22ddade0f1e242fc9fa0/pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc", size = 4525163 }, + { url = "https://files.pythonhosted.org/packages/07/8b/34854bf11a83c248505c8cb0fcf8d3d0b459a2246c8809b967963b6b12ae/pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e", size = 4463100 }, + { url = "https://files.pythonhosted.org/packages/78/63/0632aee4e82476d9cbe5200c0cdf9ba41ee04ed77887432845264d81116d/pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46", size = 4592880 }, + { url = "https://files.pythonhosted.org/packages/df/56/b8663d7520671b4398b9d97e1ed9f583d4afcbefbda3c6188325e8c297bd/pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984", size = 2235218 }, + { url = "https://files.pythonhosted.org/packages/f4/72/0203e94a91ddb4a9d5238434ae6c1ca10e610e8487036132ea9bf806ca2a/pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141", size = 2554487 }, + { url = "https://files.pythonhosted.org/packages/bd/52/7e7e93d7a6e4290543f17dc6f7d3af4bd0b3dd9926e2e8a35ac2282bc5f4/pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1", size = 2243219 }, + { url = "https://files.pythonhosted.org/packages/a7/62/c9449f9c3043c37f73e7487ec4ef0c03eb9c9afc91a92b977a67b3c0bbc5/pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c", size = 3509265 }, + { url = "https://files.pythonhosted.org/packages/f4/5f/491dafc7bbf5a3cc1845dc0430872e8096eb9e2b6f8161509d124594ec2d/pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be", size = 3375655 }, + { url = "https://files.pythonhosted.org/packages/73/d5/c4011a76f4207a3c151134cd22a1415741e42fa5ddecec7c0182887deb3d/pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3", size = 4340304 }, + { url = "https://files.pythonhosted.org/packages/ac/10/c67e20445a707f7a610699bba4fe050583b688d8cd2d202572b257f46600/pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6", size = 4452804 }, + { url = "https://files.pythonhosted.org/packages/a9/83/6523837906d1da2b269dee787e31df3b0acb12e3d08f024965a3e7f64665/pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe", size = 4365126 }, + { url = "https://files.pythonhosted.org/packages/ba/e5/8c68ff608a4203085158cff5cc2a3c534ec384536d9438c405ed6370d080/pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319", size = 4533541 }, + { url = "https://files.pythonhosted.org/packages/f4/7c/01b8dbdca5bc6785573f4cee96e2358b0918b7b2c7b60d8b6f3abf87a070/pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d", size = 4471616 }, + { url = "https://files.pythonhosted.org/packages/c8/57/2899b82394a35a0fbfd352e290945440e3b3785655a03365c0ca8279f351/pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696", size = 4600802 }, + { url = "https://files.pythonhosted.org/packages/4d/d7/a44f193d4c26e58ee5d2d9db3d4854b2cfb5b5e08d360a5e03fe987c0086/pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496", size = 2235213 }, + { url = "https://files.pythonhosted.org/packages/c1/d0/5866318eec2b801cdb8c82abf190c8343d8a1cd8bf5a0c17444a6f268291/pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91", size = 2554498 }, + { url = "https://files.pythonhosted.org/packages/d4/c8/310ac16ac2b97e902d9eb438688de0d961660a87703ad1561fd3dfbd2aa0/pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22", size = 2243219 }, + { url = "https://files.pythonhosted.org/packages/05/cb/0353013dc30c02a8be34eb91d25e4e4cf594b59e5a55ea1128fde1e5f8ea/pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94", size = 3509350 }, + { url = "https://files.pythonhosted.org/packages/e7/cf/5c558a0f247e0bf9cec92bff9b46ae6474dd736f6d906315e60e4075f737/pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597", size = 3374980 }, + { url = "https://files.pythonhosted.org/packages/84/48/6e394b86369a4eb68b8a1382c78dc092245af517385c086c5094e3b34428/pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80", size = 4343799 }, + { url = "https://files.pythonhosted.org/packages/3b/f3/a8c6c11fa84b59b9df0cd5694492da8c039a24cd159f0f6918690105c3be/pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca", size = 4459973 }, + { url = "https://files.pythonhosted.org/packages/7d/1b/c14b4197b80150fb64453585247e6fb2e1d93761fa0fa9cf63b102fde822/pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef", size = 4370054 }, + { url = "https://files.pythonhosted.org/packages/55/77/40daddf677897a923d5d33329acd52a2144d54a9644f2a5422c028c6bf2d/pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a", size = 4539484 }, + { url = "https://files.pythonhosted.org/packages/40/54/90de3e4256b1207300fb2b1d7168dd912a2fb4b2401e439ba23c2b2cabde/pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b", size = 4477375 }, + { url = "https://files.pythonhosted.org/packages/13/24/1bfba52f44193860918ff7c93d03d95e3f8748ca1de3ceaf11157a14cf16/pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9", size = 4608773 }, + { url = "https://files.pythonhosted.org/packages/55/04/5e6de6e6120451ec0c24516c41dbaf80cce1b6451f96561235ef2429da2e/pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42", size = 2235690 }, + { url = "https://files.pythonhosted.org/packages/74/0a/d4ce3c44bca8635bd29a2eab5aa181b654a734a29b263ca8efe013beea98/pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a", size = 2554951 }, + { url = "https://files.pythonhosted.org/packages/b5/ca/184349ee40f2e92439be9b3502ae6cfc43ac4b50bc4fc6b3de7957563894/pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9", size = 2243427 }, + { url = "https://files.pythonhosted.org/packages/31/85/955fa5400fa8039921f630372cfe5056eed6e1b8e0430ee4507d7de48832/pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d", size = 3509283 }, + { url = "https://files.pythonhosted.org/packages/23/9c/343827267eb28d41cd82b4180d33b10d868af9077abcec0af9793aa77d2d/pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b", size = 3375691 }, + { url = "https://files.pythonhosted.org/packages/60/a3/7ebbeabcd341eab722896d1a5b59a3df98c4b4d26cf4b0385f8aa94296f7/pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd", size = 4328295 }, + { url = "https://files.pythonhosted.org/packages/32/3f/c02268d0c6fb6b3958bdda673c17b315c821d97df29ae6969f20fb49388a/pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126", size = 4440810 }, + { url = "https://files.pythonhosted.org/packages/67/5d/1c93c8cc35f2fdd3d6cc7e4ad72d203902859a2867de6ad957d9b708eb8d/pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b", size = 4352283 }, + { url = "https://files.pythonhosted.org/packages/bc/a8/8655557c9c7202b8abbd001f61ff36711cefaf750debcaa1c24d154ef602/pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c", size = 4521800 }, + { url = "https://files.pythonhosted.org/packages/58/78/6f95797af64d137124f68af1bdaa13b5332da282b86031f6fa70cf368261/pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1", size = 4459177 }, + { url = "https://files.pythonhosted.org/packages/8a/6d/2b3ce34f1c4266d79a78c9a51d1289a33c3c02833fe294ef0dcbb9cba4ed/pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df", size = 4589079 }, + { url = "https://files.pythonhosted.org/packages/e3/e0/456258c74da1ff5bf8ef1eab06a95ca994d8b9ed44c01d45c3f8cbd1db7e/pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef", size = 2235247 }, + { url = "https://files.pythonhosted.org/packages/37/f8/bef952bdb32aa53741f58bf21798642209e994edc3f6598f337f23d5400a/pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5", size = 2554479 }, + { url = "https://files.pythonhosted.org/packages/bb/8e/805201619cad6651eef5fc1fdef913804baf00053461522fabbc5588ea12/pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e", size = 2243226 }, + { url = "https://files.pythonhosted.org/packages/38/30/095d4f55f3a053392f75e2eae45eba3228452783bab3d9a920b951ac495c/pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4", size = 3493889 }, + { url = "https://files.pythonhosted.org/packages/f3/e8/4ff79788803a5fcd5dc35efdc9386af153569853767bff74540725b45863/pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da", size = 3346160 }, + { url = "https://files.pythonhosted.org/packages/d7/ac/4184edd511b14f760c73f5bb8a5d6fd85c591c8aff7c2229677a355c4179/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026", size = 3435020 }, + { url = "https://files.pythonhosted.org/packages/da/21/1749cd09160149c0a246a81d646e05f35041619ce76f6493d6a96e8d1103/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e", size = 3490539 }, + { url = "https://files.pythonhosted.org/packages/b6/f5/f71fe1888b96083b3f6dfa0709101f61fc9e972c0c8d04e9d93ccef2a045/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5", size = 3476125 }, + { url = "https://files.pythonhosted.org/packages/96/b9/c0362c54290a31866c3526848583a2f45a535aa9d725fd31e25d318c805f/pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885", size = 3579373 }, + { url = "https://files.pythonhosted.org/packages/52/3b/ce7a01026a7cf46e5452afa86f97a5e88ca97f562cafa76570178ab56d8d/pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5", size = 2554661 }, + { url = "https://files.pythonhosted.org/packages/e1/1f/5a9fcd6ced51633c22481417e11b1b47d723f64fb536dfd67c015eb7f0ab/pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b", size = 3493850 }, + { url = "https://files.pythonhosted.org/packages/cb/e6/3ea4755ed5320cb62aa6be2f6de47b058c6550f752dd050e86f694c59798/pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908", size = 3346118 }, + { url = "https://files.pythonhosted.org/packages/0a/22/492f9f61e4648422b6ca39268ec8139277a5b34648d28f400faac14e0f48/pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b", size = 3434958 }, + { url = "https://files.pythonhosted.org/packages/f9/19/559a48ad4045704bb0547965b9a9345f5cd461347d977a56d178db28819e/pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8", size = 3490340 }, + { url = "https://files.pythonhosted.org/packages/d9/de/cebaca6fb79905b3a1aa0281d238769df3fb2ede34fd7c0caa286575915a/pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a", size = 3476048 }, + { url = "https://files.pythonhosted.org/packages/71/f0/86d5b2f04693b0116a01d75302b0a307800a90d6c351a8aa4f8ae76cd499/pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27", size = 3579366 }, + { url = "https://files.pythonhosted.org/packages/37/ae/2dbfc38cc4fd14aceea14bc440d5151b21f64c4c3ba3f6f4191610b7ee5d/pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3", size = 2554652 }, ] [[package]] @@ -1703,18 +1714,18 @@ wheels = [ [[package]] name = "protobuf" -version = "5.29.2" +version = "4.25.5" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a5/73/4e6295c1420a9d20c9c351db3a36109b4c9aa601916cb7c6871e3196a1ca/protobuf-5.29.2.tar.gz", hash = "sha256:b2cc8e8bb7c9326996f0e160137b0861f1a82162502658df2951209d0cb0309e", size = 424901 } +sdist = { url = "https://files.pythonhosted.org/packages/67/dd/48d5fdb68ec74d70fabcc252e434492e56f70944d9f17b6a15e3746d2295/protobuf-4.25.5.tar.gz", hash = "sha256:7f8249476b4a9473645db7f8ab42b02fe1488cbe5fb72fddd445e0665afd8584", size = 380315 } wheels = [ - { url = "https://files.pythonhosted.org/packages/f3/42/6db5387124708d619ffb990a846fb123bee546f52868039f8fa964c5bc54/protobuf-5.29.2-cp310-abi3-win32.whl", hash = "sha256:c12ba8249f5624300cf51c3d0bfe5be71a60c63e4dcf51ffe9a68771d958c851", size = 422697 }, - { url = "https://files.pythonhosted.org/packages/6c/38/2fcc968b377b531882d6ab2ac99b10ca6d00108394f6ff57c2395fb7baff/protobuf-5.29.2-cp310-abi3-win_amd64.whl", hash = "sha256:842de6d9241134a973aab719ab42b008a18a90f9f07f06ba480df268f86432f9", size = 434495 }, - { url = "https://files.pythonhosted.org/packages/cb/26/41debe0f6615fcb7e97672057524687ed86fcd85e3da3f031c30af8f0c51/protobuf-5.29.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a0c53d78383c851bfa97eb42e3703aefdc96d2036a41482ffd55dc5f529466eb", size = 417812 }, - { url = "https://files.pythonhosted.org/packages/e4/20/38fc33b60dcfb380507b99494aebe8c34b68b8ac7d32808c4cebda3f6f6b/protobuf-5.29.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:494229ecd8c9009dd71eda5fd57528395d1eacdf307dbece6c12ad0dd09e912e", size = 319562 }, - { url = "https://files.pythonhosted.org/packages/90/4d/c3d61e698e0e41d926dbff6aa4e57428ab1a6fc3b5e1deaa6c9ec0fd45cf/protobuf-5.29.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:b6b0d416bbbb9d4fbf9d0561dbfc4e324fd522f61f7af0fe0f282ab67b22477e", size = 319662 }, - { url = "https://files.pythonhosted.org/packages/5e/d0/76d086c744c8252b35c2bc9c49c3be7c815b806191e58ad82c6d228c07a8/protobuf-5.29.2-cp39-cp39-win32.whl", hash = "sha256:36000f97ea1e76e8398a3f02936aac2a5d2b111aae9920ec1b769fc4a222c4d9", size = 422665 }, - { url = "https://files.pythonhosted.org/packages/84/08/be8223de1967ae8a100aaa1f7076f65c42ed1ff5ed413ff5dd718cff9fa8/protobuf-5.29.2-cp39-cp39-win_amd64.whl", hash = "sha256:2d2e674c58a06311c8e99e74be43e7f3a8d1e2b2fdf845eaa347fbd866f23355", size = 434584 }, - { url = "https://files.pythonhosted.org/packages/f3/fd/c7924b4c2a1c61b8f4b64edd7a31ffacf63432135a2606f03a2f0d75a750/protobuf-5.29.2-py3-none-any.whl", hash = "sha256:fde4554c0e578a5a0bcc9a276339594848d1e89f9ea47b4427c80e5d72f90181", size = 172539 }, + { url = "https://files.pythonhosted.org/packages/00/35/1b3c5a5e6107859c4ca902f4fbb762e48599b78129a05d20684fef4a4d04/protobuf-4.25.5-cp310-abi3-win32.whl", hash = "sha256:5e61fd921603f58d2f5acb2806a929b4675f8874ff5f330b7d6f7e2e784bbcd8", size = 392457 }, + { url = "https://files.pythonhosted.org/packages/a7/ad/bf3f358e90b7e70bf7fb520702cb15307ef268262292d3bdb16ad8ebc815/protobuf-4.25.5-cp310-abi3-win_amd64.whl", hash = "sha256:4be0571adcbe712b282a330c6e89eae24281344429ae95c6d85e79e84780f5ea", size = 413449 }, + { url = "https://files.pythonhosted.org/packages/51/49/d110f0a43beb365758a252203c43eaaad169fe7749da918869a8c991f726/protobuf-4.25.5-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:b2fde3d805354df675ea4c7c6338c1aecd254dfc9925e88c6d31a2bcb97eb173", size = 394248 }, + { url = "https://files.pythonhosted.org/packages/c6/ab/0f384ca0bc6054b1a7b6009000ab75d28a5506e4459378b81280ae7fd358/protobuf-4.25.5-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:919ad92d9b0310070f8356c24b855c98df2b8bd207ebc1c0c6fcc9ab1e007f3d", size = 293717 }, + { url = "https://files.pythonhosted.org/packages/05/a6/094a2640be576d760baa34c902dcb8199d89bce9ed7dd7a6af74dcbbd62d/protobuf-4.25.5-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:fe14e16c22be926d3abfcb500e60cab068baf10b542b8c858fa27e098123e331", size = 294635 }, + { url = "https://files.pythonhosted.org/packages/6a/1e/73a7f7a6c21dcca8ba0ca90d5404a5011c388dd87e2ea1a9f11ea6b61ec0/protobuf-4.25.5-cp39-cp39-win32.whl", hash = "sha256:abe32aad8561aa7cc94fc7ba4fdef646e576983edb94a73381b03c53728a626f", size = 392501 }, + { url = "https://files.pythonhosted.org/packages/26/1b/a6c17bb22bdda781ebf058fb88c3727f69bed9f7913c0c5835caf6bc09f5/protobuf-4.25.5-cp39-cp39-win_amd64.whl", hash = "sha256:7a183f592dc80aa7c8da7ad9e55091c4ffc9497b3054452d629bb85fa27c2a45", size = 413396 }, + { url = "https://files.pythonhosted.org/packages/33/90/f198a61df8381fb43ae0fe81b3d2718e8dcc51ae8502c7657ab9381fbc4f/protobuf-4.25.5-py3-none-any.whl", hash = "sha256:0aebecb809cae990f8129ada5ca273d9d670b76d9bfc9b1809f0a9c02b7dbf41", size = 156467 }, ] [[package]] @@ -2423,20 +2434,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235 }, ] -[[package]] -name = "snowplow-tracker" -version = "1.0.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "requests" }, - { name = "types-requests" }, - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e1/86/0c6a02dd258d93050265e00b40448045a849c57b8f83de37ecec63089a4b/snowplow_tracker-1.0.4.tar.gz", hash = "sha256:16d8a3c001a7847d91dc081d508324550c314a4cbf5d6106b5ab35f77fa34678", size = 33998 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8b/70/c2836c7143b390a4b12ac44ada6fea7440b153165edcef47eca551b298af/snowplow_tracker-1.0.4-py3-none-any.whl", hash = "sha256:382e289811550f6ce7d5abc9e68590cc080ac9b21916b701b17497cfd6b32038", size = 44094 }, -] - [[package]] name = "sqlparse" version = "0.5.3" @@ -2486,7 +2483,7 @@ wheels = [ [[package]] name = "streamlit" -version = "1.41.1" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "altair" }, @@ -2494,6 +2491,7 @@ dependencies = [ { name = "cachetools" }, { name = "click" }, { name = "gitpython" }, + { name = "importlib-metadata" }, { name = "numpy" }, { name = "packaging" }, { name = "pandas" }, @@ -2501,17 +2499,20 @@ dependencies = [ { name = "protobuf" }, { name = "pyarrow" }, { name = "pydeck" }, + { name = "python-dateutil" }, { name = "requests" }, { name = "rich" }, { name = "tenacity" }, { name = "toml" }, { name = "tornado" }, { name = "typing-extensions" }, + { name = "tzlocal" }, + { name = "validators" }, { name = "watchdog", marker = "platform_system != 'Darwin'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/78/33/14b5ac0369ecf0af675911e5e84b934e6fcc2cec850857d2390eb373b0a6/streamlit-1.41.1.tar.gz", hash = "sha256:6626d32b098ba1458b71eebdd634c62af2dd876380e59c4b6a1e828a39d62d69", size = 8712473 } +sdist = { url = "https://files.pythonhosted.org/packages/9d/2d/5273692001363f9534422707a8a8382b7b0b250832fdff473ed992680aa9/streamlit-1.29.0.tar.gz", hash = "sha256:b6dfff9c5e132e5518c92150efcd452980db492a45fafeac3d4688d2334efa07", size = 8033351 } wheels = [ - { url = "https://files.pythonhosted.org/packages/c2/87/b2e162869500062a94dde7589c167367b5538dab6eacce2e7c0f00d5c9c5/streamlit-1.41.1-py2.py3-none-any.whl", hash = "sha256:0def00822480071d642e6df36cd63c089f991da3a69fd9eb4ab8f65ce27de4e0", size = 9100386 }, + { url = "https://files.pythonhosted.org/packages/d3/96/9251b421d0a1c7d625a82a04bea56b8a9830c785940ec16db454b85c6db7/streamlit-1.29.0-py2.py3-none-any.whl", hash = "sha256:753510edb5bb831af0e3bdacd353c879ad5b4f0211e7efa0ec378809464868b4", size = 8374475 }, ] [[package]] @@ -2540,11 +2541,11 @@ wheels = [ [[package]] name = "tenacity" -version = "9.0.0" +version = "8.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cd/94/91fccdb4b8110642462e653d5dcb27e7b674742ad68efd146367da7bdb10/tenacity-9.0.0.tar.gz", hash = "sha256:807f37ca97d62aa361264d497b0e31e92b8027044942bfa756160d908320d73b", size = 47421 } +sdist = { url = "https://files.pythonhosted.org/packages/a3/4d/6a19536c50b849338fcbe9290d562b52cbdcf30d8963d3588a68a4107df1/tenacity-8.5.0.tar.gz", hash = "sha256:8bc6c0c8a09b31e6cad13c47afbed1a567518250a9a171418582ed8d9c20ca78", size = 47309 } wheels = [ - { url = "https://files.pythonhosted.org/packages/b6/cb/b86984bed139586d01532a587464b5805f12e397594f19f931c4c2fbfa61/tenacity-9.0.0-py3-none-any.whl", hash = "sha256:93de0c98785b27fcf659856aa9f54bfbd399e29969b0621bc7f762bd441b4539", size = 28169 }, + { url = "https://files.pythonhosted.org/packages/d2/3f/8ba87d9e287b9d385a02a7114ddcef61b26f86411e121c9003eb509a1773/tenacity-8.5.0-py3-none-any.whl", hash = "sha256:b594c2a5945830c267ce6b79a166228323ed52718f30302c1359836112346687", size = 28165 }, ] [[package]] @@ -2637,18 +2638,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f2/53/9465dedf2d69fe26008e7732cf6e0a385e387c240869e7d54eed49782a3c/typeguard-4.4.1-py3-none-any.whl", hash = "sha256:9324ec07a27ec67fc54a9c063020ca4c0ae6abad5e9f0f9804ca59aee68c6e21", size = 35635 }, ] -[[package]] -name = "types-requests" -version = "2.32.0.20241016" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/fa/3c/4f2a430c01a22abd49a583b6b944173e39e7d01b688190a5618bd59a2e22/types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95", size = 18065 } -wheels = [ - { url = "https://files.pythonhosted.org/packages/d7/01/485b3026ff90e5190b5e24f1711522e06c79f4a56c8f4b95848ac072e20f/types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747", size = 15836 }, -] - [[package]] name = "typing-extensions" version = "4.12.2" @@ -2667,6 +2656,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a6/ab/7e5f53c3b9d14972843a647d8d7a853969a58aecc7559cb3267302c94774/tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd", size = 346586 }, ] +[[package]] +name = "tzlocal" +version = "5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tzdata", marker = "platform_system == 'Windows'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/04/d3/c19d65ae67636fe63953b20c2e4a8ced4497ea232c43ff8d01db16de8dc0/tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e", size = 30201 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/97/3f/c4c51c55ff8487f2e6d0e618dba917e3c3ee2caae6cf0fbb59c9b1876f2e/tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8", size = 17859 }, +] + [[package]] name = "urllib3" version = "2.3.0" @@ -2676,6 +2677,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c8/19/4ec628951a74043532ca2cf5d97b7b14863931476d117c471e8e2b1eb39f/urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df", size = 128369 }, ] +[[package]] +name = "validators" +version = "0.34.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/64/07/91582d69320f6f6daaf2d8072608a4ad8884683d4840e7e4f3a9dbdcc639/validators-0.34.0.tar.gz", hash = "sha256:647fe407b45af9a74d245b943b18e6a816acf4926974278f6dd617778e1e781f", size = 70955 } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/78/36828a4d857b25896f9774c875714ba4e9b3bc8a92d2debe3f4df3a83d4f/validators-0.34.0-py3-none-any.whl", hash = "sha256:c804b476e3e6d3786fa07a30073a4ef694e617805eb1946ceee3fe5a9b8b1321", size = 43536 }, +] + [[package]] name = "virtualenv" version = "20.28.0" From 1d265f06a292ea5534072cddb04fce6b99e3b5de Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Thu, 2 Jan 2025 01:47:19 -0700 Subject: [PATCH 42/46] feat: first cli command updated and working --- pyproject.toml | 3 +- src/dbt_osmosis/__main__.py | 2 +- src/dbt_osmosis/cli/__init__.py | 0 src/dbt_osmosis/cli/main.py | 1083 ++++++++++++++++++++++++++++++ src/dbt_osmosis/core/osmosis.py | 72 +- src/dbt_osmosis/main.py | 1085 ------------------------------- uv.lock | 2 + 7 files changed, 1135 insertions(+), 1112 deletions(-) create mode 100644 src/dbt_osmosis/cli/__init__.py create mode 100644 src/dbt_osmosis/cli/main.py delete mode 100644 src/dbt_osmosis/main.py diff --git a/pyproject.toml b/pyproject.toml index e181380d..75f0d2b2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,6 +29,7 @@ dependencies = [ "ruamel.yaml~=0.18.7", "rich>=10", "pluggy>=1.5.0,<2", + "typing-extensions~=4.12.2 ; python_version < '3.10'", ] [project.optional-dependencies] @@ -50,7 +51,7 @@ openai = ["openai>0.28.0"] dev = ["ruff~=0.8.4", "pytest~=8.3.4", "pre-commit>3.0.0"] [project.scripts] -"dbt-osmosis" = "dbt_osmosis.main:cli" +"dbt-osmosis" = "dbt_osmosis.cli.main:cli" [tool.black] line-length = 100 diff --git a/src/dbt_osmosis/__main__.py b/src/dbt_osmosis/__main__.py index 868e62b3..b96bb162 100644 --- a/src/dbt_osmosis/__main__.py +++ b/src/dbt_osmosis/__main__.py @@ -1,6 +1,6 @@ import sys -import dbt_osmosis.main +import dbt_osmosis.cli if __name__ == "__main__": dbt_osmosis.main.cli(sys.argv[1:]) diff --git a/src/dbt_osmosis/cli/__init__.py b/src/dbt_osmosis/cli/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/dbt_osmosis/cli/main.py b/src/dbt_osmosis/cli/main.py new file mode 100644 index 00000000..778988d1 --- /dev/null +++ b/src/dbt_osmosis/cli/main.py @@ -0,0 +1,1083 @@ +# pyright: reportUnreachable=false +import functools +import io +import sys +import typing as t + +import click + +import dbt_osmosis.core.logger as logger +from dbt_osmosis.core.osmosis import ( + DbtConfiguration, + YamlRefactorContext, + YamlRefactorSettings, + apply_restructure_plan, + commit_yamls, + create_dbt_project_context, + create_missing_source_yamls, + discover_profiles_dir, + discover_project_dir, + draft_restructure_delta_plan, + inherit_upstream_column_knowledge, + inject_missing_columns, + remove_columns_not_in_database, + sort_columns_as_in_database, + sync_node_to_yaml, +) + +T = t.TypeVar("T") +if sys.version_info >= (3, 9): + P = t.ParamSpec("P") +else: + import typing_extensions as te + + P = te.ParamSpec("P") + +_CONTEXT = {"max_content_width": 800} + + +@click.group() +@click.version_option() +def cli() -> None: + """dbt-osmosis is a CLI tool for dbt that helps you manage, document, and organize your dbt yaml files""" + pass + + +@cli.group() +def yaml(): + """Manage, document, and organize dbt YAML files""" + + +@cli.group() +def sql(): + """Execute and compile dbt SQL statements""" + + +def shared_opts(func: t.Callable[P, T]) -> t.Callable[P, T]: + """Here we define the options shared across subcommands + + Args: + func (Callable): Wraps a subcommand + + Returns: + Callable: Subcommand with added options + """ + + @click.option( + "--project-dir", + type=click.Path(exists=True, dir_okay=True, file_okay=False), + default=discover_project_dir, + help=( + "Which directory to look in for the dbt_project.yml file. Default is the current" + " working directory and its parents." + ), + ) + @click.option( + "--profiles-dir", + type=click.Path(exists=True, dir_okay=True, file_okay=False), + default=discover_profiles_dir, + help="Which directory to look in for the profiles.yml file. Defaults to ~/.dbt", + ) + @click.option( + "-t", + "--target", + type=click.STRING, + help="Which target to load. Overrides default target in the profiles.yml.", + ) + @functools.wraps(func) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: + return func(*args, **kwargs) + + return wrapper + + +@yaml.command(context_settings=_CONTEXT) +@shared_opts +@click.option( + "-f", + "--fqn", + multiple=True, + type=click.STRING, + help="Specify models based on dbt's FQN. Mostly useful when combined with dbt ls.", +) +@click.option( + "-F", + "--force-inherit-descriptions", + is_flag=True, + help="If specified, forces descriptions to be inherited from an upstream source if possible.", +) +@click.option( + "-d", + "--dry-run", + is_flag=True, + help="If specified, no changes are committed to disk.", +) +@click.option( + "-C", + "--check", + is_flag=True, + help="If specified, will return a non-zero exit code if any files are changed or would have changed.", +) +@click.option( + "--catalog-path", + type=click.Path(exists=True), + help="If specified, will read the list of columns from the catalog.json file instead of querying the warehouse.", +) +@click.option( + "--skip-add-columns", + is_flag=True, + help="If specified, we will skip adding columns to the models. This is useful if you want to document your models without adding columns present in the database.", +) +@click.option( + "--skip-add-tags", + is_flag=True, + help="If specified, we will skip adding upstream tags to the model columns.", +) +@click.option( + "--skip-merge-meta", + is_flag=True, + help="If specified, we will skip merging upstrean meta keys to the model columns.", +) +@click.option( + "--skip-add-data-types", # TODO: make sure this is implemented + is_flag=True, + help="If specified, we will skip adding data types to the models.", +) +@click.option( + "--numeric-precision", + is_flag=True, + help="If specified, numeric types will have precision and scale, e.g. Number(38, 8).", +) +@click.option( + "--char-length", + is_flag=True, + help="If specified, character types will have length, e.g. Varchar(128).", +) +@click.option( + "--add-progenitor-to-meta", + is_flag=True, + help="If specified, progenitor information will be added to the meta information of a column. This is useful if you want to know which model is the progenitor (origin) of a specific model's column.", +) +@click.option( + "--profile", + type=click.STRING, + help="Which profile to load. Overrides setting in dbt_project.yml.", +) +@click.option( + "--vars", + type=click.STRING, + help='Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. \'{"foo": "bar"}\'', +) +@click.option( + "--use-unrendered-descriptions", + is_flag=True, + help="If specified, will use unrendered column descriptions in the documentation. This is the only way to propogate docs blocks", +) +@click.option( + "--add-inheritance-for-specified-keys", + multiple=True, + type=click.STRING, + help="If specified, will add inheritance for the specified keys. IE policy_tags", +) +@click.option( + "--output-to-lower", # TODO: validate this is implemented + is_flag=True, + help="If specified, output yaml file columns and data types in lowercase if possible.", +) +@click.argument("models", nargs=-1) +def refactor( + target: str | None = None, + project_dir: str | None = None, + profiles_dir: str | None = None, + catalog_path: str | None = None, + fqn: list[str] | None = None, + force_inherit_descriptions: bool = False, + dry_run: bool = False, + check: bool = False, + skip_add_columns: bool = False, + skip_add_tags: bool = False, + skip_add_data_types: bool = False, + numeric_precision: bool = False, + char_length: bool = False, + skip_merge_meta: bool = False, + add_progenitor_to_meta: bool = False, + models: list[str] | None = None, + profile: str | None = None, + vars: str | None = None, + use_unrendered_descriptions: bool = False, + add_inheritance_for_specified_keys: list[str] | None = None, + output_to_lower: bool = False, +): + """Executes organize which syncs yaml files with database schema and organizes the dbt models + directory, reparses the project, then executes document passing down inheritable documentation + + \f + This command will conform your project as outlined in `dbt_project.yml`, bootstrap undocumented + dbt models, and propagate column level documentation downwards once all yamls are accounted for + + Args: + target (Optional[str]): Profile target. Defaults to default target set in profile yml + project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. + profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt + """ + logger.info(":water_wave: Executing dbt-osmosis\n") + settings = DbtConfiguration( + project_dir=t.cast(str, project_dir), + profiles_dir=t.cast(str, profiles_dir), + target=target, + profile=profile, + ) + context = YamlRefactorContext( + project=create_dbt_project_context(settings), + settings=YamlRefactorSettings( + fqns=fqn or [], + models=models or [], + dry_run=dry_run, + skip_add_columns=skip_add_columns, + skip_add_tags=skip_add_tags, + skip_merge_meta=skip_merge_meta, + skip_add_data_types=skip_add_data_types, + numeric_precision=numeric_precision, + char_length=char_length, + add_progenitor_to_meta=add_progenitor_to_meta, + use_unrendered_descriptions=use_unrendered_descriptions, + add_inheritance_for_specified_keys=add_inheritance_for_specified_keys or [], + output_to_lower=output_to_lower, + force_inherit_descriptions=force_inherit_descriptions, + catalog_path=catalog_path, + create_catalog_if_not_exists=False, # TODO: allow enabling if ready + ), + ) + if vars: + settings.vars = context.yaml_handler.load(io.StringIO(vars)) # pyright: ignore[reportUnknownMemberType] + + create_missing_source_yamls(context=context) + apply_restructure_plan( + context=context, plan=draft_restructure_delta_plan(context), confirm=False + ) + inject_missing_columns(context=context) + remove_columns_not_in_database(context=context) + inherit_upstream_column_knowledge(context=context) + sort_columns_as_in_database(context=context) + sync_node_to_yaml(context=context) + commit_yamls(context=context) + + if check and context.mutated > 0: + exit(1) + + +# @yaml.command(context_settings=_CONTEXT) +# @shared_opts +# @click.option( +# "-f", +# "--fqn", +# type=click.STRING, +# help=( +# "Specify models based on FQN. Use dots as separators. Looks like folder.folder.model or" +# " folder.folder.source.table. Use list command to see the scope of an FQN filter." +# " This may be deprecated in the future. Please use model positional selectors instead." +# ), +# ) +# @click.option( +# "-d", +# "--dry-run", +# is_flag=True, +# help="If specified, no changes are committed to disk.", +# ) +# @click.option( +# "-C", +# "--check", +# is_flag=True, +# help="If specified, will return a non-zero exit code if any files are changed.", +# ) +# @click.option( +# "--skip-add-columns", +# is_flag=True, +# help=( +# "If specified, we will skip adding columns to the models. This is useful if you want to" +# " document your models without adding columns present in the database." +# ), +# ) +# @click.option( +# "--skip-add-tags", +# is_flag=True, +# help="If specified, we will skip adding tags to the models.", +# ) +# @click.option( +# "--skip-add-data-types", +# is_flag=True, +# help="If specified, we will skip adding data types to the models.", +# ) +# @click.option( +# "--numeric-precision", +# is_flag=True, +# help="If specified, numeric types will have precision and scale, e.g. Number(38, 8).", +# ) +# @click.option( +# "--char-length", +# is_flag=True, +# help="If specified, character types will have length, e.g. Varchar(128).", +# ) +# @click.option( +# "--skip-merge-meta", +# is_flag=True, +# help="If specified, we will skip merging meta to the models.", +# ) +# @click.option( +# "--add-progenitor-to-meta", +# is_flag=True, +# help=( +# "If specified, progenitor information will be added to the meta information of a column." +# " This is useful if you want to know which model is the progenitor of a specific model's" +# " column." +# ), +# ) +# @click.option( +# "--profile", +# type=click.STRING, +# help="Which profile to load. Overrides setting in dbt_project.yml.", +# ) +# @click.option( +# "--vars", +# type=click.STRING, +# help=( +# "Supply variables to the project. This argument overrides variables defined in your" +# " dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable:" +# " my_value}'" +# ), +# ) +# @click.option( +# "--add-inheritance-for-specified-keys", +# multiple=True, +# type=click.STRING, +# help="If specified, will add inheritance for the specified keys.", +# ) +# @click.option( +# "--output-to-lower", +# is_flag=True, +# help="If specified, output yaml file in lowercase if possible.", +# ) +# @click.argument("models", nargs=-1) +# def organize( +# target: Optional[str] = None, +# project_dir: Optional[str] = None, +# profiles_dir: Optional[str] = None, +# fqn: Optional[str] = None, +# dry_run: bool = False, +# check: bool = False, +# models: Optional[List[str]] = None, +# skip_add_columns: bool = False, +# skip_add_tags: bool = False, +# skip_add_data_types: bool = False, +# numeric_precision: bool = False, +# char_length: bool = False, +# skip_merge_meta: bool = False, +# add_progenitor_to_meta: bool = False, +# profile: Optional[str] = None, +# vars: Optional[str] = None, +# add_inheritance_for_specified_keys: Optional[List[str]] = None, +# output_to_lower: bool = False, +# ): +# """Organizes schema ymls based on config and injects undocumented models +# +# \f +# This command will conform schema ymls in your project as outlined in `dbt_project.yml` & +# bootstrap undocumented dbt models +# +# Args: +# target (Optional[str]): Profile target. Defaults to default target set in profile yml +# project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. +# profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt +# """ +# logger().info(":water_wave: Executing dbt-osmosis\n") +# +# runner = DbtYamlManager( +# project_dir=project_dir, +# profiles_dir=profiles_dir, +# target=target, +# fqn=fqn, +# dry_run=dry_run, +# models=models, +# skip_add_columns=skip_add_columns, +# skip_add_tags=skip_add_tags, +# skip_add_data_types=skip_add_data_types, +# numeric_precision=numeric_precision, +# char_length=char_length, +# skip_merge_meta=skip_merge_meta, +# add_progenitor_to_meta=add_progenitor_to_meta, +# profile=profile, +# vars=vars, +# add_inheritance_for_specified_keys=add_inheritance_for_specified_keys, +# output_to_lower=output_to_lower, +# ) +# +# # Conform project structure & bootstrap undocumented models injecting columns +# runner.commit_project_restructure_to_disk() +# if check and runner._mutations > 0: +# exit(1) +# +# +# @yaml.command(context_settings=_CONTEXT) +# @shared_opts +# @click.option( +# "-f", +# "--fqn", +# type=click.STRING, +# help=( +# "Specify models based on FQN. Use dots as separators. Looks like folder.folder.model or" +# " folder.folder.source.table. Use list command to see the scope of an FQN filter." +# " This may be deprecated in the future. Please use model positional selectors instead." +# ), +# ) +# @click.option( +# "-F", +# "--force-inheritance", +# is_flag=True, +# help=( +# "If specified, forces documentation to be inherited overriding existing column level" +# " documentation where applicable." +# ), +# ) +# @click.option( +# "-d", +# "--dry-run", +# is_flag=True, +# help="If specified, no changes are committed to disk.", +# ) +# @click.option( +# "-C", +# "--check", +# is_flag=True, +# help="If specified, will return a non-zero exit code if any files are changed.", +# ) +# @click.option( +# "--catalog-file", +# type=click.Path(exists=True), +# help=( +# "If specified, will read the list of columns from the catalog.json file instead of querying" +# " the warehouse." +# ), +# ) +# @click.option( +# "--skip-add-columns", +# is_flag=True, +# help=( +# "If specified, we will skip adding columns to the models. This is useful if you want to" +# " document your models without adding columns present in the database." +# ), +# ) +# @click.option( +# "--skip-add-tags", +# is_flag=True, +# help="If specified, we will skip adding tags to the models.", +# ) +# @click.option( +# "--skip-add-data-types", +# is_flag=True, +# help="If specified, we will skip adding data types to the models.", +# ) +# @click.option( +# "--numeric-precision", +# is_flag=True, +# help="If specified, numeric types will have precision and scale, e.g. Number(38, 8).", +# ) +# @click.option( +# "--char-length", +# is_flag=True, +# help="If specified, character types will have length, e.g. Varchar(128).", +# ) +# @click.option( +# "--skip-merge-meta", +# is_flag=True, +# help="If specified, we will skip merging meta to the models.", +# ) +# @click.option( +# "--add-progenitor-to-meta", +# is_flag=True, +# help=( +# "If specified, progenitor information will be added to the meta information of a column." +# " This is useful if you want to know which model is the progenitor of a specific model's" +# " column." +# ), +# ) +# @click.option( +# "--profile", +# type=click.STRING, +# help="Which profile to load. Overrides setting in dbt_project.yml.", +# ) +# @click.option( +# "--vars", +# type=click.STRING, +# help=( +# "Supply variables to the project. This argument overrides variables defined in your" +# " dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable:" +# " my_value}'" +# ), +# ) +# @click.option( +# "--use-unrendered-descriptions", +# is_flag=True, +# help=( +# "If specified, will use unrendered column descriptions in the documentation." +# "This is useful for propogating docs blocks" +# ), +# ) +# @click.option( +# "--add-inheritance-for-specified-keys", +# multiple=True, +# type=click.STRING, +# help="If specified, will add inheritance for the specified keys.", +# ) +# @click.option( +# "--output-to-lower", +# is_flag=True, +# help="If specified, output yaml file in lowercase if possible.", +# ) +# @click.argument("models", nargs=-1) +# def document( +# target: Optional[str] = None, +# project_dir: Optional[str] = None, +# profiles_dir: Optional[str] = None, +# catalog_file: Optional[str] = None, +# fqn: Optional[str] = None, +# force_inheritance: bool = False, +# dry_run: bool = False, +# check: bool = False, +# models: Optional[List[str]] = None, +# skip_add_columns: bool = False, +# skip_add_tags: bool = False, +# skip_add_data_types: bool = False, +# numeric_precision: bool = False, +# char_length: bool = False, +# skip_merge_meta: bool = False, +# add_progenitor_to_meta: bool = False, +# profile: Optional[str] = None, +# vars: Optional[str] = None, +# use_unrendered_descriptions: bool = False, +# add_inheritance_for_specified_keys: Optional[List[str]] = None, +# output_to_lower: bool = False, +# ): +# """Column level documentation inheritance for existing models +# +# \f +# This command will conform schema ymls in your project as outlined in `dbt_project.yml` & +# bootstrap undocumented dbt models +# +# Args: +# target (Optional[str]): Profile target. Defaults to default target set in profile yml +# project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. +# profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt +# """ +# logger().info(":water_wave: Executing dbt-osmosis\n") +# +# runner = DbtYamlManager( +# project_dir=project_dir, +# profiles_dir=profiles_dir, +# target=target, +# fqn=fqn, +# dry_run=dry_run, +# models=models, +# catalog_file=catalog_file, +# skip_add_columns=skip_add_columns, +# skip_add_tags=skip_add_tags, +# skip_add_data_types=skip_add_data_types, +# numeric_precision=numeric_precision, +# char_length=char_length, +# skip_merge_meta=skip_merge_meta, +# add_progenitor_to_meta=add_progenitor_to_meta, +# profile=profile, +# vars=vars, +# use_unrendered_descriptions=use_unrendered_descriptions, +# add_inheritance_for_specified_keys=add_inheritance_for_specified_keys, +# output_to_lower=output_to_lower, +# ) +# +# # Propagate documentation & inject/remove schema file columns to align with model in database +# runner.propagate_documentation_downstream(force_inheritance, output_to_lower) +# if check and runner._mutations > 0: +# exit(1) +# +# +# class ServerRegisterThread(threading.Thread): +# """Thin container to capture errors in project registration""" +# +# def run(self): +# try: +# threading.Thread.run(self) +# except Exception as err: +# self.err = err +# pass +# else: +# self.err = None +# +# +# def _health_check(host: str, port: int): +# """Performs health check on server, +# raises ConnectionError otherwise returns result""" +# t, max_t, i = 0.25, 10, 0 +# address = f"http://{host}:{port}/health" +# error_msg = f"Server at {address} is not healthy" +# while True: +# try: +# resp = requests.get(address) +# except Exception: +# time.sleep(t) +# i += 1 +# if t * i > max_t: +# logger().critical(error_msg, address) +# raise ConnectionError(error_msg) +# else: +# continue +# if resp.ok: +# break +# else: +# if resp.status_code in (400, 404): +# # this is not indicative of unhealthy server +# # but rather it wants a dbt project to be registered +# break +# logger().critical(error_msg, address) +# raise ConnectionError(error_msg) +# return resp.json() +# +# +# @server.command(context_settings=_CONTEXT) +# @shared_opts +# @click.option( +# "--host", +# type=click.STRING, +# help="The host to serve the server on", +# default="localhost", +# ) +# @click.option( +# "--port", +# type=click.INT, +# help="The port to serve the server on", +# default=8581, +# ) +# @click.option( +# "--register-project", +# is_flag=True, +# help=( +# "Try to register a dbt project on init as specified by --project-dir, --profiles-dir or" +# " their defaults if not passed explicitly" +# ), +# ) +# @click.option( +# "--exit-on-error", +# is_flag=True, +# help=( +# "A flag which indicates the program should terminate on registration failure if" +# " --register-project was unsuccessful" +# ), +# ) +# def serve( +# project_dir: str, +# profiles_dir: str, +# target: str, +# host: str = "localhost", +# port: int = 8581, +# register_project: bool = False, +# exit_on_error: bool = False, +# ): +# """Runs a lightweight server compatible with dbt-power-user and convenient for interactively +# running or compile dbt SQL queries with two simple endpoints accepting POST messages""" +# logger().info(":water_wave: Executing dbt-osmosis\n") +# +# def _register_project(): +# """Background job which registers the first project on the server automatically""" +# +# # Wait +# _health_check(host, port) +# +# # Register +# body = {"project_dir": project_dir, "profiles_dir": profiles_dir, "force": True} +# if target: +# body["target"] = target +# endpoint = f"http://{host}:{port}/register" +# logger().info("Registering project: %s", endpoint) +# res = requests.post( +# endpoint, +# headers={"X-dbt-Project": str(Path(project_dir).absolute())}, +# json=body, +# ).json() +# +# # Log +# logger().info(res) +# if "error" in res: +# raise ConnectionError(res["error"]["message"]) +# +# server = multiprocessing.Process(target=run_server, args=(None, host, port)) +# server.start() +# +# import atexit +# +# atexit.register(lambda: server.terminate()) +# +# register_handler: Optional[ServerRegisterThread] = None +# if register_project and project_dir and profiles_dir: +# register_handler = ServerRegisterThread(target=_register_project) +# register_handler.start() +# +# register_exit = None +# if register_handler is not None: +# register_handler.join() +# if register_handler.err is not None and exit_on_error: +# register_exit = 1 +# server.kill() +# +# server.join() +# sys.exit(register_exit or server.exitcode) +# +# +# @server.command(context_settings=_CONTEXT) +# @shared_opts +# @click.option( +# "--host", +# type=click.STRING, +# help="The host to serve the server on", +# default="localhost", +# ) +# @click.option( +# "--port", +# type=click.INT, +# help="The port to serve the server on", +# default=8581, +# ) +# @click.option( +# "--project-name", +# type=click.STRING, +# help=( +# "The name to register the project with. By default, it is a string value representing the" +# " absolute directory of the project on disk" +# ), +# ) +# def register_project( +# project_dir: str, +# profiles_dir: str, +# target: str, +# host: str = "localhost", +# port: int = 8581, +# project_name: Optional[str] = None, +# ): +# """Convenience method to allow user to register project on the running server from the CLI""" +# logger().info(":water_wave: Executing dbt-osmosis\n") +# +# # Wait +# _health_check(host, port) +# +# # Register +# body = {"project_dir": project_dir, "profiles_dir": profiles_dir, "force": True} +# if target: +# body["target"] = target +# endpoint = f"http://{host}:{port}/register" +# logger().info("Registering project: %s", endpoint) +# res = requests.post( +# endpoint, +# headers={"X-dbt-Project": project_name or str(Path(project_dir).absolute())}, +# json=body, +# ) +# +# # Log +# logger().info(res.json()) +# +# +# @server.command(context_settings=_CONTEXT) +# @click.option( +# "--project-name", +# type=click.STRING, +# help="The name of the registered project to remove.", +# ) +# @click.option( +# "--host", +# type=click.STRING, +# help="The host to serve the server on", +# default="localhost", +# ) +# @click.option( +# "--port", +# type=click.INT, +# help="The port to serve the server on", +# default=8581, +# ) +# def unregister_project( +# project_name: str, +# host: str = "localhost", +# port: int = 8581, +# ): +# """Convenience method to allow user to unregister project on the running server from the CLI""" +# logger().info(":water_wave: Executing dbt-osmosis\n") +# +# # Wait +# _health_check(host, port) +# +# # Unregister +# endpoint = f"http://{host}:{port}/unregister" +# logger().info("Unregistering project: %s", endpoint) +# res = requests.post( +# endpoint, +# headers={"X-dbt-Project": project_name}, +# ) +# +# # Log +# logger().info(res.json()) +# +# +# @cli.command( +# context_settings=dict( +# ignore_unknown_options=True, +# allow_extra_args=True, +# ) +# ) +# @click.option( +# "--project-dir", +# type=click.Path(exists=True, dir_okay=True, file_okay=False), +# help=( +# "Which directory to look in for the dbt_project.yml file. Default is the current working" +# " directory and its parents." +# ), +# ) +# @click.option( +# "--profiles-dir", +# type=click.Path(exists=True, dir_okay=True, file_okay=False), +# default=DEFAULT_PROFILES_DIR, +# help="Which directory to look in for the profiles.yml file. Defaults to ~/.dbt", +# ) +# @click.option( +# "--host", +# type=click.STRING, +# help="The host to serve the server on", +# default="localhost", +# ) +# @click.option( +# "--port", +# type=click.INT, +# help="The port to serve the server on", +# default=8501, +# ) +# @click.pass_context +# def workbench( +# ctx, +# profiles_dir: Optional[str] = None, +# project_dir: Optional[str] = None, +# host: str = "localhost", +# port: int = 8501, +# ): +# """Start the dbt-osmosis workbench +# +# \f +# Pass the --options command to see streamlit specific options that can be passed to the app, +# pass --config to see the output of streamlit config show +# """ +# logger().info(":water_wave: Executing dbt-osmosis\n") +# +# if "--options" in ctx.args: +# subprocess.run(["streamlit", "run", "--help"]) +# ctx.exit() +# +# import os +# +# if "--config" in ctx.args: +# subprocess.run( +# ["streamlit", "config", "show"], +# env=os.environ, +# cwd=Path.cwd(), +# ) +# ctx.exit() +# +# script_args = ["--"] +# if project_dir: +# script_args.append("--project-dir") +# script_args.append(project_dir) +# if profiles_dir: +# script_args.append("--profiles-dir") +# script_args.append(profiles_dir) +# +# subprocess.run( +# [ +# "streamlit", +# "run", +# "--runner.magicEnabled=false", +# f"--browser.serverAddress={host}", +# f"--browser.serverPort={port}", +# Path(__file__).parent / "app_v2.py", +# ] +# + ctx.args +# + script_args, +# env=os.environ, +# cwd=Path.cwd(), +# ) +# +# +# @cli.command(context_settings=_CONTEXT) +# @shared_opts +# @click.option( +# "-m", +# "--model", +# type=click.STRING, +# required=True, +# help="The model to edit in the workbench, must be a valid model as would be selected by `ref`", +# ) +# @click.option( +# "--pk", +# type=click.STRING, +# help="The primary key of the model with which to base the diff", +# ) +# @click.option( +# "--temp-table", +# is_flag=True, +# help="If specified, temp tables are used to stage the queries.", +# ) +# @click.option( +# "--agg/--no-agg", +# default=True, +# help="Use --no-agg to show sample results, by default we agg for a summary view.", +# ) +# @click.option( +# "-o", +# "--output", +# default="table", +# help=( +# "Output format can be one of table, chart/bar, or csv. CSV is saved to a file named" +# " dbt-osmosis-diff in working dir" +# ), +# ) +# def diff( +# model: str, +# pk: str, +# target: Optional[str] = None, +# project_dir: Optional[str] = None, +# profiles_dir: Optional[str] = None, +# temp_table: bool = False, +# agg: bool = True, +# output: str = "table", +# ): +# """Diff dbt models at different git revisions""" +# +# logger().info(":water_wave: Executing dbt-osmosis\n") +# +# runner = DbtProject( +# project_dir=project_dir, +# profiles_dir=profiles_dir, +# target=target, +# ) +# inject_macros(runner) +# diff_and_print_to_console(model, pk, runner, temp_table, agg, output) +# +# +# @sql.command(context_settings=_CONTEXT) +# @shared_opts +# @click.argument("sql") +# def run( +# sql: str = "", +# project_dir: Optional[str] = None, +# profiles_dir: Optional[str] = None, +# target: Optional[str] = None, +# ): +# """Executes a dbt SQL statement writing an OsmosisRunResult | OsmosisErrorContainer to stdout""" +# from dbt_osmosis.vendored.dbt_core_interface.project import ( +# ServerError, +# ServerErrorCode, +# ServerErrorContainer, +# ServerRunResult, +# ) +# +# rv: Union[ServerRunResult, ServerErrorContainer] = None +# +# try: +# runner = DbtProject( +# project_dir=project_dir, +# profiles_dir=profiles_dir, +# target=target, +# ) +# except Exception as init_err: +# rv = ServerErrorContainer( +# error=ServerError( +# code=ServerErrorCode.ProjectParseFailure, +# message=str(init_err), +# data=init_err.__dict__, +# ) +# ) +# +# if rv is not None: +# print(asdict(rv)) +# return rv +# +# try: +# result = runner.execute_code("\n".join(sys.stdin.readlines()) if sql == "-" else sql) +# except Exception as execution_err: +# rv = ServerErrorContainer( +# error=ServerError( +# code=ServerErrorCode.ExecuteSqlFailure, +# message=str(execution_err), +# data=execution_err.__dict__, +# ) +# ) +# else: +# rv = ServerRunResult( +# rows=[list(row) for row in result.table.rows], +# column_names=result.table.column_names, +# executed_code=result.compiled_code, +# raw_code=result.raw_code, +# ) +# +# print(asdict(rv)) +# return rv +# +# +# @sql.command(context_settings=_CONTEXT) +# @shared_opts +# @click.argument("sql") +# def compile( +# sql: str, +# project_dir: Optional[str] = None, +# profiles_dir: Optional[str] = None, +# target: Optional[str] = None, +# ): +# """Compiles dbt SQL statement writing an OsmosisCompileResult | OsmosisErrorContainer to stdout""" +# from dbt_osmosis.vendored.dbt_core_interface.project import ( +# ServerCompileResult, +# ServerError, +# ServerErrorCode, +# ServerErrorContainer, +# ) +# +# rv: Union[ServerCompileResult, ServerErrorContainer] = None +# +# try: +# runner = DbtProject( +# project_dir=project_dir, +# profiles_dir=profiles_dir, +# target=target, +# ) +# except Exception as init_err: +# rv = ServerErrorContainer( +# error=ServerError( +# code=ServerErrorCode.ProjectParseFailure, +# message=str(init_err), +# data=init_err.__dict__, +# ) +# ) +# +# if rv is not None: +# print(asdict(rv)) +# return rv +# +# try: +# result = runner.compile_code("\n".join(sys.stdin.readlines()) if sql == "-" else sql) +# except Exception as compilation_err: +# rv = ServerErrorContainer( +# error=ServerError( +# code=ServerErrorCode.CompileSqlFailure, +# message=str(compilation_err), +# data=compilation_err.__dict__, +# ) +# ) +# else: +# rv = ServerCompileResult(result=result.compiled_code) +# +# print(asdict(rv)) +# return rv +# +# +# if __name__ == "__main__": +# cli() diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index fa6a7a4b..c187e0c0 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -3,6 +3,7 @@ from __future__ import annotations import argparse +import io import json import os import re @@ -309,8 +310,8 @@ class RestructureDeltaPlan: class YamlRefactorSettings: """Settings for yaml based refactoring operations.""" - fqn: str | None = None - """Filter models to action via a fully qualified name match.""" + fqns: list[str] = field(default_factory=list) + """Filter models to action via a fully qualified name match such as returned by `dbt ls`.""" models: list[str] = field(default_factory=list) """Filter models to action via a file path match.""" dry_run: bool = False @@ -516,14 +517,16 @@ def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> tuple[AdapterR # ============== -def _is_fqn_match(node: ResultNode, fqn_str: str) -> bool: +def _is_fqn_match(node: ResultNode, fqns: list[str]) -> bool: """Filter models based on the provided fully qualified name matching on partial segments.""" - if not fqn_str: - return True - parts = fqn_str.split(".") - return len(node.fqn[1:]) >= len(parts) and all( - left == right for left, right in zip(parts, node.fqn[1:]) - ) + for fqn_str in fqns: + parts = fqn_str.split(".") + segment_match = len(node.fqn[1:]) >= len(parts) and all( + left == right for left, right in zip(parts, node.fqn[1:]) + ) + if segment_match: + return True + return False def _is_file_match(node: ResultNode, paths: list[str]) -> bool: @@ -565,8 +568,8 @@ def f(node: ResultNode) -> bool: if context.settings.models: if not _is_file_match(node, context.settings.models): return False - elif context.settings.fqn: - if not _is_fqn_match(node, context.settings.fqn): + if context.settings.fqns: + if not _is_fqn_match(node, context.settings.fqns): return False return True @@ -747,9 +750,9 @@ def _describe(rel: BaseRelation) -> dict[str, t.Any]: with src_yaml_path.open("w") as f: logger.info(f"Injecting source {source} => {src_yaml_path}") context.yaml_handler.dump({"version": 2, "sources": [source]}, f) + context.register_mutations(1) did_side_effect = True - context.register_mutations(1) if did_side_effect: logger.info("Reloading project to pick up new sources.") @@ -831,7 +834,6 @@ def build_yaml_file_mapping( return out_map -# TODO: detect if something is dirty to minimize disk writes on commits _YAML_BUFFER_CACHE: dict[Path, t.Any] = {} """Cache for yaml file buffers to avoid redundant disk reads/writes and simplify edits.""" @@ -851,10 +853,19 @@ def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any] if not context.settings.dry_run: with context.yaml_handler_lock: path.parent.mkdir(parents=True, exist_ok=True) - context.yaml_handler.dump(data, path) + original = path.read_bytes() if path.is_file() else b"" + context.yaml_handler.dump(data, staging := io.BytesIO()) + modified = staging.getvalue() + if modified != original: + logger.info(f"Writing {path}") + with path.open("wb") as f: + _ = f.write(modified) + context.register_mutations(1) + else: + logger.debug(f"Skipping {path} (no changes)") + del staging if path in _YAML_BUFFER_CACHE: del _YAML_BUFFER_CACHE[path] - context.register_mutations(1) def commit_yamls(context: YamlRefactorContext) -> None: @@ -862,10 +873,17 @@ def commit_yamls(context: YamlRefactorContext) -> None: if not context.settings.dry_run: with context.yaml_handler_lock: for path in list(_YAML_BUFFER_CACHE.keys()): - with path.open("w") as f: - context.yaml_handler.dump(_YAML_BUFFER_CACHE[path], f) + original = path.read_bytes() if path.is_file() else b"" + context.yaml_handler.dump(_YAML_BUFFER_CACHE[path], staging := io.BytesIO()) + modified = staging.getvalue() + if modified != original: + with path.open("wb") as f: + logger.info(f"Writing {path}") + _ = f.write(modified) + context.register_mutations(1) + else: + logger.debug(f"Skipping {path} (no changes)") del _YAML_BUFFER_CACHE[path] - context.register_mutations(1) def _generate_minimal_model_yaml(node: ModelNode | SeedNode) -> dict[str, t.Any]: @@ -1063,7 +1081,9 @@ def _sync_doc_section( doc_section["columns"] = incoming_columns -def sync_node_to_yaml(context: YamlRefactorContext, node: ResultNode | None = None) -> None: +def sync_node_to_yaml( + context: YamlRefactorContext, node: ResultNode | None = None, *, commit: bool = True +) -> None: """Synchronize a single node's columns, description, tags, meta, etc. from the manifest into its corresponding YAML file. We assume the manifest node is the single source of truth, so the YAML file is overwritten to match. @@ -1079,7 +1099,7 @@ def sync_node_to_yaml(context: YamlRefactorContext, node: ResultNode | None = No """ if node is None: for _, node in filter_models(context): - sync_node_to_yaml(context, node) + sync_node_to_yaml(context, node, commit=commit) return current_path = get_current_yaml_path(context, node) @@ -1150,7 +1170,8 @@ def sync_node_to_yaml(context: YamlRefactorContext, node: ResultNode | None = No if len(doc.get(k, [])) == 0: _ = doc.pop(k, None) - _write_yaml(context, current_path, doc) + if commit: + _write_yaml(context, current_path, doc) def apply_restructure_plan( @@ -1457,7 +1478,6 @@ def _position(column: dict[str, t.Any]): node.columns = { k: v for k, v in sorted(node.columns.items(), key=lambda i: _position(i[1].to_dict())) } - context.register_mutations(1) def sort_columns_alphabetically( @@ -1469,7 +1489,6 @@ def sort_columns_alphabetically( sort_columns_alphabetically(context, node) return node.columns = {k: v for k, v in sorted(node.columns.items(), key=lambda i: i[0])} - context.register_mutations(1) # Fuzzy Plugins @@ -1543,16 +1562,19 @@ def run_example_compilation_flow(c: DbtConfiguration) -> None: node = compile_sql_code(context, "select '{{ 1+1 }}' as col_{{ var('foo') }}") print("Compiled =>", node.compiled_code) - resp, _ = execute_sql_code(context, "select '{{ 1+2 }}' as col_{{ var('foo') }}") + resp, t_ = execute_sql_code(context, "select '{{ 1+2 }}' as col_{{ var('foo') }}") print("Resp =>", resp) + t_.print_csv() + if __name__ == "__main__": + # Kitchen sink c = DbtConfiguration( project_dir="demo_duckdb", profiles_dir="demo_duckdb", vars={"dbt-osmosis": {}} ) - # run_example_compilation_flow(c) + run_example_compilation_flow(c) project = create_dbt_project_context(c) _ = generate_catalog(project) diff --git a/src/dbt_osmosis/main.py b/src/dbt_osmosis/main.py deleted file mode 100644 index 42b478e5..00000000 --- a/src/dbt_osmosis/main.py +++ /dev/null @@ -1,1085 +0,0 @@ -import functools -import multiprocessing -import subprocess -import sys -import threading -import time -from dataclasses import asdict -from pathlib import Path -from typing import Callable, List, Optional, Union - -import click -import requests - -from dbt_osmosis.core.diff import diff_and_print_to_console -from dbt_osmosis.core.log_controller import logger -from dbt_osmosis.core.macros import inject_macros -from dbt_osmosis.core.osmosis import DbtYamlManager -from dbt_osmosis.vendored.dbt_core_interface import ( - DEFAULT_PROFILES_DIR, - DEFAULT_PROJECT_DIR, - DbtProject, - run_server, -) - -CONTEXT = {"max_content_width": 800} - - -@click.group() -@click.version_option() -def cli(): - pass - - -@cli.group() -def yaml(): - """Manage, document, and organize dbt YAML files""" - - -@cli.group() -def sql(): - """Execute and compile dbt SQL statements""" - - -@cli.group() -def server(): - """Manage dbt osmosis server""" - - -def shared_opts(func: Callable) -> Callable: - """Here we define the options shared across subcommands - Args: - func (Callable): Wraps a subcommand - Returns: - Callable: Subcommand with added options - """ - - @click.option( - "--project-dir", - type=click.Path(exists=True, dir_okay=True, file_okay=False), - default=DEFAULT_PROJECT_DIR, - help=( - "Which directory to look in for the dbt_project.yml file. Default is the current" - " working directory and its parents." - ), - ) - @click.option( - "--profiles-dir", - type=click.Path(exists=True, dir_okay=True, file_okay=False), - default=DEFAULT_PROFILES_DIR, - help="Which directory to look in for the profiles.yml file. Defaults to ~/.dbt", - ) - @click.option( - "-t", - "--target", - type=click.STRING, - help="Which target to load. Overrides default target in the profiles.yml.", - ) - @functools.wraps(func) - def wrapper(*args, **kwargs): - return func(*args, **kwargs) - - return wrapper - - -@yaml.command(context_settings=CONTEXT) -@shared_opts -@click.option( - "-f", - "--fqn", - type=click.STRING, - help=( - "Specify models based on dbt's FQN. Looks like folder.folder, folder.folder.model, or" - " folder.folder.source.table. Use list command to see the scope of an FQN filter." - " This may be deprecated in the future. Please use model positional selectors instead." - ), -) -@click.option( - "-F", - "--force-inheritance", - is_flag=True, - help=( - "If specified, forces documentation to be inherited overriding existing column level" - " documentation where applicable." - ), -) -@click.option( - "-d", - "--dry-run", - is_flag=True, - help="If specified, no changes are committed to disk.", -) -@click.option( - "-C", - "--check", - is_flag=True, - help="If specified, will return a non-zero exit code if any files are changed.", -) -@click.option( - "--catalog-file", - type=click.Path(exists=True), - help=( - "If specified, will read the list of columns from the catalog.json file instead of querying" - " the warehouse." - ), -) -@click.option( - "--skip-add-columns", - is_flag=True, - help=( - "If specified, we will skip adding columns to the models. This is useful if you want to" - " document your models without adding columns present in the database." - ), -) -@click.option( - "--skip-add-tags", - is_flag=True, - help="If specified, we will skip adding tags to the models.", -) -@click.option( - "--skip-add-data-types", - is_flag=True, - help="If specified, we will skip adding data types to the models.", -) -@click.option( - "--numeric-precision", - is_flag=True, - help="If specified, numeric types will have precision and scale, e.g. Number(38, 8).", -) -@click.option( - "--char-length", - is_flag=True, - help="If specified, character types will have length, e.g. Varchar(128).", -) -@click.option( - "--skip-merge-meta", - is_flag=True, - help="If specified, we will skip merging meta to the models.", -) -@click.option( - "--add-progenitor-to-meta", - is_flag=True, - help=( - "If specified, progenitor information will be added to the meta information of a column." - " This is useful if you want to know which model is the progenitor of a specific model's" - " column." - ), -) -@click.option( - "--profile", - type=click.STRING, - help="Which profile to load. Overrides setting in dbt_project.yml.", -) -@click.option( - "--vars", - type=click.STRING, - help=( - "Supply variables to the project. This argument overrides variables defined in your" - " dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable:" - " my_value}'" - ), -) -@click.option( - "--use-unrendered-descriptions", - is_flag=True, - help=( - "If specified, will use unrendered column descriptions in the documentation." - "This is useful for propogating docs blocks" - ), -) -@click.option( - "--add-inheritance-for-specified-keys", - multiple=True, - type=click.STRING, - help="If specified, will add inheritance for the specified keys.", -) -@click.option( - "--output-to-lower", - is_flag=True, - help="If specified, output yaml file in lowercase if possible.", -) -@click.argument("models", nargs=-1) -def refactor( - target: Optional[str] = None, - project_dir: Optional[str] = None, - profiles_dir: Optional[str] = None, - catalog_file: Optional[str] = None, - fqn: Optional[str] = None, - force_inheritance: bool = False, - dry_run: bool = False, - check: bool = False, - skip_add_columns: bool = False, - skip_add_tags: bool = False, - skip_add_data_types: bool = False, - numeric_precision: bool = False, - char_length: bool = False, - skip_merge_meta: bool = False, - add_progenitor_to_meta: bool = False, - models: Optional[List[str]] = None, - profile: Optional[str] = None, - vars: Optional[str] = None, - use_unrendered_descriptions: bool = False, - add_inheritance_for_specified_keys: Optional[List[str]] = None, - output_to_lower: bool = False, -): - """Executes organize which syncs yaml files with database schema and organizes the dbt models - directory, reparses the project, then executes document passing down inheritable documentation - - \f - This command will conform your project as outlined in `dbt_project.yml`, bootstrap undocumented - dbt models, and propagate column level documentation downwards once all yamls are accounted for - - Args: - target (Optional[str]): Profile target. Defaults to default target set in profile yml - project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. - profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt - """ - logger().info(":water_wave: Executing dbt-osmosis\n") - - runner = DbtYamlManager( - project_dir=project_dir, - profiles_dir=profiles_dir, - target=target, - fqn=fqn, - dry_run=dry_run, - models=models, - catalog_file=catalog_file, - skip_add_columns=skip_add_columns, - skip_add_tags=skip_add_tags, - skip_add_data_types=skip_add_data_types, - numeric_precision=numeric_precision, - char_length=char_length, - skip_merge_meta=skip_merge_meta, - add_progenitor_to_meta=add_progenitor_to_meta, - profile=profile, - vars=vars, - use_unrendered_descriptions=use_unrendered_descriptions, - add_inheritance_for_specified_keys=add_inheritance_for_specified_keys, - output_to_lower=output_to_lower, - ) - - # Conform project structure & bootstrap undocumented models injecting columns - if runner.commit_project_restructure_to_disk(): - runner.safe_parse_project(init=True) - runner.propagate_documentation_downstream( - force_inheritance=force_inheritance, output_to_lower=output_to_lower - ) - if check and runner._mutations > 0: - exit(1) - - -@yaml.command(context_settings=CONTEXT) -@shared_opts -@click.option( - "-f", - "--fqn", - type=click.STRING, - help=( - "Specify models based on FQN. Use dots as separators. Looks like folder.folder.model or" - " folder.folder.source.table. Use list command to see the scope of an FQN filter." - " This may be deprecated in the future. Please use model positional selectors instead." - ), -) -@click.option( - "-d", - "--dry-run", - is_flag=True, - help="If specified, no changes are committed to disk.", -) -@click.option( - "-C", - "--check", - is_flag=True, - help="If specified, will return a non-zero exit code if any files are changed.", -) -@click.option( - "--skip-add-columns", - is_flag=True, - help=( - "If specified, we will skip adding columns to the models. This is useful if you want to" - " document your models without adding columns present in the database." - ), -) -@click.option( - "--skip-add-tags", - is_flag=True, - help="If specified, we will skip adding tags to the models.", -) -@click.option( - "--skip-add-data-types", - is_flag=True, - help="If specified, we will skip adding data types to the models.", -) -@click.option( - "--numeric-precision", - is_flag=True, - help="If specified, numeric types will have precision and scale, e.g. Number(38, 8).", -) -@click.option( - "--char-length", - is_flag=True, - help="If specified, character types will have length, e.g. Varchar(128).", -) -@click.option( - "--skip-merge-meta", - is_flag=True, - help="If specified, we will skip merging meta to the models.", -) -@click.option( - "--add-progenitor-to-meta", - is_flag=True, - help=( - "If specified, progenitor information will be added to the meta information of a column." - " This is useful if you want to know which model is the progenitor of a specific model's" - " column." - ), -) -@click.option( - "--profile", - type=click.STRING, - help="Which profile to load. Overrides setting in dbt_project.yml.", -) -@click.option( - "--vars", - type=click.STRING, - help=( - "Supply variables to the project. This argument overrides variables defined in your" - " dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable:" - " my_value}'" - ), -) -@click.option( - "--add-inheritance-for-specified-keys", - multiple=True, - type=click.STRING, - help="If specified, will add inheritance for the specified keys.", -) -@click.option( - "--output-to-lower", - is_flag=True, - help="If specified, output yaml file in lowercase if possible.", -) -@click.argument("models", nargs=-1) -def organize( - target: Optional[str] = None, - project_dir: Optional[str] = None, - profiles_dir: Optional[str] = None, - fqn: Optional[str] = None, - dry_run: bool = False, - check: bool = False, - models: Optional[List[str]] = None, - skip_add_columns: bool = False, - skip_add_tags: bool = False, - skip_add_data_types: bool = False, - numeric_precision: bool = False, - char_length: bool = False, - skip_merge_meta: bool = False, - add_progenitor_to_meta: bool = False, - profile: Optional[str] = None, - vars: Optional[str] = None, - add_inheritance_for_specified_keys: Optional[List[str]] = None, - output_to_lower: bool = False, -): - """Organizes schema ymls based on config and injects undocumented models - - \f - This command will conform schema ymls in your project as outlined in `dbt_project.yml` & - bootstrap undocumented dbt models - - Args: - target (Optional[str]): Profile target. Defaults to default target set in profile yml - project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. - profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt - """ - logger().info(":water_wave: Executing dbt-osmosis\n") - - runner = DbtYamlManager( - project_dir=project_dir, - profiles_dir=profiles_dir, - target=target, - fqn=fqn, - dry_run=dry_run, - models=models, - skip_add_columns=skip_add_columns, - skip_add_tags=skip_add_tags, - skip_add_data_types=skip_add_data_types, - numeric_precision=numeric_precision, - char_length=char_length, - skip_merge_meta=skip_merge_meta, - add_progenitor_to_meta=add_progenitor_to_meta, - profile=profile, - vars=vars, - add_inheritance_for_specified_keys=add_inheritance_for_specified_keys, - output_to_lower=output_to_lower, - ) - - # Conform project structure & bootstrap undocumented models injecting columns - runner.commit_project_restructure_to_disk() - if check and runner._mutations > 0: - exit(1) - - -@yaml.command(context_settings=CONTEXT) -@shared_opts -@click.option( - "-f", - "--fqn", - type=click.STRING, - help=( - "Specify models based on FQN. Use dots as separators. Looks like folder.folder.model or" - " folder.folder.source.table. Use list command to see the scope of an FQN filter." - " This may be deprecated in the future. Please use model positional selectors instead." - ), -) -@click.option( - "-F", - "--force-inheritance", - is_flag=True, - help=( - "If specified, forces documentation to be inherited overriding existing column level" - " documentation where applicable." - ), -) -@click.option( - "-d", - "--dry-run", - is_flag=True, - help="If specified, no changes are committed to disk.", -) -@click.option( - "-C", - "--check", - is_flag=True, - help="If specified, will return a non-zero exit code if any files are changed.", -) -@click.option( - "--catalog-file", - type=click.Path(exists=True), - help=( - "If specified, will read the list of columns from the catalog.json file instead of querying" - " the warehouse." - ), -) -@click.option( - "--skip-add-columns", - is_flag=True, - help=( - "If specified, we will skip adding columns to the models. This is useful if you want to" - " document your models without adding columns present in the database." - ), -) -@click.option( - "--skip-add-tags", - is_flag=True, - help="If specified, we will skip adding tags to the models.", -) -@click.option( - "--skip-add-data-types", - is_flag=True, - help="If specified, we will skip adding data types to the models.", -) -@click.option( - "--numeric-precision", - is_flag=True, - help="If specified, numeric types will have precision and scale, e.g. Number(38, 8).", -) -@click.option( - "--char-length", - is_flag=True, - help="If specified, character types will have length, e.g. Varchar(128).", -) -@click.option( - "--skip-merge-meta", - is_flag=True, - help="If specified, we will skip merging meta to the models.", -) -@click.option( - "--add-progenitor-to-meta", - is_flag=True, - help=( - "If specified, progenitor information will be added to the meta information of a column." - " This is useful if you want to know which model is the progenitor of a specific model's" - " column." - ), -) -@click.option( - "--profile", - type=click.STRING, - help="Which profile to load. Overrides setting in dbt_project.yml.", -) -@click.option( - "--vars", - type=click.STRING, - help=( - "Supply variables to the project. This argument overrides variables defined in your" - " dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable:" - " my_value}'" - ), -) -@click.option( - "--use-unrendered-descriptions", - is_flag=True, - help=( - "If specified, will use unrendered column descriptions in the documentation." - "This is useful for propogating docs blocks" - ), -) -@click.option( - "--add-inheritance-for-specified-keys", - multiple=True, - type=click.STRING, - help="If specified, will add inheritance for the specified keys.", -) -@click.option( - "--output-to-lower", - is_flag=True, - help="If specified, output yaml file in lowercase if possible.", -) -@click.argument("models", nargs=-1) -def document( - target: Optional[str] = None, - project_dir: Optional[str] = None, - profiles_dir: Optional[str] = None, - catalog_file: Optional[str] = None, - fqn: Optional[str] = None, - force_inheritance: bool = False, - dry_run: bool = False, - check: bool = False, - models: Optional[List[str]] = None, - skip_add_columns: bool = False, - skip_add_tags: bool = False, - skip_add_data_types: bool = False, - numeric_precision: bool = False, - char_length: bool = False, - skip_merge_meta: bool = False, - add_progenitor_to_meta: bool = False, - profile: Optional[str] = None, - vars: Optional[str] = None, - use_unrendered_descriptions: bool = False, - add_inheritance_for_specified_keys: Optional[List[str]] = None, - output_to_lower: bool = False, -): - """Column level documentation inheritance for existing models - - \f - This command will conform schema ymls in your project as outlined in `dbt_project.yml` & - bootstrap undocumented dbt models - - Args: - target (Optional[str]): Profile target. Defaults to default target set in profile yml - project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. - profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt - """ - logger().info(":water_wave: Executing dbt-osmosis\n") - - runner = DbtYamlManager( - project_dir=project_dir, - profiles_dir=profiles_dir, - target=target, - fqn=fqn, - dry_run=dry_run, - models=models, - catalog_file=catalog_file, - skip_add_columns=skip_add_columns, - skip_add_tags=skip_add_tags, - skip_add_data_types=skip_add_data_types, - numeric_precision=numeric_precision, - char_length=char_length, - skip_merge_meta=skip_merge_meta, - add_progenitor_to_meta=add_progenitor_to_meta, - profile=profile, - vars=vars, - use_unrendered_descriptions=use_unrendered_descriptions, - add_inheritance_for_specified_keys=add_inheritance_for_specified_keys, - output_to_lower=output_to_lower, - ) - - # Propagate documentation & inject/remove schema file columns to align with model in database - runner.propagate_documentation_downstream(force_inheritance, output_to_lower) - if check and runner._mutations > 0: - exit(1) - - -class ServerRegisterThread(threading.Thread): - """Thin container to capture errors in project registration""" - - def run(self): - try: - threading.Thread.run(self) - except Exception as err: - self.err = err - pass - else: - self.err = None - - -def _health_check(host: str, port: int): - """Performs health check on server, - raises ConnectionError otherwise returns result""" - t, max_t, i = 0.25, 10, 0 - address = f"http://{host}:{port}/health" - error_msg = f"Server at {address} is not healthy" - while True: - try: - resp = requests.get(address) - except Exception: - time.sleep(t) - i += 1 - if t * i > max_t: - logger().critical(error_msg, address) - raise ConnectionError(error_msg) - else: - continue - if resp.ok: - break - else: - if resp.status_code in (400, 404): - # this is not indicative of unhealthy server - # but rather it wants a dbt project to be registered - break - logger().critical(error_msg, address) - raise ConnectionError(error_msg) - return resp.json() - - -@server.command(context_settings=CONTEXT) -@shared_opts -@click.option( - "--host", - type=click.STRING, - help="The host to serve the server on", - default="localhost", -) -@click.option( - "--port", - type=click.INT, - help="The port to serve the server on", - default=8581, -) -@click.option( - "--register-project", - is_flag=True, - help=( - "Try to register a dbt project on init as specified by --project-dir, --profiles-dir or" - " their defaults if not passed explicitly" - ), -) -@click.option( - "--exit-on-error", - is_flag=True, - help=( - "A flag which indicates the program should terminate on registration failure if" - " --register-project was unsuccessful" - ), -) -def serve( - project_dir: str, - profiles_dir: str, - target: str, - host: str = "localhost", - port: int = 8581, - register_project: bool = False, - exit_on_error: bool = False, -): - """Runs a lightweight server compatible with dbt-power-user and convenient for interactively - running or compile dbt SQL queries with two simple endpoints accepting POST messages""" - logger().info(":water_wave: Executing dbt-osmosis\n") - - def _register_project(): - """Background job which registers the first project on the server automatically""" - - # Wait - _health_check(host, port) - - # Register - body = {"project_dir": project_dir, "profiles_dir": profiles_dir, "force": True} - if target: - body["target"] = target - endpoint = f"http://{host}:{port}/register" - logger().info("Registering project: %s", endpoint) - res = requests.post( - endpoint, - headers={"X-dbt-Project": str(Path(project_dir).absolute())}, - json=body, - ).json() - - # Log - logger().info(res) - if "error" in res: - raise ConnectionError(res["error"]["message"]) - - server = multiprocessing.Process(target=run_server, args=(None, host, port)) - server.start() - - import atexit - - atexit.register(lambda: server.terminate()) - - register_handler: Optional[ServerRegisterThread] = None - if register_project and project_dir and profiles_dir: - register_handler = ServerRegisterThread(target=_register_project) - register_handler.start() - - register_exit = None - if register_handler is not None: - register_handler.join() - if register_handler.err is not None and exit_on_error: - register_exit = 1 - server.kill() - - server.join() - sys.exit(register_exit or server.exitcode) - - -@server.command(context_settings=CONTEXT) -@shared_opts -@click.option( - "--host", - type=click.STRING, - help="The host to serve the server on", - default="localhost", -) -@click.option( - "--port", - type=click.INT, - help="The port to serve the server on", - default=8581, -) -@click.option( - "--project-name", - type=click.STRING, - help=( - "The name to register the project with. By default, it is a string value representing the" - " absolute directory of the project on disk" - ), -) -def register_project( - project_dir: str, - profiles_dir: str, - target: str, - host: str = "localhost", - port: int = 8581, - project_name: Optional[str] = None, -): - """Convenience method to allow user to register project on the running server from the CLI""" - logger().info(":water_wave: Executing dbt-osmosis\n") - - # Wait - _health_check(host, port) - - # Register - body = {"project_dir": project_dir, "profiles_dir": profiles_dir, "force": True} - if target: - body["target"] = target - endpoint = f"http://{host}:{port}/register" - logger().info("Registering project: %s", endpoint) - res = requests.post( - endpoint, - headers={"X-dbt-Project": project_name or str(Path(project_dir).absolute())}, - json=body, - ) - - # Log - logger().info(res.json()) - - -@server.command(context_settings=CONTEXT) -@click.option( - "--project-name", - type=click.STRING, - help="The name of the registered project to remove.", -) -@click.option( - "--host", - type=click.STRING, - help="The host to serve the server on", - default="localhost", -) -@click.option( - "--port", - type=click.INT, - help="The port to serve the server on", - default=8581, -) -def unregister_project( - project_name: str, - host: str = "localhost", - port: int = 8581, -): - """Convenience method to allow user to unregister project on the running server from the CLI""" - logger().info(":water_wave: Executing dbt-osmosis\n") - - # Wait - _health_check(host, port) - - # Unregister - endpoint = f"http://{host}:{port}/unregister" - logger().info("Unregistering project: %s", endpoint) - res = requests.post( - endpoint, - headers={"X-dbt-Project": project_name}, - ) - - # Log - logger().info(res.json()) - - -@cli.command( - context_settings=dict( - ignore_unknown_options=True, - allow_extra_args=True, - ) -) -@click.option( - "--project-dir", - type=click.Path(exists=True, dir_okay=True, file_okay=False), - help=( - "Which directory to look in for the dbt_project.yml file. Default is the current working" - " directory and its parents." - ), -) -@click.option( - "--profiles-dir", - type=click.Path(exists=True, dir_okay=True, file_okay=False), - default=DEFAULT_PROFILES_DIR, - help="Which directory to look in for the profiles.yml file. Defaults to ~/.dbt", -) -@click.option( - "--host", - type=click.STRING, - help="The host to serve the server on", - default="localhost", -) -@click.option( - "--port", - type=click.INT, - help="The port to serve the server on", - default=8501, -) -@click.pass_context -def workbench( - ctx, - profiles_dir: Optional[str] = None, - project_dir: Optional[str] = None, - host: str = "localhost", - port: int = 8501, -): - """Start the dbt-osmosis workbench - - \f - Pass the --options command to see streamlit specific options that can be passed to the app, - pass --config to see the output of streamlit config show - """ - logger().info(":water_wave: Executing dbt-osmosis\n") - - if "--options" in ctx.args: - subprocess.run(["streamlit", "run", "--help"]) - ctx.exit() - - import os - - if "--config" in ctx.args: - subprocess.run( - ["streamlit", "config", "show"], - env=os.environ, - cwd=Path.cwd(), - ) - ctx.exit() - - script_args = ["--"] - if project_dir: - script_args.append("--project-dir") - script_args.append(project_dir) - if profiles_dir: - script_args.append("--profiles-dir") - script_args.append(profiles_dir) - - subprocess.run( - [ - "streamlit", - "run", - "--runner.magicEnabled=false", - f"--browser.serverAddress={host}", - f"--browser.serverPort={port}", - Path(__file__).parent / "app_v2.py", - ] - + ctx.args - + script_args, - env=os.environ, - cwd=Path.cwd(), - ) - - -@cli.command(context_settings=CONTEXT) -@shared_opts -@click.option( - "-m", - "--model", - type=click.STRING, - required=True, - help="The model to edit in the workbench, must be a valid model as would be selected by `ref`", -) -@click.option( - "--pk", - type=click.STRING, - help="The primary key of the model with which to base the diff", -) -@click.option( - "--temp-table", - is_flag=True, - help="If specified, temp tables are used to stage the queries.", -) -@click.option( - "--agg/--no-agg", - default=True, - help="Use --no-agg to show sample results, by default we agg for a summary view.", -) -@click.option( - "-o", - "--output", - default="table", - help=( - "Output format can be one of table, chart/bar, or csv. CSV is saved to a file named" - " dbt-osmosis-diff in working dir" - ), -) -def diff( - model: str, - pk: str, - target: Optional[str] = None, - project_dir: Optional[str] = None, - profiles_dir: Optional[str] = None, - temp_table: bool = False, - agg: bool = True, - output: str = "table", -): - """Diff dbt models at different git revisions""" - - logger().info(":water_wave: Executing dbt-osmosis\n") - - runner = DbtProject( - project_dir=project_dir, - profiles_dir=profiles_dir, - target=target, - ) - inject_macros(runner) - diff_and_print_to_console(model, pk, runner, temp_table, agg, output) - - -@sql.command(context_settings=CONTEXT) -@shared_opts -@click.argument("sql") -def run( - sql: str = "", - project_dir: Optional[str] = None, - profiles_dir: Optional[str] = None, - target: Optional[str] = None, -): - """Executes a dbt SQL statement writing an OsmosisRunResult | OsmosisErrorContainer to stdout""" - from dbt_osmosis.vendored.dbt_core_interface.project import ( - ServerError, - ServerErrorCode, - ServerErrorContainer, - ServerRunResult, - ) - - rv: Union[ServerRunResult, ServerErrorContainer] = None - - try: - runner = DbtProject( - project_dir=project_dir, - profiles_dir=profiles_dir, - target=target, - ) - except Exception as init_err: - rv = ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectParseFailure, - message=str(init_err), - data=init_err.__dict__, - ) - ) - - if rv is not None: - print(asdict(rv)) - return rv - - try: - result = runner.execute_code("\n".join(sys.stdin.readlines()) if sql == "-" else sql) - except Exception as execution_err: - rv = ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ExecuteSqlFailure, - message=str(execution_err), - data=execution_err.__dict__, - ) - ) - else: - rv = ServerRunResult( - rows=[list(row) for row in result.table.rows], - column_names=result.table.column_names, - executed_code=result.compiled_code, - raw_code=result.raw_code, - ) - - print(asdict(rv)) - return rv - - -@sql.command(context_settings=CONTEXT) -@shared_opts -@click.argument("sql") -def compile( - sql: str, - project_dir: Optional[str] = None, - profiles_dir: Optional[str] = None, - target: Optional[str] = None, -): - """Compiles dbt SQL statement writing an OsmosisCompileResult | OsmosisErrorContainer to stdout""" - from dbt_osmosis.vendored.dbt_core_interface.project import ( - ServerCompileResult, - ServerError, - ServerErrorCode, - ServerErrorContainer, - ) - - rv: Union[ServerCompileResult, ServerErrorContainer] = None - - try: - runner = DbtProject( - project_dir=project_dir, - profiles_dir=profiles_dir, - target=target, - ) - except Exception as init_err: - rv = ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.ProjectParseFailure, - message=str(init_err), - data=init_err.__dict__, - ) - ) - - if rv is not None: - print(asdict(rv)) - return rv - - try: - result = runner.compile_code("\n".join(sys.stdin.readlines()) if sql == "-" else sql) - except Exception as compilation_err: - rv = ServerErrorContainer( - error=ServerError( - code=ServerErrorCode.CompileSqlFailure, - message=str(compilation_err), - data=compilation_err.__dict__, - ) - ) - else: - rv = ServerCompileResult(result=result.compiled_code) - - print(asdict(rv)) - return rv - - -if __name__ == "__main__": - cli() diff --git a/uv.lock b/uv.lock index 75103a20..511ffbb7 100644 --- a/uv.lock +++ b/uv.lock @@ -453,6 +453,7 @@ dependencies = [ { name = "pluggy" }, { name = "rich" }, { name = "ruamel-yaml" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, ] [package.optional-dependencies] @@ -501,6 +502,7 @@ requires-dist = [ { name = "streamlit", marker = "extra == 'workbench'", specifier = ">=1.20.0,<1.36.0" }, { name = "streamlit-ace", marker = "extra == 'workbench'", specifier = "~=0.1.1" }, { name = "streamlit-elements-fluence", marker = "extra == 'workbench'", specifier = ">=0.1.4" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'", specifier = "~=4.12.2" }, { name = "ydata-profiling", marker = "extra == 'workbench'", specifier = "~=4.12.1" }, ] From 8cf6a8c777b3e8f411cb450b895e2a094fde4e59 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Thu, 2 Jan 2025 02:28:25 -0700 Subject: [PATCH 43/46] feat: fully working cli --- src/dbt_osmosis/__main__.py | 4 +- src/dbt_osmosis/cli/__init__.py | 3 + src/dbt_osmosis/cli/main.py | 1235 ++++++++++--------------------- src/dbt_osmosis/core/logger.py | 2 +- src/dbt_osmosis/core/osmosis.py | 2 + 5 files changed, 379 insertions(+), 867 deletions(-) diff --git a/src/dbt_osmosis/__main__.py b/src/dbt_osmosis/__main__.py index b96bb162..a4f8a60b 100644 --- a/src/dbt_osmosis/__main__.py +++ b/src/dbt_osmosis/__main__.py @@ -1,6 +1,6 @@ import sys -import dbt_osmosis.cli +from dbt_osmosis.cli.main import cli if __name__ == "__main__": - dbt_osmosis.main.cli(sys.argv[1:]) + cli(sys.argv[1:]) diff --git a/src/dbt_osmosis/cli/__init__.py b/src/dbt_osmosis/cli/__init__.py index e69de29b..52f00279 100644 --- a/src/dbt_osmosis/cli/__init__.py +++ b/src/dbt_osmosis/cli/__init__.py @@ -0,0 +1,3 @@ +from dbt_osmosis.cli.main import cli + +__all__ = ["cli"] diff --git a/src/dbt_osmosis/cli/main.py b/src/dbt_osmosis/cli/main.py index 778988d1..26f17a83 100644 --- a/src/dbt_osmosis/cli/main.py +++ b/src/dbt_osmosis/cli/main.py @@ -1,8 +1,10 @@ # pyright: reportUnreachable=false import functools import io +import subprocess import sys import typing as t +from pathlib import Path import click @@ -13,11 +15,13 @@ YamlRefactorSettings, apply_restructure_plan, commit_yamls, + compile_sql_code, create_dbt_project_context, create_missing_source_yamls, discover_profiles_dir, discover_project_dir, draft_restructure_delta_plan, + execute_sql_code, inherit_upstream_column_knowledge, inject_missing_columns, remove_columns_not_in_database, @@ -54,14 +58,7 @@ def sql(): def shared_opts(func: t.Callable[P, T]) -> t.Callable[P, T]: - """Here we define the options shared across subcommands - - Args: - func (Callable): Wraps a subcommand - - Returns: - Callable: Subcommand with added options - """ + """Options common across subcommands""" @click.option( "--project-dir", @@ -91,38 +88,59 @@ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: return wrapper +def yaml_opts(func: t.Callable[P, T]) -> t.Callable[P, T]: + """Options common to YAML operations.""" + + @click.option( + "-f", + "--fqn", + multiple=True, + type=click.STRING, + help="Specify models based on dbt's FQN. Mostly useful when combined with dbt ls.", + ) + @click.option( + "-d", + "--dry-run", + is_flag=True, + help="If specified, no changes are committed to disk.", + ) + @click.option( + "-C", + "--check", + is_flag=True, + help="If specified, will return a non-zero exit code if any files are changed or would have changed.", + ) + @click.option( + "--catalog-path", + type=click.Path(exists=True), + help="If specified, will read the list of columns from the catalog.json file instead of querying the warehouse.", + ) + @click.option( + "--profile", + type=click.STRING, + help="Which profile to load. Overrides setting in dbt_project.yml.", + ) + @click.option( + "--vars", + type=click.STRING, + help='Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. \'{"foo": "bar"}\'', + ) + @functools.wraps(func) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: + return func(*args, **kwargs) + + return wrapper + + @yaml.command(context_settings=_CONTEXT) @shared_opts -@click.option( - "-f", - "--fqn", - multiple=True, - type=click.STRING, - help="Specify models based on dbt's FQN. Mostly useful when combined with dbt ls.", -) +@yaml_opts @click.option( "-F", "--force-inherit-descriptions", is_flag=True, help="If specified, forces descriptions to be inherited from an upstream source if possible.", ) -@click.option( - "-d", - "--dry-run", - is_flag=True, - help="If specified, no changes are committed to disk.", -) -@click.option( - "-C", - "--check", - is_flag=True, - help="If specified, will return a non-zero exit code if any files are changed or would have changed.", -) -@click.option( - "--catalog-path", - type=click.Path(exists=True), - help="If specified, will read the list of columns from the catalog.json file instead of querying the warehouse.", -) @click.option( "--skip-add-columns", is_flag=True, @@ -158,16 +176,6 @@ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: is_flag=True, help="If specified, progenitor information will be added to the meta information of a column. This is useful if you want to know which model is the progenitor (origin) of a specific model's column.", ) -@click.option( - "--profile", - type=click.STRING, - help="Which profile to load. Overrides setting in dbt_project.yml.", -) -@click.option( - "--vars", - type=click.STRING, - help='Supply variables to the project. This argument overrides variables defined in your dbt_project.yml file. This argument should be a YAML string, eg. \'{"foo": "bar"}\'', -) @click.option( "--use-unrendered-descriptions", is_flag=True, @@ -207,18 +215,13 @@ def refactor( use_unrendered_descriptions: bool = False, add_inheritance_for_specified_keys: list[str] | None = None, output_to_lower: bool = False, -): +) -> None: """Executes organize which syncs yaml files with database schema and organizes the dbt models directory, reparses the project, then executes document passing down inheritable documentation \f This command will conform your project as outlined in `dbt_project.yml`, bootstrap undocumented dbt models, and propagate column level documentation downwards once all yamls are accounted for - - Args: - target (Optional[str]): Profile target. Defaults to default target set in profile yml - project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. - profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt """ logger.info(":water_wave: Executing dbt-osmosis\n") settings = DbtConfiguration( @@ -262,822 +265,326 @@ def refactor( sync_node_to_yaml(context=context) commit_yamls(context=context) - if check and context.mutated > 0: + if check and context.mutated: + exit(1) + + +@yaml.command(context_settings=_CONTEXT) +@shared_opts +@yaml_opts +@click.argument("models", nargs=-1) +def organize( + target: str | None = None, + project_dir: str | None = None, + profiles_dir: str | None = None, + catalog_path: str | None = None, + fqn: list[str] | None = None, + dry_run: bool = False, + check: bool = False, + models: list[str] | None = None, + profile: str | None = None, + vars: str | None = None, +) -> None: + """Organizes schema ymls based on config and injects undocumented models + + \f + This command will conform schema ymls in your project as outlined in `dbt_project.yml` & + bootstrap undocumented dbt models + """ + logger.info(":water_wave: Executing dbt-osmosis\n") + settings = DbtConfiguration( + project_dir=t.cast(str, project_dir), + profiles_dir=t.cast(str, profiles_dir), + target=target, + profile=profile, + ) + context = YamlRefactorContext( + project=create_dbt_project_context(settings), + settings=YamlRefactorSettings( + fqns=fqn or [], + models=models or [], + dry_run=dry_run, + catalog_path=catalog_path, + create_catalog_if_not_exists=False, + ), + ) + if vars: + settings.vars = context.yaml_handler.load(io.StringIO(vars)) # pyright: ignore[reportUnknownMemberType] + + create_missing_source_yamls(context=context) + apply_restructure_plan( + context=context, plan=draft_restructure_delta_plan(context), confirm=False + ) + + if check and context.mutated: + exit(1) + + +@yaml.command(context_settings=_CONTEXT) +@shared_opts +@yaml_opts +@click.option( + "-F", + "--force-inherit-descriptions", + is_flag=True, + help="If specified, forces descriptions to be inherited from an upstream source if possible.", +) +@click.option( + "--skip-add-tags", + is_flag=True, + help="If specified, we will skip adding upstream tags to the model columns.", +) +@click.option( + "--skip-merge-meta", + is_flag=True, + help="If specified, we will skip merging upstrean meta keys to the model columns.", +) +@click.option( + "--skip-add-data-types", # TODO: make sure this is implemented + is_flag=True, + help="If specified, we will skip adding data types to the models.", +) +@click.option( + "--skip-add-columns", + is_flag=True, + help="If specified, we will skip adding columns to the models. This is useful if you want to document your models without adding columns present in the database.", +) +@click.option( + "--numeric-precision", + is_flag=True, + help="If specified, numeric types will have precision and scale, e.g. Number(38, 8).", +) +@click.option( + "--char-length", + is_flag=True, + help="If specified, character types will have length, e.g. Varchar(128).", +) +@click.option( + "--add-progenitor-to-meta", + is_flag=True, + help="If specified, progenitor information will be added to the meta information of a column. This is useful if you want to know which model is the progenitor (origin) of a specific model's column.", +) +@click.option( + "--use-unrendered-descriptions", + is_flag=True, + help="If specified, will use unrendered column descriptions in the documentation. This is the only way to propogate docs blocks", +) +@click.option( + "--add-inheritance-for-specified-keys", + multiple=True, + type=click.STRING, + help="If specified, will add inheritance for the specified keys. IE policy_tags", +) +@click.option( + "--output-to-lower", # TODO: validate this is implemented + is_flag=True, + help="If specified, output yaml file columns and data types in lowercase if possible.", +) +@click.argument("models", nargs=-1) +def document( + target: str | None = None, + project_dir: str | None = None, + profiles_dir: str | None = None, + models: list[str] | None = None, + fqn: list[str] | None = None, + dry_run: bool = False, + check: bool = False, + skip_merge_meta: bool = False, + skip_add_tags: bool = False, + skip_add_data_types: bool = False, + skip_add_columns: bool = False, + add_progenitor_to_meta: bool = False, + add_inheritance_for_specified_keys: list[str] | None = None, + use_unrendered_descriptions: bool = False, + force_inherit_descriptions: bool = False, + output_to_lower: bool = False, + char_length: bool = False, + numeric_precision: bool = False, + catalog_path: str | None = None, + vars: str | None = None, + profile: str | None = None, +) -> None: + """Column level documentation inheritance for existing models + + \f + This command will conform schema ymls in your project as outlined in `dbt_project.yml` & + bootstrap undocumented dbt models + + Args: + target (Optional[str]): Profile target. Defaults to default target set in profile yml + project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. + profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt + """ + logger.info(":water_wave: Executing dbt-osmosis\n") + settings = DbtConfiguration( + project_dir=t.cast(str, project_dir), + profiles_dir=t.cast(str, profiles_dir), + target=target, + profile=profile, + ) + context = YamlRefactorContext( + project=create_dbt_project_context(settings), + settings=YamlRefactorSettings( + fqns=fqn or [], + models=models or [], + dry_run=dry_run, + skip_add_tags=skip_add_tags, + skip_merge_meta=skip_merge_meta, + skip_add_data_types=skip_add_data_types, + skip_add_columns=skip_add_columns, + numeric_precision=numeric_precision, + char_length=char_length, + add_progenitor_to_meta=add_progenitor_to_meta, + use_unrendered_descriptions=use_unrendered_descriptions, + add_inheritance_for_specified_keys=add_inheritance_for_specified_keys or [], + output_to_lower=output_to_lower, + force_inherit_descriptions=force_inherit_descriptions, + catalog_path=catalog_path, + ), + ) + if vars: + settings.vars = context.yaml_handler.load(io.StringIO(vars)) # pyright: ignore[reportUnknownMemberType] + + inject_missing_columns(context=context) + inherit_upstream_column_knowledge(context=context) + sort_columns_as_in_database(context=context) + sync_node_to_yaml(context=context) + commit_yamls(context=context) + + if check and context.mutated: exit(1) -# @yaml.command(context_settings=_CONTEXT) -# @shared_opts -# @click.option( -# "-f", -# "--fqn", -# type=click.STRING, -# help=( -# "Specify models based on FQN. Use dots as separators. Looks like folder.folder.model or" -# " folder.folder.source.table. Use list command to see the scope of an FQN filter." -# " This may be deprecated in the future. Please use model positional selectors instead." -# ), -# ) -# @click.option( -# "-d", -# "--dry-run", -# is_flag=True, -# help="If specified, no changes are committed to disk.", -# ) -# @click.option( -# "-C", -# "--check", -# is_flag=True, -# help="If specified, will return a non-zero exit code if any files are changed.", -# ) -# @click.option( -# "--skip-add-columns", -# is_flag=True, -# help=( -# "If specified, we will skip adding columns to the models. This is useful if you want to" -# " document your models without adding columns present in the database." -# ), -# ) -# @click.option( -# "--skip-add-tags", -# is_flag=True, -# help="If specified, we will skip adding tags to the models.", -# ) -# @click.option( -# "--skip-add-data-types", -# is_flag=True, -# help="If specified, we will skip adding data types to the models.", -# ) -# @click.option( -# "--numeric-precision", -# is_flag=True, -# help="If specified, numeric types will have precision and scale, e.g. Number(38, 8).", -# ) -# @click.option( -# "--char-length", -# is_flag=True, -# help="If specified, character types will have length, e.g. Varchar(128).", -# ) -# @click.option( -# "--skip-merge-meta", -# is_flag=True, -# help="If specified, we will skip merging meta to the models.", -# ) -# @click.option( -# "--add-progenitor-to-meta", -# is_flag=True, -# help=( -# "If specified, progenitor information will be added to the meta information of a column." -# " This is useful if you want to know which model is the progenitor of a specific model's" -# " column." -# ), -# ) -# @click.option( -# "--profile", -# type=click.STRING, -# help="Which profile to load. Overrides setting in dbt_project.yml.", -# ) -# @click.option( -# "--vars", -# type=click.STRING, -# help=( -# "Supply variables to the project. This argument overrides variables defined in your" -# " dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable:" -# " my_value}'" -# ), -# ) -# @click.option( -# "--add-inheritance-for-specified-keys", -# multiple=True, -# type=click.STRING, -# help="If specified, will add inheritance for the specified keys.", -# ) -# @click.option( -# "--output-to-lower", -# is_flag=True, -# help="If specified, output yaml file in lowercase if possible.", -# ) -# @click.argument("models", nargs=-1) -# def organize( -# target: Optional[str] = None, -# project_dir: Optional[str] = None, -# profiles_dir: Optional[str] = None, -# fqn: Optional[str] = None, -# dry_run: bool = False, -# check: bool = False, -# models: Optional[List[str]] = None, -# skip_add_columns: bool = False, -# skip_add_tags: bool = False, -# skip_add_data_types: bool = False, -# numeric_precision: bool = False, -# char_length: bool = False, -# skip_merge_meta: bool = False, -# add_progenitor_to_meta: bool = False, -# profile: Optional[str] = None, -# vars: Optional[str] = None, -# add_inheritance_for_specified_keys: Optional[List[str]] = None, -# output_to_lower: bool = False, -# ): -# """Organizes schema ymls based on config and injects undocumented models -# -# \f -# This command will conform schema ymls in your project as outlined in `dbt_project.yml` & -# bootstrap undocumented dbt models -# -# Args: -# target (Optional[str]): Profile target. Defaults to default target set in profile yml -# project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. -# profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt -# """ -# logger().info(":water_wave: Executing dbt-osmosis\n") -# -# runner = DbtYamlManager( -# project_dir=project_dir, -# profiles_dir=profiles_dir, -# target=target, -# fqn=fqn, -# dry_run=dry_run, -# models=models, -# skip_add_columns=skip_add_columns, -# skip_add_tags=skip_add_tags, -# skip_add_data_types=skip_add_data_types, -# numeric_precision=numeric_precision, -# char_length=char_length, -# skip_merge_meta=skip_merge_meta, -# add_progenitor_to_meta=add_progenitor_to_meta, -# profile=profile, -# vars=vars, -# add_inheritance_for_specified_keys=add_inheritance_for_specified_keys, -# output_to_lower=output_to_lower, -# ) -# -# # Conform project structure & bootstrap undocumented models injecting columns -# runner.commit_project_restructure_to_disk() -# if check and runner._mutations > 0: -# exit(1) -# -# -# @yaml.command(context_settings=_CONTEXT) -# @shared_opts -# @click.option( -# "-f", -# "--fqn", -# type=click.STRING, -# help=( -# "Specify models based on FQN. Use dots as separators. Looks like folder.folder.model or" -# " folder.folder.source.table. Use list command to see the scope of an FQN filter." -# " This may be deprecated in the future. Please use model positional selectors instead." -# ), -# ) -# @click.option( -# "-F", -# "--force-inheritance", -# is_flag=True, -# help=( -# "If specified, forces documentation to be inherited overriding existing column level" -# " documentation where applicable." -# ), -# ) -# @click.option( -# "-d", -# "--dry-run", -# is_flag=True, -# help="If specified, no changes are committed to disk.", -# ) -# @click.option( -# "-C", -# "--check", -# is_flag=True, -# help="If specified, will return a non-zero exit code if any files are changed.", -# ) -# @click.option( -# "--catalog-file", -# type=click.Path(exists=True), -# help=( -# "If specified, will read the list of columns from the catalog.json file instead of querying" -# " the warehouse." -# ), -# ) -# @click.option( -# "--skip-add-columns", -# is_flag=True, -# help=( -# "If specified, we will skip adding columns to the models. This is useful if you want to" -# " document your models without adding columns present in the database." -# ), -# ) -# @click.option( -# "--skip-add-tags", -# is_flag=True, -# help="If specified, we will skip adding tags to the models.", -# ) -# @click.option( -# "--skip-add-data-types", -# is_flag=True, -# help="If specified, we will skip adding data types to the models.", -# ) -# @click.option( -# "--numeric-precision", -# is_flag=True, -# help="If specified, numeric types will have precision and scale, e.g. Number(38, 8).", -# ) -# @click.option( -# "--char-length", -# is_flag=True, -# help="If specified, character types will have length, e.g. Varchar(128).", -# ) -# @click.option( -# "--skip-merge-meta", -# is_flag=True, -# help="If specified, we will skip merging meta to the models.", -# ) -# @click.option( -# "--add-progenitor-to-meta", -# is_flag=True, -# help=( -# "If specified, progenitor information will be added to the meta information of a column." -# " This is useful if you want to know which model is the progenitor of a specific model's" -# " column." -# ), -# ) -# @click.option( -# "--profile", -# type=click.STRING, -# help="Which profile to load. Overrides setting in dbt_project.yml.", -# ) -# @click.option( -# "--vars", -# type=click.STRING, -# help=( -# "Supply variables to the project. This argument overrides variables defined in your" -# " dbt_project.yml file. This argument should be a YAML string, eg. '{my_variable:" -# " my_value}'" -# ), -# ) -# @click.option( -# "--use-unrendered-descriptions", -# is_flag=True, -# help=( -# "If specified, will use unrendered column descriptions in the documentation." -# "This is useful for propogating docs blocks" -# ), -# ) -# @click.option( -# "--add-inheritance-for-specified-keys", -# multiple=True, -# type=click.STRING, -# help="If specified, will add inheritance for the specified keys.", -# ) -# @click.option( -# "--output-to-lower", -# is_flag=True, -# help="If specified, output yaml file in lowercase if possible.", -# ) -# @click.argument("models", nargs=-1) -# def document( -# target: Optional[str] = None, -# project_dir: Optional[str] = None, -# profiles_dir: Optional[str] = None, -# catalog_file: Optional[str] = None, -# fqn: Optional[str] = None, -# force_inheritance: bool = False, -# dry_run: bool = False, -# check: bool = False, -# models: Optional[List[str]] = None, -# skip_add_columns: bool = False, -# skip_add_tags: bool = False, -# skip_add_data_types: bool = False, -# numeric_precision: bool = False, -# char_length: bool = False, -# skip_merge_meta: bool = False, -# add_progenitor_to_meta: bool = False, -# profile: Optional[str] = None, -# vars: Optional[str] = None, -# use_unrendered_descriptions: bool = False, -# add_inheritance_for_specified_keys: Optional[List[str]] = None, -# output_to_lower: bool = False, -# ): -# """Column level documentation inheritance for existing models -# -# \f -# This command will conform schema ymls in your project as outlined in `dbt_project.yml` & -# bootstrap undocumented dbt models -# -# Args: -# target (Optional[str]): Profile target. Defaults to default target set in profile yml -# project_dir (Optional[str], optional): Dbt project directory. Defaults to working directory. -# profiles_dir (Optional[str], optional): Dbt profile directory. Defaults to ~/.dbt -# """ -# logger().info(":water_wave: Executing dbt-osmosis\n") -# -# runner = DbtYamlManager( -# project_dir=project_dir, -# profiles_dir=profiles_dir, -# target=target, -# fqn=fqn, -# dry_run=dry_run, -# models=models, -# catalog_file=catalog_file, -# skip_add_columns=skip_add_columns, -# skip_add_tags=skip_add_tags, -# skip_add_data_types=skip_add_data_types, -# numeric_precision=numeric_precision, -# char_length=char_length, -# skip_merge_meta=skip_merge_meta, -# add_progenitor_to_meta=add_progenitor_to_meta, -# profile=profile, -# vars=vars, -# use_unrendered_descriptions=use_unrendered_descriptions, -# add_inheritance_for_specified_keys=add_inheritance_for_specified_keys, -# output_to_lower=output_to_lower, -# ) -# -# # Propagate documentation & inject/remove schema file columns to align with model in database -# runner.propagate_documentation_downstream(force_inheritance, output_to_lower) -# if check and runner._mutations > 0: -# exit(1) -# -# -# class ServerRegisterThread(threading.Thread): -# """Thin container to capture errors in project registration""" -# -# def run(self): -# try: -# threading.Thread.run(self) -# except Exception as err: -# self.err = err -# pass -# else: -# self.err = None -# -# -# def _health_check(host: str, port: int): -# """Performs health check on server, -# raises ConnectionError otherwise returns result""" -# t, max_t, i = 0.25, 10, 0 -# address = f"http://{host}:{port}/health" -# error_msg = f"Server at {address} is not healthy" -# while True: -# try: -# resp = requests.get(address) -# except Exception: -# time.sleep(t) -# i += 1 -# if t * i > max_t: -# logger().critical(error_msg, address) -# raise ConnectionError(error_msg) -# else: -# continue -# if resp.ok: -# break -# else: -# if resp.status_code in (400, 404): -# # this is not indicative of unhealthy server -# # but rather it wants a dbt project to be registered -# break -# logger().critical(error_msg, address) -# raise ConnectionError(error_msg) -# return resp.json() -# -# -# @server.command(context_settings=_CONTEXT) -# @shared_opts -# @click.option( -# "--host", -# type=click.STRING, -# help="The host to serve the server on", -# default="localhost", -# ) -# @click.option( -# "--port", -# type=click.INT, -# help="The port to serve the server on", -# default=8581, -# ) -# @click.option( -# "--register-project", -# is_flag=True, -# help=( -# "Try to register a dbt project on init as specified by --project-dir, --profiles-dir or" -# " their defaults if not passed explicitly" -# ), -# ) -# @click.option( -# "--exit-on-error", -# is_flag=True, -# help=( -# "A flag which indicates the program should terminate on registration failure if" -# " --register-project was unsuccessful" -# ), -# ) -# def serve( -# project_dir: str, -# profiles_dir: str, -# target: str, -# host: str = "localhost", -# port: int = 8581, -# register_project: bool = False, -# exit_on_error: bool = False, -# ): -# """Runs a lightweight server compatible with dbt-power-user and convenient for interactively -# running or compile dbt SQL queries with two simple endpoints accepting POST messages""" -# logger().info(":water_wave: Executing dbt-osmosis\n") -# -# def _register_project(): -# """Background job which registers the first project on the server automatically""" -# -# # Wait -# _health_check(host, port) -# -# # Register -# body = {"project_dir": project_dir, "profiles_dir": profiles_dir, "force": True} -# if target: -# body["target"] = target -# endpoint = f"http://{host}:{port}/register" -# logger().info("Registering project: %s", endpoint) -# res = requests.post( -# endpoint, -# headers={"X-dbt-Project": str(Path(project_dir).absolute())}, -# json=body, -# ).json() -# -# # Log -# logger().info(res) -# if "error" in res: -# raise ConnectionError(res["error"]["message"]) -# -# server = multiprocessing.Process(target=run_server, args=(None, host, port)) -# server.start() -# -# import atexit -# -# atexit.register(lambda: server.terminate()) -# -# register_handler: Optional[ServerRegisterThread] = None -# if register_project and project_dir and profiles_dir: -# register_handler = ServerRegisterThread(target=_register_project) -# register_handler.start() -# -# register_exit = None -# if register_handler is not None: -# register_handler.join() -# if register_handler.err is not None and exit_on_error: -# register_exit = 1 -# server.kill() -# -# server.join() -# sys.exit(register_exit or server.exitcode) -# -# -# @server.command(context_settings=_CONTEXT) -# @shared_opts -# @click.option( -# "--host", -# type=click.STRING, -# help="The host to serve the server on", -# default="localhost", -# ) -# @click.option( -# "--port", -# type=click.INT, -# help="The port to serve the server on", -# default=8581, -# ) -# @click.option( -# "--project-name", -# type=click.STRING, -# help=( -# "The name to register the project with. By default, it is a string value representing the" -# " absolute directory of the project on disk" -# ), -# ) -# def register_project( -# project_dir: str, -# profiles_dir: str, -# target: str, -# host: str = "localhost", -# port: int = 8581, -# project_name: Optional[str] = None, -# ): -# """Convenience method to allow user to register project on the running server from the CLI""" -# logger().info(":water_wave: Executing dbt-osmosis\n") -# -# # Wait -# _health_check(host, port) -# -# # Register -# body = {"project_dir": project_dir, "profiles_dir": profiles_dir, "force": True} -# if target: -# body["target"] = target -# endpoint = f"http://{host}:{port}/register" -# logger().info("Registering project: %s", endpoint) -# res = requests.post( -# endpoint, -# headers={"X-dbt-Project": project_name or str(Path(project_dir).absolute())}, -# json=body, -# ) -# -# # Log -# logger().info(res.json()) -# -# -# @server.command(context_settings=_CONTEXT) -# @click.option( -# "--project-name", -# type=click.STRING, -# help="The name of the registered project to remove.", -# ) -# @click.option( -# "--host", -# type=click.STRING, -# help="The host to serve the server on", -# default="localhost", -# ) -# @click.option( -# "--port", -# type=click.INT, -# help="The port to serve the server on", -# default=8581, -# ) -# def unregister_project( -# project_name: str, -# host: str = "localhost", -# port: int = 8581, -# ): -# """Convenience method to allow user to unregister project on the running server from the CLI""" -# logger().info(":water_wave: Executing dbt-osmosis\n") -# -# # Wait -# _health_check(host, port) -# -# # Unregister -# endpoint = f"http://{host}:{port}/unregister" -# logger().info("Unregistering project: %s", endpoint) -# res = requests.post( -# endpoint, -# headers={"X-dbt-Project": project_name}, -# ) -# -# # Log -# logger().info(res.json()) -# -# -# @cli.command( -# context_settings=dict( -# ignore_unknown_options=True, -# allow_extra_args=True, -# ) -# ) -# @click.option( -# "--project-dir", -# type=click.Path(exists=True, dir_okay=True, file_okay=False), -# help=( -# "Which directory to look in for the dbt_project.yml file. Default is the current working" -# " directory and its parents." -# ), -# ) -# @click.option( -# "--profiles-dir", -# type=click.Path(exists=True, dir_okay=True, file_okay=False), -# default=DEFAULT_PROFILES_DIR, -# help="Which directory to look in for the profiles.yml file. Defaults to ~/.dbt", -# ) -# @click.option( -# "--host", -# type=click.STRING, -# help="The host to serve the server on", -# default="localhost", -# ) -# @click.option( -# "--port", -# type=click.INT, -# help="The port to serve the server on", -# default=8501, -# ) -# @click.pass_context -# def workbench( -# ctx, -# profiles_dir: Optional[str] = None, -# project_dir: Optional[str] = None, -# host: str = "localhost", -# port: int = 8501, -# ): -# """Start the dbt-osmosis workbench -# -# \f -# Pass the --options command to see streamlit specific options that can be passed to the app, -# pass --config to see the output of streamlit config show -# """ -# logger().info(":water_wave: Executing dbt-osmosis\n") -# -# if "--options" in ctx.args: -# subprocess.run(["streamlit", "run", "--help"]) -# ctx.exit() -# -# import os -# -# if "--config" in ctx.args: -# subprocess.run( -# ["streamlit", "config", "show"], -# env=os.environ, -# cwd=Path.cwd(), -# ) -# ctx.exit() -# -# script_args = ["--"] -# if project_dir: -# script_args.append("--project-dir") -# script_args.append(project_dir) -# if profiles_dir: -# script_args.append("--profiles-dir") -# script_args.append(profiles_dir) -# -# subprocess.run( -# [ -# "streamlit", -# "run", -# "--runner.magicEnabled=false", -# f"--browser.serverAddress={host}", -# f"--browser.serverPort={port}", -# Path(__file__).parent / "app_v2.py", -# ] -# + ctx.args -# + script_args, -# env=os.environ, -# cwd=Path.cwd(), -# ) -# -# -# @cli.command(context_settings=_CONTEXT) -# @shared_opts -# @click.option( -# "-m", -# "--model", -# type=click.STRING, -# required=True, -# help="The model to edit in the workbench, must be a valid model as would be selected by `ref`", -# ) -# @click.option( -# "--pk", -# type=click.STRING, -# help="The primary key of the model with which to base the diff", -# ) -# @click.option( -# "--temp-table", -# is_flag=True, -# help="If specified, temp tables are used to stage the queries.", -# ) -# @click.option( -# "--agg/--no-agg", -# default=True, -# help="Use --no-agg to show sample results, by default we agg for a summary view.", -# ) -# @click.option( -# "-o", -# "--output", -# default="table", -# help=( -# "Output format can be one of table, chart/bar, or csv. CSV is saved to a file named" -# " dbt-osmosis-diff in working dir" -# ), -# ) -# def diff( -# model: str, -# pk: str, -# target: Optional[str] = None, -# project_dir: Optional[str] = None, -# profiles_dir: Optional[str] = None, -# temp_table: bool = False, -# agg: bool = True, -# output: str = "table", -# ): -# """Diff dbt models at different git revisions""" -# -# logger().info(":water_wave: Executing dbt-osmosis\n") -# -# runner = DbtProject( -# project_dir=project_dir, -# profiles_dir=profiles_dir, -# target=target, -# ) -# inject_macros(runner) -# diff_and_print_to_console(model, pk, runner, temp_table, agg, output) -# -# -# @sql.command(context_settings=_CONTEXT) -# @shared_opts -# @click.argument("sql") -# def run( -# sql: str = "", -# project_dir: Optional[str] = None, -# profiles_dir: Optional[str] = None, -# target: Optional[str] = None, -# ): -# """Executes a dbt SQL statement writing an OsmosisRunResult | OsmosisErrorContainer to stdout""" -# from dbt_osmosis.vendored.dbt_core_interface.project import ( -# ServerError, -# ServerErrorCode, -# ServerErrorContainer, -# ServerRunResult, -# ) -# -# rv: Union[ServerRunResult, ServerErrorContainer] = None -# -# try: -# runner = DbtProject( -# project_dir=project_dir, -# profiles_dir=profiles_dir, -# target=target, -# ) -# except Exception as init_err: -# rv = ServerErrorContainer( -# error=ServerError( -# code=ServerErrorCode.ProjectParseFailure, -# message=str(init_err), -# data=init_err.__dict__, -# ) -# ) -# -# if rv is not None: -# print(asdict(rv)) -# return rv -# -# try: -# result = runner.execute_code("\n".join(sys.stdin.readlines()) if sql == "-" else sql) -# except Exception as execution_err: -# rv = ServerErrorContainer( -# error=ServerError( -# code=ServerErrorCode.ExecuteSqlFailure, -# message=str(execution_err), -# data=execution_err.__dict__, -# ) -# ) -# else: -# rv = ServerRunResult( -# rows=[list(row) for row in result.table.rows], -# column_names=result.table.column_names, -# executed_code=result.compiled_code, -# raw_code=result.raw_code, -# ) -# -# print(asdict(rv)) -# return rv -# -# -# @sql.command(context_settings=_CONTEXT) -# @shared_opts -# @click.argument("sql") -# def compile( -# sql: str, -# project_dir: Optional[str] = None, -# profiles_dir: Optional[str] = None, -# target: Optional[str] = None, -# ): -# """Compiles dbt SQL statement writing an OsmosisCompileResult | OsmosisErrorContainer to stdout""" -# from dbt_osmosis.vendored.dbt_core_interface.project import ( -# ServerCompileResult, -# ServerError, -# ServerErrorCode, -# ServerErrorContainer, -# ) -# -# rv: Union[ServerCompileResult, ServerErrorContainer] = None -# -# try: -# runner = DbtProject( -# project_dir=project_dir, -# profiles_dir=profiles_dir, -# target=target, -# ) -# except Exception as init_err: -# rv = ServerErrorContainer( -# error=ServerError( -# code=ServerErrorCode.ProjectParseFailure, -# message=str(init_err), -# data=init_err.__dict__, -# ) -# ) -# -# if rv is not None: -# print(asdict(rv)) -# return rv -# -# try: -# result = runner.compile_code("\n".join(sys.stdin.readlines()) if sql == "-" else sql) -# except Exception as compilation_err: -# rv = ServerErrorContainer( -# error=ServerError( -# code=ServerErrorCode.CompileSqlFailure, -# message=str(compilation_err), -# data=compilation_err.__dict__, -# ) -# ) -# else: -# rv = ServerCompileResult(result=result.compiled_code) -# -# print(asdict(rv)) -# return rv -# -# -# if __name__ == "__main__": -# cli() +@cli.command( + context_settings=dict( + ignore_unknown_options=True, + allow_extra_args=True, + ) +) +@click.option( + "--project-dir", + default=discover_project_dir, + type=click.Path(exists=True, dir_okay=True, file_okay=False), + help="Which directory to look in for the dbt_project.yml file. Default is the current working directory and its parents.", +) +@click.option( + "--profiles-dir", + default=discover_profiles_dir, + type=click.Path(exists=True, dir_okay=True, file_okay=False), + help="Which directory to look in for the profiles.yml file. Defaults to ~/.dbt", +) +@click.option( + "--host", + type=click.STRING, + help="The host to serve the server on", + default="localhost", +) +@click.option( + "--port", + type=click.INT, + help="The port to serve the server on", + default=8501, +) +@click.pass_context +def workbench( + ctx: click.Context, + profiles_dir: str | None = None, + project_dir: str | None = None, + host: str = "localhost", + port: int = 8501, +) -> None: + """Start the dbt-osmosis workbench + + \f + Pass the --options command to see streamlit specific options that can be passed to the app, + pass --config to see the output of streamlit config show + """ + logger.info(":water_wave: Executing dbt-osmosis\n") + + if "--options" in ctx.args: + proc = subprocess.run(["streamlit", "run", "--help"]) + ctx.exit(proc.returncode) + + import os + + if "--config" in ctx.args: + proc = subprocess.run( + ["streamlit", "config", "show"], + env=os.environ, + cwd=Path.cwd(), + ) + ctx.exit(proc.returncode) + + script_args = ["--"] + if project_dir: + script_args.append("--project-dir") + script_args.append(project_dir) + if profiles_dir: + script_args.append("--profiles-dir") + script_args.append(profiles_dir) + + proc = subprocess.run( + [ + "streamlit", + "run", + "--runner.magicEnabled=false", + f"--browser.serverAddress={host}", + f"--browser.serverPort={port}", + Path(__file__).parent.parent / "workbench" / "app.py", + *ctx.args, + *script_args, + ], + env=os.environ, + cwd=Path.cwd(), + ) + + ctx.exit(proc.returncode) + + +@sql.command(context_settings=_CONTEXT) +@shared_opts +@click.argument("sql") +def run( + sql: str = "", + project_dir: str | None = None, + profiles_dir: str | None = None, + target: str | None = None, +) -> None: + """Executes a dbt SQL statement writing results to stdout""" + settings = DbtConfiguration( + project_dir=t.cast(str, project_dir), profiles_dir=t.cast(str, profiles_dir), target=target + ) + project = create_dbt_project_context(settings) + _, table = execute_sql_code(project, sql) + + getattr(table, "print_table")( + max_rows=50, + max_columns=6, + output=sys.stdout, + max_column_width=20, + locale=None, + max_precision=3, + ) + + +@sql.command(context_settings=_CONTEXT) +@shared_opts +@click.argument("sql") +def compile( + sql: str = "", + project_dir: str | None = None, + profiles_dir: str | None = None, + target: str | None = None, +) -> None: + """Executes a dbt SQL statement writing results to stdout""" + settings = DbtConfiguration( + project_dir=t.cast(str, project_dir), profiles_dir=t.cast(str, profiles_dir), target=target + ) + project = create_dbt_project_context(settings) + node = compile_sql_code(project, sql) + + print(node.compiled_code) + + +if __name__ == "__main__": + cli() diff --git a/src/dbt_osmosis/core/logger.py b/src/dbt_osmosis/core/logger.py index b50491fc..bcf84320 100644 --- a/src/dbt_osmosis/core/logger.py +++ b/src/dbt_osmosis/core/logger.py @@ -73,7 +73,7 @@ def get_logger( class LogMethod(t.Protocol): """Protocol for logger methods""" - def __call__(self, msg: str, /, *args: t.Any, **kwds: t.Any) -> t.Any: ... + def __call__(self, msg: t.Any, /, *args: t.Any, **kwds: t.Any) -> t.Any: ... def __getattr__(name: str) -> LogMethod: diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index c187e0c0..9199a575 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -112,6 +112,7 @@ class DbtConfiguration: threads: int = 1 single_threaded: bool = True vars: dict[str, t.Any] = field(default_factory=dict) + quiet: bool = True def __post_init__(self) -> None: set_invocation_context(get_env()) @@ -132,6 +133,7 @@ def config_to_namespace(cfg: DbtConfiguration) -> argparse.Namespace: single_threaded=cfg.single_threaded, vars=cfg.vars, which="parse", + quiet=cfg.quiet, DEBUG=False, REQUIRE_RESOURCE_NAMES_WITHOUT_SPACES=False, ) From 0949ff7d2abbde60e618510c791db19c0bfb386d Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Thu, 2 Jan 2025 03:10:39 -0700 Subject: [PATCH 44/46] feat: add logging and auto apply parameter --- src/dbt_osmosis/cli/main.py | 16 ++- src/dbt_osmosis/core/osmosis.py | 219 +++++++++++++++++++++++++++----- 2 files changed, 201 insertions(+), 34 deletions(-) diff --git a/src/dbt_osmosis/cli/main.py b/src/dbt_osmosis/cli/main.py index 26f17a83..6cbfbadc 100644 --- a/src/dbt_osmosis/cli/main.py +++ b/src/dbt_osmosis/cli/main.py @@ -192,6 +192,11 @@ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: is_flag=True, help="If specified, output yaml file columns and data types in lowercase if possible.", ) +@click.option( + "--auto-apply", + is_flag=True, + help="If specified, will automatically apply the restructure plan without confirmation.", +) @click.argument("models", nargs=-1) def refactor( target: str | None = None, @@ -215,6 +220,7 @@ def refactor( use_unrendered_descriptions: bool = False, add_inheritance_for_specified_keys: list[str] | None = None, output_to_lower: bool = False, + auto_apply: bool = False, ) -> None: """Executes organize which syncs yaml files with database schema and organizes the dbt models directory, reparses the project, then executes document passing down inheritable documentation @@ -256,7 +262,7 @@ def refactor( create_missing_source_yamls(context=context) apply_restructure_plan( - context=context, plan=draft_restructure_delta_plan(context), confirm=False + context=context, plan=draft_restructure_delta_plan(context), confirm=not auto_apply ) inject_missing_columns(context=context) remove_columns_not_in_database(context=context) @@ -273,6 +279,11 @@ def refactor( @shared_opts @yaml_opts @click.argument("models", nargs=-1) +@click.option( + "--auto-apply", + is_flag=True, + help="If specified, will automatically apply the restructure plan without confirmation.", +) def organize( target: str | None = None, project_dir: str | None = None, @@ -284,6 +295,7 @@ def organize( models: list[str] | None = None, profile: str | None = None, vars: str | None = None, + auto_apply: bool = False, ) -> None: """Organizes schema ymls based on config and injects undocumented models @@ -313,7 +325,7 @@ def organize( create_missing_source_yamls(context=context) apply_restructure_plan( - context=context, plan=draft_restructure_delta_plan(context), confirm=False + context=context, plan=draft_restructure_delta_plan(context), confirm=not auto_apply ) if check and context.mutated: diff --git a/src/dbt_osmosis/core/osmosis.py b/src/dbt_osmosis/core/osmosis.py index 9199a575..347693c3 100644 --- a/src/dbt_osmosis/core/osmosis.py +++ b/src/dbt_osmosis/core/osmosis.py @@ -80,12 +80,15 @@ def discover_project_dir() -> str: if "DBT_PROJECT_DIR" in os.environ: project_dir = Path(os.environ["DBT_PROJECT_DIR"]) if project_dir.is_dir(): + logger.info(":mag: DBT_PROJECT_DIR detected => %s", project_dir) return str(project_dir.resolve()) - logger.warning(f"DBT_PROJECT_DIR {project_dir} is not a valid directory.") + logger.warning(":warning: DBT_PROJECT_DIR %s is not a valid directory.", project_dir) cwd = Path.cwd() for p in [cwd] + list(cwd.parents): if (p / "dbt_project.yml").exists(): + logger.info(":mag: Found dbt_project.yml at => %s", p) return str(p.resolve()) + logger.info(":mag: Defaulting to current directory => %s", cwd) return str(cwd.resolve()) @@ -94,11 +97,15 @@ def discover_profiles_dir() -> str: if "DBT_PROFILES_DIR" in os.environ: profiles_dir = Path(os.environ["DBT_PROFILES_DIR"]) if profiles_dir.is_dir(): + logger.info(":mag: DBT_PROFILES_DIR detected => %s", profiles_dir) return str(profiles_dir.resolve()) - logger.warning(f"DBT_PROFILES_DIR {profiles_dir} is not a valid directory.") + logger.warning(":warning: DBT_PROFILES_DIR %s is not a valid directory.", profiles_dir) if (Path.cwd() / "profiles.yml").exists(): + logger.info(":mag: Found profiles.yml in current directory.") return str(Path.cwd().resolve()) - return str(Path.home() / ".dbt") + home_profiles = str(Path.home() / ".dbt") + logger.info(":mag: Defaulting to => %s", home_profiles) + return home_profiles @dataclass @@ -115,6 +122,7 @@ class DbtConfiguration: quiet: bool = True def __post_init__(self) -> None: + logger.debug(":bookmark_tabs: Setting invocation context with environment variables.") set_invocation_context(get_env()) if self.threads > 1: self.single_threaded = False @@ -124,6 +132,7 @@ def __post_init__(self) -> None: def config_to_namespace(cfg: DbtConfiguration) -> argparse.Namespace: """Convert a DbtConfiguration into a dbt-friendly argparse.Namespace.""" + logger.debug(":blue_book: Converting DbtConfiguration to argparse.Namespace => %s", cfg) return argparse.Namespace( project_dir=cfg.project_dir, profiles_dir=cfg.profiles_dir, @@ -166,22 +175,33 @@ class DbtProjectContext: @property def is_connection_expired(self) -> bool: """Check if the adapter has expired based on the adapter TTL.""" - return ( + expired = ( time.time() - self._connection_created_at.setdefault(get_ident(), 0.0) > self.connection_ttl ) + logger.debug(":hourglass_flowing_sand: Checking if connection is expired => %s", expired) + return expired @property def adapter(self) -> BaseAdapter: """Get the adapter instance, creating a new one if the current one has expired.""" with self._adapter_mutex: if not self._adapter: + logger.info(":wrench: Instantiating new adapter because none is currently set.") adapter = instantiate_adapter(self.config) adapter.set_macro_resolver(self.manifest) _ = adapter.acquire_connection() self._adapter = adapter self._connection_created_at[get_ident()] = time.time() + logger.info( + ":wrench: Successfully acquired new adapter connection for thread => %s", + get_ident(), + ) elif self.is_connection_expired: + logger.info( + ":wrench: Adapter connection expired, refreshing connection for thread => %s", + get_ident(), + ) self._adapter.connections.release() self._adapter.connections.clear_thread_connection() _ = self._adapter.acquire_connection() @@ -196,19 +216,23 @@ def manifest_mutex(self) -> threading.Lock: def instantiate_adapter(runtime_config: RuntimeConfig) -> BaseAdapter: """Instantiate a dbt adapter based on the runtime configuration.""" + logger.debug(":mag: Registering adapter for runtime config => %s", runtime_config) register_adapter(runtime_config, get_mp_context()) adapter = get_adapter(runtime_config) adapter.set_macro_context_generator(t.cast(t.Any, generate_runtime_macro_context)) adapter.connections.set_connection_name("dbt-osmosis") + logger.debug(":hammer_and_wrench: Adapter instantiated => %s", adapter) return t.cast(BaseAdapter, t.cast(t.Any, adapter)) def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: """Build a DbtProjectContext from a DbtConfiguration.""" + logger.info(":wave: Creating DBT project context using config => %s", config) args = config_to_namespace(config) dbt_flags.set_from_args(args, args) runtime_cfg = RuntimeConfig.from_args(args) + logger.info(":bookmark_tabs: Instantiating adapter as part of project context creation.") adapter = instantiate_adapter(runtime_cfg) setattr(runtime_cfg, "adapter", adapter) loader = ManifestLoader( @@ -217,12 +241,14 @@ def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: ) manifest = loader.load() manifest.build_flat_graph() + logger.info(":arrows_counterclockwise: Loaded the dbt project manifest!") adapter.set_macro_resolver(manifest) sql_parser = SqlBlockParser(runtime_cfg, manifest, runtime_cfg) macro_parser = SqlMacroParser(runtime_cfg, manifest) + logger.info(":sparkles: DbtProjectContext successfully created!") return DbtProjectContext( args=args, config=runtime_cfg, @@ -234,11 +260,13 @@ def create_dbt_project_context(config: DbtConfiguration) -> DbtProjectContext: def reload_manifest(context: DbtProjectContext) -> None: """Reload the dbt project manifest. Useful for picking up mutations.""" + logger.info(":arrows_counterclockwise: Reloading the dbt project manifest!") loader = ManifestLoader(context.config, context.config.load_dependencies()) manifest = loader.load() manifest.build_flat_graph() context.adapter.set_macro_resolver(manifest) context.manifest = manifest + logger.info(":white_check_mark: Manifest reloaded => %s", context.manifest.metadata) # YAML + File Data @@ -255,12 +283,14 @@ def create_yaml_instance( encoding: str = "utf-8", ) -> ruamel.yaml.YAML: """Returns a ruamel.yaml.YAML instance configured with the provided settings.""" + logger.debug(":notebook: Creating ruamel.yaml.YAML instance with custom formatting.") y = ruamel.yaml.YAML() y.indent(mapping=indent_mapping, sequence=indent_sequence, offset=indent_offset) y.width = width y.preserve_quotes = preserve_quotes y.default_flow_style = default_flow_style y.encoding = encoding + logger.debug(":notebook: YAML instance created => %s", y) return y @@ -275,7 +305,9 @@ class SchemaFileLocation: @property def is_valid(self) -> bool: """Check if the current and target locations are valid.""" - return self.current == self.target + valid = self.current == self.target + logger.debug(":white_check_mark: Checking if schema file location is valid => %s", valid) + return valid @dataclass @@ -377,6 +409,11 @@ class YamlRefactorContext: def register_mutations(self, count: int) -> None: """Increment the mutation count by a specified amount.""" + logger.debug( + ":sparkles: Registering %s new mutations. Current count => %s", + count, + self._mutation_count, + ) self._mutation_count += count @property @@ -387,7 +424,9 @@ def mutation_count(self) -> int: @property def mutated(self) -> bool: """Check if the context has performed any mutations.""" - return self._mutation_count > 0 + has_mutated = self._mutation_count > 0 + logger.debug(":white_check_mark: Has the context mutated anything? => %s", has_mutated) + return has_mutated @property def source_definitions(self) -> dict[str, t.Any]: @@ -410,31 +449,42 @@ def skip_patterns(self) -> list[str]: def read_catalog(self) -> CatalogResults | None: """Read the catalog file if it exists.""" + logger.debug(":mag: Checking if catalog is already loaded => %s", bool(self._catalog)) if not self._catalog: catalog = load_catalog(self.settings) if not catalog and self.settings.create_catalog_if_not_exists: + logger.info( + ":bookmark_tabs: No existing catalog found, generating new catalog.json." + ) catalog = generate_catalog(self.project) self._catalog = catalog return self._catalog def __post_init__(self) -> None: + logger.debug(":green_book: Running post-init for YamlRefactorContext.") if EMPTY_STRING not in self.placeholders: self.placeholders = (EMPTY_STRING, *self.placeholders) def load_catalog(settings: YamlRefactorSettings) -> CatalogResults | None: """Load the catalog file if it exists and return a CatalogResults instance.""" + logger.debug(":mag: Attempting to load catalog from => %s", settings.catalog_path) if not settings.catalog_path: return None fp = Path(settings.catalog_path) if not fp.exists(): + logger.warning(":warning: Catalog path => %s does not exist.", fp) return None + logger.info(":books: Loading existing catalog => %s", fp) return t.cast(CatalogResults, CatalogArtifact.from_dict(json.loads(fp.read_text()))) # pyright: ignore[reportInvalidCast] # NOTE: this is mostly adapted from dbt-core with some cruft removed, strict pyright is not a fan of dbt's shenanigans def generate_catalog(context: DbtProjectContext) -> CatalogResults | None: """Generate the dbt catalog file for the project.""" + logger.info( + ":books: Generating a new catalog for the project => %s", context.config.project_name + ) catalogable_nodes = chain( [ t.cast(RelationConfig, node) # pyright: ignore[reportInvalidCast] @@ -448,6 +498,7 @@ def generate_catalog(context: DbtProjectContext) -> CatalogResults | None: context.manifest.get_used_schemas(), # pyright: ignore[reportArgumentType] ) + logger.debug(":mag_right: Building catalog from returned table => %s", table) catalog = Catalog( [dict(zip(table.column_names, map(dbt_utils._coerce_decimal, row))) for row in table] # pyright: ignore[reportUnknownArgumentType,reportPrivateUsage] ) @@ -455,6 +506,7 @@ def generate_catalog(context: DbtProjectContext) -> CatalogResults | None: errors: list[str] | None = None if exceptions: errors = [str(e) for e in exceptions] + logger.warning(":warning: Exceptions encountered in get_filtered_catalog => %s", errors) nodes, sources = catalog.make_unique_id_map(context.manifest) artifact = CatalogArtifact.from_results( # pyright: ignore[reportAttributeAccessIssue] @@ -464,9 +516,9 @@ def generate_catalog(context: DbtProjectContext) -> CatalogResults | None: compile_results=None, errors=errors, ) - artifact.write( # Cache it same as dbt - os.path.join(context.config.project_target_path, "catalog.json") - ) + artifact_path = Path(context.config.project_target_path, "catalog.json") + logger.info(":bookmark_tabs: Writing fresh catalog => %s", artifact_path) + artifact.write(str(artifact_path.resolve())) # Cache it, same as dbt return t.cast(CatalogResults, artifact) @@ -476,11 +528,13 @@ def generate_catalog(context: DbtProjectContext) -> CatalogResults | None: def _has_jinja(code: str) -> bool: """Check if a code string contains jinja tokens.""" + logger.debug(":crystal_ball: Checking if code snippet has Jinja => %s", code[:50] + "...") return any(token in code for token in ("{{", "}}", "{%", "%}", "{#", "#}")) def compile_sql_code(context: DbtProjectContext, raw_sql: str) -> ManifestSQLNode: """Compile jinja SQL using the context's manifest and adapter.""" + logger.info(":zap: Compiling SQL code. Possibly with jinja => %s", raw_sql[:75] + "...") tmp_id = str(uuid.uuid4()) with context.manifest_mutex: key = f"{NodeType.SqlOperation}.{context.config.project_name}.{tmp_id}" @@ -488,6 +542,7 @@ def compile_sql_code(context: DbtProjectContext, raw_sql: str) -> ManifestSQLNod node = context.sql_parser.parse_remote(raw_sql, tmp_id) if not _has_jinja(raw_sql): + logger.debug(":scroll: No jinja found in the raw SQL, skipping compile steps.") return node process_node(context.config, context.manifest, node) compiled_node = SqlCompileRunner( @@ -500,11 +555,13 @@ def compile_sql_code(context: DbtProjectContext, raw_sql: str) -> ManifestSQLNod _ = context.manifest.nodes.pop(key, None) + logger.info(":sparkles: Compilation complete.") return compiled_node def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> tuple[AdapterResponse, Table]: """Execute jinja SQL using the context's manifest and adapter.""" + logger.info(":running: Attempting to execute SQL => %s", raw_sql[:75] + "...") if _has_jinja(raw_sql): comp = compile_sql_code(context, raw_sql) sql_to_exec = comp.compiled_code or comp.raw_code @@ -512,6 +569,7 @@ def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> tuple[AdapterR sql_to_exec = raw_sql resp, table = context.adapter.execute(sql_to_exec, auto_begin=False, fetch=True) + logger.info(":white_check_mark: SQL execution complete => %s rows returned.", len(table.rows)) # pyright: ignore[reportUnknownArgumentType] return resp, table @@ -521,12 +579,14 @@ def execute_sql_code(context: DbtProjectContext, raw_sql: str) -> tuple[AdapterR def _is_fqn_match(node: ResultNode, fqns: list[str]) -> bool: """Filter models based on the provided fully qualified name matching on partial segments.""" + logger.debug(":mag_right: Checking if node => %s matches any FQNs => %s", node.unique_id, fqns) for fqn_str in fqns: parts = fqn_str.split(".") segment_match = len(node.fqn[1:]) >= len(parts) and all( left == right for left, right in zip(parts, node.fqn[1:]) ) if segment_match: + logger.debug(":white_check_mark: FQN matched => %s", fqn_str) return True return False @@ -536,13 +596,16 @@ def _is_file_match(node: ResultNode, paths: list[str]) -> bool: node_path = _get_node_path(node) for model in paths: if node.name == model: + logger.debug(":white_check_mark: Name match => %s", model) return True try_path = Path(model).resolve() if try_path.is_dir(): if node_path and try_path in node_path.parents: + logger.debug(":white_check_mark: Directory path match => %s", model) return True elif try_path.is_file(): if node_path and try_path == node_path: + logger.debug(":white_check_mark: File path match => %s", model) return True return False @@ -550,7 +613,9 @@ def _is_file_match(node: ResultNode, paths: list[str]) -> bool: def _get_node_path(node: ResultNode) -> Path | None: """Return the path to the node's original file if available.""" if node.original_file_path and hasattr(node, "root_path"): - return Path(getattr(node, "root_path"), node.original_file_path).resolve() + path = Path(getattr(node, "root_path"), node.original_file_path).resolve() + logger.debug(":file_folder: Resolved node path => %s", path) + return path return None @@ -558,6 +623,10 @@ def filter_models( context: YamlRefactorContext, ) -> Iterator[tuple[str, ResultNode]]: """Iterate over the models in the dbt project manifest applying the filter settings.""" + logger.debug( + ":mag: Filtering nodes (models/sources/seeds) with user-specified settings => %s", + context.settings, + ) def f(node: ResultNode) -> bool: """Closure to filter models based on the context settings.""" @@ -573,6 +642,7 @@ def f(node: ResultNode) -> bool: if context.settings.fqns: if not _is_fqn_match(node, context.settings.fqns): return False + logger.debug(":white_check_mark: Node => %s passed filtering logic.", node.unique_id) return True items = chain(context.project.manifest.nodes.items(), context.project.manifest.sources.items()) @@ -608,7 +678,8 @@ def _find_first( def normalize_column_name(column: str, credentials_type: str) -> str: """Apply case normalization to a column name based on the credentials type.""" if credentials_type == "snowflake" and column.startswith('"') and column.endswith('"'): - return column + logger.debug(":snowflake: Column name found with double-quotes => %s", column) + return column.strip('"') if credentials_type == "snowflake": return column.upper() return column @@ -619,6 +690,7 @@ def _maybe_use_precise_dtype(col: BaseColumn, settings: YamlRefactorSettings) -> if (col.is_numeric() and settings.numeric_precision) or ( col.is_string() and settings.char_length ): + logger.debug(":ruler: Using precise data type => %s", col.data_type) return col.data_type return col.dtype @@ -642,14 +714,19 @@ def get_table_ref(node: ResultNode | BaseRelation) -> TableRef: def get_columns(context: YamlRefactorContext, ref: TableRef) -> dict[str, ColumnMetadata]: """Equivalent to get_columns_meta in old code but directly referencing a key, not a node.""" if ref in _COLUMN_LIST_CACHE: + logger.debug(":blue_book: Column list cache HIT => %s", ref) return _COLUMN_LIST_CACHE[ref] + logger.info(":mag_right: Collecting columns for table => %s", ref) normalized_cols = OrderedDict() offset = 0 def process_column(col: BaseColumn | ColumnMetadata): nonlocal offset if any(re.match(b, col.name) for b in context.skip_patterns): + logger.debug( + ":no_entry_sign: Skipping column => %s due to skip pattern match.", col.name + ) return normalized = normalize_column_name(col.name, context.project.config.credentials.type) if not isinstance(col, ColumnMetadata): @@ -664,25 +741,29 @@ def process_column(col: BaseColumn | ColumnMetadata): process_column(struct_field) if catalog := context.read_catalog(): + logger.debug(":blue_book: Catalog found => Checking for ref => %s", ref) catalog_entry = _find_first( chain(catalog.nodes.values(), catalog.sources.values()), lambda c: c.key() == ref ) if catalog_entry: + logger.info(":books: Found catalog entry for => %s. Using it to process columns.", ref) for column in catalog_entry.columns.values(): process_column(column) return normalized_cols relation: BaseRelation | None = context.project.adapter.get_relation(*ref) if relation is None: + logger.warning(":warning: No relation found => %s", ref) return normalized_cols try: + logger.info(":mag: Introspecting columns in warehouse for => %s", relation) for column in t.cast( Iterable[BaseColumn], context.project.adapter.get_columns_in_relation(relation) ): process_column(column) except Exception as ex: - logger.warning(f"Could not introspect columns for {ref}: {ex}") + logger.warning(":warning: Could not introspect columns for %s: %s", ref, ex) _COLUMN_LIST_CACHE[ref] = normalized_cols return normalized_cols @@ -698,6 +779,7 @@ def create_missing_source_yamls(context: YamlRefactorContext) -> None: This is a useful preprocessing step to ensure that all sources are represented in the dbt project manifest. We do not have rich node information for non-existent sources, hence the alternative codepath here to bootstrap them. """ + logger.info(":factory: Creating missing source YAMLs (if any).") database: str = context.project.config.credentials.database did_side_effect: bool = False @@ -715,6 +797,10 @@ def create_missing_source_yamls(context: YamlRefactorContext) -> None: if _find_first( context.project.manifest.sources.values(), lambda s: s.source_name == source ): + logger.debug( + ":white_check_mark: Source => %s already exists in the manifest, skipping creation.", + source, + ) continue src_yaml_path = Path( @@ -750,14 +836,16 @@ def _describe(rel: BaseRelation) -> dict[str, t.Any]: src_yaml_path.parent.mkdir(parents=True, exist_ok=True) with src_yaml_path.open("w") as f: - logger.info(f"Injecting source {source} => {src_yaml_path}") + logger.info(":books: Injecting new source => %s => %s", source["name"], src_yaml_path) context.yaml_handler.dump({"version": 2, "sources": [source]}, f) context.register_mutations(1) did_side_effect = True if did_side_effect: - logger.info("Reloading project to pick up new sources.") + logger.info( + ":arrows_counterclockwise: Some new sources were created, reloading the project." + ) reload_manifest(context.project) @@ -782,17 +870,22 @@ def _get_yaml_path_template(context: YamlRefactorContext, node: ResultNode) -> s raise MissingOsmosisConfig( f"Config key `dbt-osmosis: ` not set for model {node.name}" ) + logger.debug(":gear: Resolved YAML path template => %s", path_template) return path_template def get_current_yaml_path(context: YamlRefactorContext, node: ResultNode) -> Path | None: """Get the current yaml path for a dbt model or source node.""" if node.resource_type in (NodeType.Model, NodeType.Seed) and getattr(node, "patch_path", None): - return Path(context.project.config.project_root).joinpath( + path = Path(context.project.config.project_root).joinpath( t.cast(str, node.patch_path).partition("://")[-1] ) + logger.debug(":page_facing_up: Current YAML path => %s", path) + return path if node.resource_type == NodeType.Source: - return Path(context.project.config.project_root, node.path) + path = Path(context.project.config.project_root, node.path) + logger.debug(":page_facing_up: Current YAML path => %s", path) + return path return None @@ -800,6 +893,7 @@ def get_target_yaml_path(context: YamlRefactorContext, node: ResultNode) -> Path """Get the target yaml path for a dbt model or source node.""" tpl = _get_yaml_path_template(context, node) if not tpl: + logger.warning(":warning: No path template found for => %s", node.unique_id) return Path(context.project.config.project_root, node.original_file_path) rendered = tpl.format(node=node, model=node.name, parent=node.fqn[-2]) @@ -814,13 +908,19 @@ def get_target_yaml_path(context: YamlRefactorContext, node: ResultNode) -> Path rendered += ".yml" segments.append(rendered) - return Path(context.project.config.project_root, *segments) + path = Path(context.project.config.project_root, *segments) + logger.debug(":star2: Target YAML path => %s", path) + return path def build_yaml_file_mapping( context: YamlRefactorContext, create_missing_sources: bool = False ) -> dict[str, SchemaFileLocation]: """Build a mapping of dbt model and source nodes to their current and target yaml paths.""" + logger.info( + ":globe_with_meridians: Building YAML file mapping. create_missing_sources => %s", + create_missing_sources, + ) if create_missing_sources: create_missing_source_yamls(context) @@ -833,6 +933,8 @@ def build_yaml_file_mapping( current=current_path.resolve() if current_path else None, node_type=node.resource_type, ) + + logger.debug(":card_index_dividers: Built YAML file mapping => %s", out_map) return out_map @@ -844,7 +946,9 @@ def _read_yaml(context: YamlRefactorContext, path: Path) -> dict[str, t.Any]: """Read a yaml file from disk. Adds an entry to the buffer cache so all operations on a path are consistent.""" if path not in _YAML_BUFFER_CACHE: if not path.is_file(): + logger.debug(":warning: Path => %s is not a file. Returning empty doc.", path) return {} + logger.debug(":open_file_folder: Reading YAML doc => %s", path) with context.yaml_handler_lock: _YAML_BUFFER_CACHE[path] = t.cast(dict[str, t.Any], context.yaml_handler.load(path)) return _YAML_BUFFER_CACHE[path] @@ -852,6 +956,7 @@ def _read_yaml(context: YamlRefactorContext, path: Path) -> dict[str, t.Any]: def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any]) -> None: """Write a yaml file to disk and register a mutation with the context. Clears the path from the buffer cache.""" + logger.debug(":page_with_curl: Attempting to write YAML to => %s", path) if not context.settings.dry_run: with context.yaml_handler_lock: path.parent.mkdir(parents=True, exist_ok=True) @@ -859,12 +964,12 @@ def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any] context.yaml_handler.dump(data, staging := io.BytesIO()) modified = staging.getvalue() if modified != original: - logger.info(f"Writing {path}") + logger.info(":writing_hand: Writing changes to => %s", path) with path.open("wb") as f: _ = f.write(modified) context.register_mutations(1) else: - logger.debug(f"Skipping {path} (no changes)") + logger.debug(":white_check_mark: Skipping write => %s (no changes)", path) del staging if path in _YAML_BUFFER_CACHE: del _YAML_BUFFER_CACHE[path] @@ -872,6 +977,7 @@ def _write_yaml(context: YamlRefactorContext, path: Path, data: dict[str, t.Any] def commit_yamls(context: YamlRefactorContext) -> None: """Commit all files in the yaml buffer cache to disk. Clears the buffer cache and registers mutations.""" + logger.info(":inbox_tray: Committing all YAMLs from buffer cache to disk.") if not context.settings.dry_run: with context.yaml_handler_lock: for path in list(_YAML_BUFFER_CACHE.keys()): @@ -879,22 +985,25 @@ def commit_yamls(context: YamlRefactorContext) -> None: context.yaml_handler.dump(_YAML_BUFFER_CACHE[path], staging := io.BytesIO()) modified = staging.getvalue() if modified != original: + logger.info(":writing_hand: Writing => %s", path) with path.open("wb") as f: logger.info(f"Writing {path}") _ = f.write(modified) context.register_mutations(1) else: - logger.debug(f"Skipping {path} (no changes)") + logger.debug(":white_check_mark: Skipping => %s (no changes)", path) del _YAML_BUFFER_CACHE[path] def _generate_minimal_model_yaml(node: ModelNode | SeedNode) -> dict[str, t.Any]: """Generate a minimal model yaml for a dbt model node.""" + logger.debug(":baby: Generating minimal yaml for Model/Seed => %s", node.name) return {"name": node.name, "columns": []} def _generate_minimal_source_yaml(node: SourceDefinition) -> dict[str, t.Any]: """Generate a minimal source yaml for a dbt source node.""" + logger.debug(":baby: Generating minimal yaml for Source => %s", node.name) return {"name": node.source_name, "tables": [{"name": node.name, "columns": []}]} @@ -902,9 +1011,10 @@ def _create_operations_for_node( context: YamlRefactorContext, uid: str, loc: SchemaFileLocation ) -> list[RestructureOperation]: """Create restructure operations for a dbt model or source node.""" + logger.debug(":bricks: Creating restructure operations for => %s", uid) node = context.project.manifest.nodes.get(uid) or context.project.manifest.sources.get(uid) if not node: - logger.warning(f"Node {uid} not found in manifest.") + logger.warning(":warning: Node => %s not found in manifest.", uid) return [] # If loc.current is None => we are generating a brand new file @@ -912,6 +1022,7 @@ def _create_operations_for_node( ops: list[RestructureOperation] = [] if loc.current is None: + logger.info(":sparkles: No current YAML file, building minimal doc => %s", uid) if isinstance(node, (ModelNode, SeedNode)): minimal = _generate_minimal_model_yaml(node) ops.append( @@ -963,6 +1074,7 @@ def _create_operations_for_node( def draft_restructure_delta_plan(context: YamlRefactorContext) -> RestructureDeltaPlan: """Draft a restructure plan for the dbt project.""" + logger.info(":bulb: Drafting restructure delta plan for the project.") plan = RestructureDeltaPlan() lock = threading.Lock() @@ -979,24 +1091,28 @@ def _job(uid: str, loc: SchemaFileLocation) -> None: for fut in done: exc = fut.exception() if exc: + logger.error(":bomb: Error encountered while drafting plan => %s", exc) raise exc + logger.info(":star2: Draft plan creation complete => %s operations", len(plan.operations)) return plan def pretty_print_plan(plan: RestructureDeltaPlan) -> None: """Pretty print the restructure plan for the dbt project.""" + logger.info(":mega: Restructure plan includes => %s operations.", len(plan.operations)) for op in plan.operations: str_content = str(op.content)[:80] + "..." - logger.info(f"Processing {str_content}") + logger.info(":sparkles: Processing => %s", str_content) if not op.superseded_paths: - logger.info(f"CREATE or MERGE => {op.file_path}") + logger.info(":blue_book: CREATE or MERGE => %s", op.file_path) else: old_paths = [p.name for p in op.superseded_paths.keys()] or ["UNKNOWN"] - logger.info(f"{old_paths} -> {op.file_path}") + logger.info(":blue_book: %s -> %s", old_paths, op.file_path) def _remove_models(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> None: """Clean up the existing yaml doc by removing models superseded by the restructure plan.""" + logger.debug(":scissors: Removing superseded models => %s", [n.name for n in nodes]) to_remove = {n.name for n in nodes if n.resource_type == NodeType.Model} keep = [] for section in existing_doc.get("models", []): @@ -1007,6 +1123,7 @@ def _remove_models(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> N def _remove_seeds(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> None: """Clean up the existing yaml doc by removing models superseded by the restructure plan.""" + logger.debug(":scissors: Removing superseded seeds => %s", [n.name for n in nodes]) to_remove = {n.name for n in nodes if n.resource_type == NodeType.Seed} keep = [] for section in existing_doc.get("seeds", []): @@ -1020,6 +1137,7 @@ def _remove_sources(existing_doc: dict[str, t.Any], nodes: list[ResultNode]) -> to_remove_sources = { (n.source_name, n.name) for n in nodes if n.resource_type == NodeType.Source } + logger.debug(":scissors: Removing superseded sources => %s", sorted(to_remove_sources)) keep_sources = [] for section in existing_doc.get("sources", []): keep_tables = [] @@ -1040,6 +1158,7 @@ def _sync_doc_section( This includes columns, description, meta, tags, etc. We assume node is the single source of truth, so doc_section is replaced. """ + logger.debug(":arrows_counterclockwise: Syncing doc_section with node => %s", node.unique_id) if node.description: doc_section["description"] = node.description else: @@ -1100,12 +1219,16 @@ def sync_node_to_yaml( All changes to the Node (columns, metadata, etc.) should happen before calling this function. """ if node is None: + logger.info(":wave: No single node specified; synchronizing all matched nodes.") for _, node in filter_models(context): sync_node_to_yaml(context, node, commit=commit) return current_path = get_current_yaml_path(context, node) if not current_path or not current_path.exists(): + logger.debug( + ":warning: Current path does not exist => %s. Using target path instead.", current_path + ) current_path = get_target_yaml_path(context, node) doc: dict[str, t.Any] = _read_yaml(context, current_path) @@ -1173,6 +1296,7 @@ def sync_node_to_yaml( _ = doc.pop(k, None) if commit: + logger.info(":inbox_tray: Committing YAML doc changes for => %s", node.unique_id) _write_yaml(context, current_path, doc) @@ -1181,10 +1305,11 @@ def apply_restructure_plan( ) -> None: """Apply the restructure plan for the dbt project.""" if not plan.operations: - logger.info("No changes needed.") + logger.info(":white_check_mark: No changes needed in the restructure plan.") return if confirm: + logger.info(":warning: Confirm option set => printing plan and waiting for user input.") pretty_print_plan(plan) while confirm: @@ -1194,9 +1319,10 @@ def apply_restructure_plan( elif response.lower() in ("n", "no", ""): logger.info("Skipping restructure plan.") return - logger.info("Please respond with 'y' or 'n'.") + logger.warning(":loudspeaker: Please respond with 'y' or 'n'.") for op in plan.operations: + logger.debug(":arrow_right: Applying restructure operation => %s", op) output_doc: dict[str, t.Any] = {"version": 2} if op.file_path.exists(): existing_data = _read_yaml(context, op.file_path) @@ -1232,11 +1358,16 @@ def apply_restructure_plan( if path in _YAML_BUFFER_CACHE: del _YAML_BUFFER_CACHE[path] context.register_mutations(1) - logger.info(f"Superseded entire file {path}") + logger.info(":heavy_minus_sign: Superseded entire file => %s", path) else: _write_yaml(context, path, existing_data) - logger.info(f"Migrated doc from {path} -> {op.file_path}") + logger.info( + ":arrow_forward: Migrated doc from => %s to => %s", path, op.file_path + ) + logger.info( + ":arrows_counterclockwise: Committing all restructure changes and reloading manifest." + ) _ = commit_yamls(context), reload_manifest(context.project) @@ -1252,7 +1383,7 @@ def _build_node_ancestor_tree( depth: int = 1, ) -> dict[str, list[str]]: """Build a flat graph of a node and it's ancestors.""" - + logger.debug(":seedling: Building ancestor tree/branch for => %s", node.unique_id) if tree is None or visited is None: visited = set(node.unique_id) tree = {"generation_0": [node.unique_id]} @@ -1312,6 +1443,7 @@ def _build_column_knowledge_graph( ) -> dict[str, dict[str, t.Any]]: """Generate a column knowledge graph for a dbt model or source node.""" tree = _build_node_ancestor_tree(context.project.manifest, node) + logger.debug(":family_tree: Node ancestor tree => %s", tree) pm = get_plugin_manager() node_column_variants: dict[str, list[str]] = {} @@ -1394,10 +1526,12 @@ def inherit_upstream_column_knowledge( ) -> None: """Inherit column level knowledge from the ancestors of a dbt model or source node.""" if node is None: + logger.info(":wave: Inheriting column knowledge across all matched nodes.") for _, node in filter_models(context): inherit_upstream_column_knowledge(context, node) return None + logger.info(":dna: Inheriting column knowledge for => %s", node.unique_id) inheritable = ["description"] if not context.settings.skip_add_tags: inheritable.append("tags") @@ -1415,14 +1549,19 @@ def inherit_upstream_column_knowledge( continue updated_metadata = {k: v for k, v in kwargs.items() if v is not None and k in inheritable} + logger.debug( + ":star2: Inheriting updated metadata => %s for column => %s", updated_metadata, name + ) node.columns[name] = node_column.replace(**updated_metadata) def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None = None) -> None: """Add missing columns to a dbt node and it's corresponding yaml section. Changes are implicitly buffered until commit_yamls is called.""" if context.settings.skip_add_columns: + logger.debug(":no_entry_sign: Skipping column injection (skip_add_columns=True).") return if node is None: + logger.info(":wave: Injecting missing columns for all matched nodes.") for _, node in filter_models(context): inject_missing_columns(context, node) return @@ -1434,7 +1573,9 @@ def inject_missing_columns(context: YamlRefactorContext, node: ResultNode | None for incoming_name, incoming_meta in incoming_columns.items(): if incoming_name not in node.columns and incoming_name not in current_columns: logger.info( - f"Detected and reconciling missing column {incoming_name} in node {node.unique_id}" + ":heavy_plus_sign: Reconciling missing column => %s in node => %s", + incoming_name, + node.unique_id, ) gen_col = {"name": incoming_name, "description": incoming_meta.comment or ""} if dtype := incoming_meta.type: @@ -1447,6 +1588,7 @@ def remove_columns_not_in_database( ) -> None: """Remove columns from a dbt node and it's corresponding yaml section that are not present in the database. Changes are implicitly buffered until commit_yamls is called.""" if node is None: + logger.info(":wave: Removing columns not in DB across all matched nodes.") for _, node in filter_models(context): remove_columns_not_in_database(context, node) return @@ -1457,7 +1599,11 @@ def remove_columns_not_in_database( incoming_columns = get_columns(context, get_table_ref(node)) extra_columns = current_columns - set(incoming_columns.keys()) for extra_column in extra_columns: - logger.info(f"Detected and removing extra column {extra_column} in node {node.unique_id}") + logger.info( + ":heavy_minus_sign: Removing extra column => %s in node => %s", + extra_column, + node.unique_id, + ) _ = node.columns.pop(extra_column, None) @@ -1466,9 +1612,11 @@ def sort_columns_as_in_database( ) -> None: """Sort columns in a dbt node and it's corresponding yaml section as they appear in the database. Changes are implicitly buffered until commit_yamls is called.""" if node is None: + logger.info(":wave: Sorting columns as they appear in DB across all matched nodes.") for _, node in filter_models(context): sort_columns_as_in_database(context, node) return + logger.info(":1234: Sorting columns by warehouse order => %s", node.unique_id) incoming_columns = get_columns(context, get_table_ref(node)) def _position(column: dict[str, t.Any]): @@ -1487,9 +1635,11 @@ def sort_columns_alphabetically( ) -> None: """Sort columns in a dbt node and it's corresponding yaml section alphabetically. Changes are implicitly buffered until commit_yamls is called.""" if node is None: + logger.info(":wave: Sorting columns alphabetically across all matched nodes.") for _, node in filter_models(context): sort_columns_alphabetically(context, node) return + logger.info(":alphabet_white: Sorting columns alphabetically => %s", node.unique_id) node.columns = {k: v for k, v in sorted(node.columns.items(), key=lambda i: i[0])} @@ -1517,6 +1667,7 @@ def get_candidates(self, name: str, node: ResultNode, context: DbtProjectContext cc := re.sub("_(.)", lambda m: m.group(1).upper(), name), # camelCase cc[0].upper() + cc[1:], # PascalCase ] + logger.debug(":lower_upper_case: FuzzyCaseMatching variants => %s", variants) return variants @@ -1539,7 +1690,11 @@ def get_candidates(self, name: str, node: ResultNode, context: DbtProjectContext lambda v: bool(v), ) if p: - variants.append(name.removeprefix(p)) + mut_name = name.removeprefix(p) + logger.debug( + ":scissors: FuzzyPrefixMatching => removing prefix '%s' => %s", p, mut_name + ) + variants.append(mut_name) return variants From 83e867c42995db370ba57019e0c8dadc3caecd84 Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Thu, 2 Jan 2025 03:16:22 -0700 Subject: [PATCH 45/46] WIP --- .github/workflows/lint.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 488d8048..3563e93d 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -20,7 +20,7 @@ jobs: - name: Install pre-commit hooks run: | pip install -U pip==23.1.0 - pip install -U pre-commit==3.4.0 + pip install -U pre-commit==3.4.0 uv==0.5.13 pre-commit install - name: Run pre-commit hooks run: | From 362307a9517f6e3e4a857e740525a7db590dd5fb Mon Sep 17 00:00:00 2001 From: z3z1ma Date: Thu, 2 Jan 2025 03:24:05 -0700 Subject: [PATCH 46/46] chore: fix lint --- .pre-commit-config.yaml | 27 +++++---------------------- Makefile | 3 +++ 2 files changed, 8 insertions(+), 22 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c9d74c33..56c45029 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v5.0.0 hooks: - id: check-ast - id: check-json @@ -11,7 +11,7 @@ repos: - id: detect-private-key - id: debug-statements - repo: https://github.com/commitizen-tools/commitizen - rev: v3.13.0 + rev: v4.1.0 hooks: - id: commitizen - repo: https://github.com/astral-sh/ruff-pre-commit @@ -22,27 +22,10 @@ repos: - id: ruff args: [--fix] - repo: https://github.com/gitleaks/gitleaks - rev: v8.18.2 + rev: v8.22.1 hooks: - id: gitleaks - repo: https://github.com/rhysd/actionlint - rev: v1.7.1 + rev: v1.7.5 hooks: - - id: actionlint-docker - - repo: https://github.com/shellcheck-py/shellcheck-py - rev: v0.10.0.1 - hooks: - - id: shellcheck - - repo: https://github.com/hadolint/hadolint - rev: v2.13.0-beta - hooks: - - id: hadolint-docker - - repo: local - hooks: - - id: uv-export-prod - name: uv-export-prod - entry: uv export -o requirements.txt --no-hashes --frozen - language: system - types: [file] - files: pyproject.toml|uv.lock - pass_filenames: false + - id: actionlint diff --git a/Makefile b/Makefile index 4b36f579..1fbb6dbf 100644 --- a/Makefile +++ b/Makefile @@ -54,3 +54,6 @@ scan-new-baseline: .uv-installed-$(PY_VERSION) scan-without-baseline: .uv-installed-$(PY_VERSION) @uvx bandit -r src + +requirements.txt: .uv-installed-$(PY_VERSION) + @uv export -o requirements.txt --no-hashes --frozen