Skip to content

Commit

Permalink
chore: fix typo `dataset_tpye` → `dataset_type` (#643)
Browse files Browse the repository at this point in the history
  • Loading branch information
longxiaofei authored Oct 15, 2024
1 parent 075ad7a commit 8cc152d
Show file tree
Hide file tree
Showing 9 changed files with 13 additions and 13 deletions.
6 changes: 3 additions & 3 deletions pygwalker/api/pygwalker.py
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ def __init__(
self.use_save_tool = use_save_tool
self.parse_dsl_type = self._get_parse_dsl_type(self.data_parser)
self.gw_mode = gw_mode
self.dataset_type = self.data_parser.dataset_tpye
self.dataset_type = self.data_parser.dataset_type
self.is_export_dataframe = is_export_dataframe
self._last_exported_dataframe = None
self.default_tab = default_tab
Expand Down Expand Up @@ -147,9 +147,9 @@ def _get_data_parser(
)

def _get_parse_dsl_type(self, data_parser: BaseDataParser) -> Literal["server", "client"]:
if data_parser.dataset_tpye.startswith("connector"):
if data_parser.dataset_type.startswith("connector"):
return "server"
if data_parser.dataset_tpye == "cloud_dataset":
if data_parser.dataset_type == "cloud_dataset":
return "server"
return "client"

Expand Down
4 changes: 2 additions & 2 deletions pygwalker/data_parsers/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ def to_parquet(self) -> io.BytesIO:

@property
@abc.abstractmethod
def dataset_tpye(self) -> str:
def dataset_type(self) -> str:
"""get dataset type"""
raise NotImplementedError

Expand Down Expand Up @@ -215,7 +215,7 @@ def batch_get_datas_by_payload(self, payload_list: List[Dict[str, Any]]) -> List
]

@property
def dataset_tpye(self) -> str:
def dataset_type(self) -> str:
return "dataframe_default"

@property
Expand Down
2 changes: 1 addition & 1 deletion pygwalker/data_parsers/cloud_dataset_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ def batch_get_datas_by_payload(self, payload_list: List[Dict[str, Any]]) -> List
]

@property
def dataset_tpye(self) -> str:
def dataset_type(self) -> str:
return "cloud_dataset"

@property
Expand Down
2 changes: 1 addition & 1 deletion pygwalker/data_parsers/database_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,7 @@ def batch_get_datas_by_payload(self, payload_list: List[Dict[str, Any]]) -> List
]

@property
def dataset_tpye(self) -> str:
def dataset_type(self) -> str:
return f"connector_{self.conn.dialect_name}"

@property
Expand Down
2 changes: 1 addition & 1 deletion pygwalker/data_parsers/modin_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,5 +71,5 @@ def _infer_analytic(self, s: mpd.Series, field_name: str):
return "dimension"

@property
def dataset_tpye(self) -> str:
def dataset_type(self) -> str:
return "modin_dataframe"
2 changes: 1 addition & 1 deletion pygwalker/data_parsers/pandas_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,5 +60,5 @@ def _infer_analytic(self, s: pd.Series, field_name: str):
return "dimension"

@property
def dataset_tpye(self) -> str:
def dataset_type(self) -> str:
return "pandas_dataframe"
2 changes: 1 addition & 1 deletion pygwalker/data_parsers/polars_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,5 +62,5 @@ def _infer_analytic(self, s: pl.Series, field_name: str):
return "dimension"

@property
def dataset_tpye(self) -> str:
def dataset_type(self) -> str:
return "polars_dataframe"
2 changes: 1 addition & 1 deletion pygwalker/data_parsers/spark_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -105,7 +105,7 @@ def _rename_dataframe(self, df: DataFrame) -> DataFrame:
return df.toDF(*new_columns)

@property
def dataset_tpye(self) -> str:
def dataset_type(self) -> str:
return "spark_dataframe"

@property
Expand Down
4 changes: 2 additions & 2 deletions pygwalker/services/cloud_service.py
Original file line number Diff line number Diff line change
Expand Up @@ -364,10 +364,10 @@ def create_cloud_dataset(
if name is None:
name = f"pygwalker_{datetime.now().strftime('%Y%m%d%H%M')}"

if data_parser.dataset_tpye == "cloud_dataset":
if data_parser.dataset_type == "cloud_dataset":
raise ValueError("dataset is already a cloud dataset")

if data_parser.dataset_tpye.startswith("connector"):
if data_parser.dataset_type.startswith("connector"):
connector = data_parser.conn
datasource_name = "pygwalker_" + hashlib.md5(connector.url.encode()).hexdigest()
datasource_id = self.get_datasource_by_name(datasource_name)
Expand Down

0 comments on commit 8cc152d

Please sign in to comment.