From d5c9aaf79e4d520d343e8b6b8d066ebf54ded0e2 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 30 Aug 2021 12:17:04 +0300
Subject: [PATCH 01/23] implementing missing APIs

---
 pbiapi/pbiapi.py | 171 ++++++++++++++++++++++++++++++++++++++++++++++-
 1 file changed, 168 insertions(+), 3 deletions(-)

diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index 13a4a63..e5abec1 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -3,7 +3,7 @@
 import os
 from typing import Callable, Dict, List, NoReturn, Union
 from urllib import parse
-
+import json
 import requests
 
 from pbiapi.utils import partition
@@ -70,9 +70,9 @@ def get_workspaces(self) -> List:
         self.force_raise_http_error(response)
 
     @staticmethod
-    def find_entity_id_by_name(entity_list: List, name: str, entity_type: str, raise_if_missing: bool = False) -> str:
+    def find_entity_id_by_name(entity_list: List, name: str, entity_type: str , raise_if_missing: bool = False ,attribute_name_alias: str = "name") -> str:
         for item in entity_list:
-            if item["name"] == name:
+            if item[attribute_name_alias] == name:
                 return item["id"]
         if raise_if_missing:
             raise RuntimeError(f"No {entity_type} was found with the name: '{name}'")
@@ -157,6 +157,17 @@ def get_datasets_in_workspace(self, workspace_name: str) -> List:
         if response.status_code == HTTP_OK_CODE:
             return response.json()["value"]
 
+    @check_token
+    def get_datasets(self) -> List:
+
+        datasets_url = self.base_url + f"datasets"
+        response = requests.get(datasets_url, headers=self.headers)
+        response.raise_for_status()
+        if response.status_code == HTTP_OK_CODE:
+            return response.json()["value"]
+
+
+
     @check_token
     def refresh_dataset_by_id(self, workspace_name: str, dataset_id: str) -> None:
         workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
@@ -424,3 +435,157 @@ def get_workspace_and_dataset_id(self, workspace_name: str, dataset_name: str) -
         dataset_id = self.find_entity_id_by_name(datasets, dataset_name, "dataset", raise_if_missing=True)
 
         return workspace_id, dataset_id
+
+    @check_token
+    def get_pipelines(self) -> List:
+        url = self.base_url + "pipelines"
+        print(url)
+        response = requests.get(url, headers=self.headers)
+        if response.status_code == HTTP_OK_CODE:
+            self._workspaces = response.json()["value"]
+            return self._workspaces
+        else:
+            logging.error("Failed to fetch pipelines!")
+            self.force_raise_http_error(response)
+
+
+    @check_token
+    def get_pipeline(self, pipeline_id: str ) -> List:
+        url = self.base_url + f"pipelines/{pipeline_id}"
+        response = requests.get(url, headers=self.headers)
+        print(response.json())
+        if response.status_code == HTTP_OK_CODE:
+            self._workspaces = response.json()
+            return self._workspaces
+        else:
+            logging.error("Failed to fetch pipeline!")
+            self.force_raise_http_error(response)
+
+    @check_token
+    def get_pipeline_by_name(self, pipeline_name) -> List:
+        pipelines_list=self.get_pipelines()
+        pipeline_id = self.find_entity_id_by_name(pipelines_list, pipeline_name, "pipelines", raise_if_missing=True,attribute_name_alias='displayName' )
+        print('pipeline id: %s' % pipeline_id)
+        return (self.get_pipeline(pipeline_id))
+
+    @check_token
+    def get_pipeline_operations(self, pipeline_id: str) -> List:
+        url = self.base_url + f"pipelines/{pipeline_id}/operations"
+        response = requests.get(url, headers=self.headers)
+        if response.status_code == HTTP_OK_CODE:
+            self._workspaces = response.json()["value"]
+            return self._workspaces
+        else:
+            logging.error("Failed to fetch pipeline operations!")
+            self.force_raise_http_error(response)
+
+    @check_token
+    def get_pipeline_operations_by_name(self, pipeline_name: str) -> List:
+        pipelines_list=self.get_pipelines()
+        pipeline_id = self.find_entity_id_by_name(pipelines_list, pipeline_name, "pipelines", raise_if_missing=True,attribute_name_alias='displayName' )
+        print('pipeline id: %s' % pipeline_id)
+        return (self.get_pipeline_operations(pipeline_id))
+
+
+    @check_token
+    def clone_report_by_name(self, workspace_name: str, report_name: str, new_report_name: str , target_work_space_name: str=None, target_model_id: str=None) -> None:
+        workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
+        workspace_reports=self.get_reports_in_workspace(workspace_name)
+        report_id=self.find_entity_id_by_name(workspace_reports, report_name, "reports", raise_if_missing=True)
+        url = self.base_url + f"groups/{workspace_id}/reports/{report_id}/Clone"
+        data={}
+        data['Name']=new_report_name
+        if (target_work_space_name != None):
+            target_workspace_id = self.find_entity_id_by_name(self.workspaces, target_work_space_name, "workspace", raise_if_missing=True)
+            data['targetWorkspaceId']= target_workspace_id
+        if (target_model_id != None):
+            data['targetModelId']= target_model_id
+        # data="Name=" + new_report_name
+        response = requests.post(url, data=data, headers=self.headers)
+
+        if response.status_code == 200:
+            logging.info(f"report {report_id} from workspace {workspace_name}) was cloned ")
+            return (response.json())
+        else:
+            logging.error("Dataset refresh failed!")
+            self.force_raise_http_error(response, expected_codes=200)
+
+    @check_token
+    def get_dataset_datasources(self, workspace_id,dataset_id) -> List:
+        url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/datasources"
+        response = requests.get(url, headers=self.headers)
+
+        if response.status_code == HTTP_OK_CODE:
+            self._workspaces = response.json()["value"]
+            return self._workspaces
+        else:
+            logging.error("Failed to datasources!")
+            self.force_raise_http_error(response)
+
+    @check_token
+    def get_dataset_datasources_by_name(self, workspace_name,dataset_name) -> List:
+        workspace_id, dataset_id = self.get_workspace_and_dataset_id(workspace_name, dataset_name)
+        print('workspace_id: %s , dataset id: %s' % (workspace_id, dataset_id ))
+        return(self.get_dataset_datasources(workspace_id,dataset_id))
+
+
+
+    @check_token
+    def update_datasource(self, gateway_id: str, datasource_id: str, user_name: str, password: str):
+
+        url = self.base_url + f"gateways/{gateway_id}/datasources/{datasource_id}"
+        headers = {"Content-Type": "application/json", **self.get_auth_header()}
+
+        credentialDetails={"credentialType": "Basic",
+            "encryptedConnection": "Encrypted",
+            "encryptionAlgorithm": "None",
+            "privacyLevel": "None",
+            "useEndUserOAuth2Credentials": "False"}
+        credentialDetails["credentials"]="{'credentialData':[{'name':'username', 'value': user_name},{'name':'password', 'value': password}]}"
+
+        data={'credentialDetails': credentialDetails}
+        print(data)
+
+        response = requests.patch(url, headers=headers, json=data)
+        if response.status_code == HTTP_OK_CODE:
+            logging.info(f"update credentials Complete")
+        else:
+            logging.error(f"update credentials failed for gateway_id {gateway_id} and datasource_id {datasource_id}!")
+            self.force_raise_http_error(response)
+
+    @check_token
+    def execute_queries(self, dataset_id: str, query_list: list, serializerSettings: dict) -> None:
+
+        body = {"queries": query_list, "serializerSettings": serializerSettings}
+        # Workspace exists, lets add user:
+        url = self.base_url + f"datasets/{dataset_id}/executeQueries"
+        print ('url=%s' % url)
+        headers = {"Content-Type": "application/json", **self.get_auth_header()}
+        print ('headers=%s' % headers)
+        print ('json=%s' % json)
+        response = requests.post(url, json=body, headers=headers)
+
+        if response.status_code == HTTP_OK_CODE:
+            logging.info(f"success execute_queries")
+            return(json.loads(response.text.encode('utf8')))
+        else:
+            logging.error(f"Failed to execute_queries': {json}")
+            self.force_raise_http_error(response)
+
+    @check_token
+    def execute_queries_by_name(self, workspace_name: str, dataset_name: str, query_list: list, serializerSettings: dict) -> None:
+        datasets = self.get_datasets_in_workspace(workspace_name)
+        dataset_id = self.find_entity_id_by_name(datasets, dataset_name, "dataset", True)
+        return(self.execute_queries(dataset_id=dataset_id, query_list=query_list, serializerSettings=serializerSettings))
+
+    @check_token
+    def bind_to_gateway(self, dataset_Id: str, gateway_id: str) -> None:
+#403: {"Message":"API is not accessible for application"}
+        url = self.base_url + f"datasets/{dataset_Id}/Default.BindToGateway"
+        gatewayObject={"gatewayObjectId": gateway_id}
+        response = requests.post(url, json=gatewayObject, headers=self.headers)
+
+        if response.status_code == HTTP_OK_CODE:
+            logging.info(f"Takeover of dataset {dataset_Id} Complete")
+        else:
+            logging.error(f"Takeover of dataset {dataset_Id} failed!")
+            self.force_raise_http_error(response)
\ No newline at end of file

From 2e6f5b1a3fc58a2a6de64079558b0222391e6036 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 30 Aug 2021 12:37:52 +0300
Subject: [PATCH 02/23] adding execute_query_driver

---
 execute_query_driver.py | 35 +++++++++++++++++++++++++++++++++++
 1 file changed, 35 insertions(+)
 create mode 100644 execute_query_driver.py

diff --git a/execute_query_driver.py b/execute_query_driver.py
new file mode 100644
index 0000000..cf4ee04
--- /dev/null
+++ b/execute_query_driver.py
@@ -0,0 +1,35 @@
+from pbiapi import PowerBIAPIClient
+import os
+import argparse
+from pathlib import Path
+from requests.exceptions import HTTPError
+azure_tenant_id=os.environ.get('AZURE_TENANT_ID')
+azure_client_id=os.environ.get('AZURE_CLIENT_ID')
+azure_client_secret=os.environ.get('AZURE_CLIENT_SECRET')
+
+def main():
+    pbi_client = PowerBIAPIClient(
+        azure_tenant_id,
+        azure_client_id,
+        azure_client_secret,
+    )
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--workspace_name",dest="workspace_name", help="workspace name")
+    parser.add_argument("--ds_name",dest="ds_name", help="ds_name")
+    parser.add_argument("--query",dest="query", help="dax query")
+    args =parser.parse_args()
+    print(args)
+    query={}
+    query['query']=args.query
+    queries=[]
+    queries.append(query)
+    serializerSettings={}
+    serializerSettings['includeNulls']= 'true'
+    res=pbi_client.execute_queries_by_name(workspace_name=args.workspace_name, dataset_name=args.ds_name, query_list=queries, serializerSettings=serializerSettings)
+
+    print(res)
+
+
+if __name__ == "__main__":
+    main()
+

From 05cf617264d6de6139c8580bd01743987986c52b Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 30 Aug 2021 12:53:55 +0300
Subject: [PATCH 03/23] add rebuild script

---
 rebuild.sh | 6 ++++++
 1 file changed, 6 insertions(+)
 create mode 100755 rebuild.sh

diff --git a/rebuild.sh b/rebuild.sh
new file mode 100755
index 0000000..510acc8
--- /dev/null
+++ b/rebuild.sh
@@ -0,0 +1,6 @@
+pip uninstall pbiapi --y
+poetry build
+pip install ./dist/pbiapi-0.2.4-py3-none-any.whl
+#cp ./dist/pbiapi-0.2.4-py3-none-any.whl ../../devops/powerbi_cicd_template/lib/pbiapi-0.2.4-py3-none-any.whl
+#cd ../../devops/powerbi_cicd_template/
+#git add lib/pbiapi-0.2.4-py3-none-any.whl

From 58c076e324fc648e7b7d1c7776af9b04f8fbf058 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 30 Aug 2021 12:56:42 +0300
Subject: [PATCH 04/23] fix version

---
 rebuild.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rebuild.sh b/rebuild.sh
index 510acc8..fd08fcd 100755
--- a/rebuild.sh
+++ b/rebuild.sh
@@ -1,6 +1,6 @@
 pip uninstall pbiapi --y
 poetry build
-pip install ./dist/pbiapi-0.2.4-py3-none-any.whl
+pip install ./dist/pbiapi-0.2.2-py3-none-any.whl
 #cp ./dist/pbiapi-0.2.4-py3-none-any.whl ../../devops/powerbi_cicd_template/lib/pbiapi-0.2.4-py3-none-any.whl
 #cd ../../devops/powerbi_cicd_template/
 #git add lib/pbiapi-0.2.4-py3-none-any.whl

From 6751e977137c3bfed514caa6dc303959f2cc0f9c Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 30 Aug 2021 15:33:50 +0300
Subject: [PATCH 05/23] fix update credentials

---
 pbiapi/pbiapi.py | 23 +++++++++++++----------
 1 file changed, 13 insertions(+), 10 deletions(-)

diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index e5abec1..dc9d238 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -430,8 +430,9 @@ def force_raise_http_error(
 
     def get_workspace_and_dataset_id(self, workspace_name: str, dataset_name: str) -> Union:
         workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
-
+        print ('workspace_id=%s' % workspace_id)
         datasets = self.get_datasets_in_workspace(workspace_name)
+        print ('datasets=%s' % datasets)
         dataset_id = self.find_entity_id_by_name(datasets, dataset_name, "dataset", raise_if_missing=True)
 
         return workspace_id, dataset_id
@@ -490,7 +491,7 @@ def clone_report_by_name(self, workspace_name: str, report_name: str, new_report
         workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
         workspace_reports=self.get_reports_in_workspace(workspace_name)
-        report_id=self.find_entity_id_by_name(workspace_reports, report_name, "reports", raise_if_missing=True) 
+        report_id=self.find_entity_id_by_name(workspace_reports, report_name, "reports", raise_if_missing=True)
         url = self.base_url + f"groups/{workspace_id}/reports/{report_id}/Clone"
         data={}
         data['Name']=new_report_name
@@ -508,7 +509,7 @@ def clone_report_by_name(self, workspace_name: str, report_name: str, new_report
         else:
             logging.error("Dataset refresh failed!")
             self.force_raise_http_error(response, expected_codes=200)
-    
+
     @check_token
     def get_dataset_datasources(self, workspace_id,dataset_id) -> List:
         url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/datasources"
         response = requests.get(url, headers=self.headers)
@@ -534,17 +535,19 @@ def update_datasource(self, gateway_id: str, datasource_id: str, user_name: str,
 
         url = self.base_url + f"gateways/{gateway_id}/datasources/{datasource_id}"
         headers = {"Content-Type": "application/json", **self.get_auth_header()}
-        
+
         credentialDetails={"credentialType": "Basic",
             "encryptedConnection": "Encrypted",
             "encryptionAlgorithm": "None",
             "privacyLevel": "None",
             "useEndUserOAuth2Credentials": "False"}
-        credentialDetails["credentials"]="{'credentialData':[{'name':'username', 'value': user_name},{'name':'password', 'value': password}]}"
-        
+
+        credentials={}
+        credentials['credentialData']=[{'name': 'username' , 'value': user_name} ,{'name': 'password', 'value': password} ]
+        credentialDetails["credentials"]=str(credentials)
         data={'credentialDetails': credentialDetails}
         print(data)
-        
+
         response = requests.patch(url, headers=headers, json=data)
         if response.status_code == HTTP_OK_CODE:
             logging.info(f"update credentials Complete")
@@ -556,7 +557,7 @@ def update_datasource(self, gateway_id: str, datasource_id: str, user_name: str,
             self.force_raise_http_error(response)
 
     @check_token
-    def execute_queries(self, dataset_id: str, query_list: list, serializerSettings: dict) -> None: 
+    def execute_queries(self, dataset_id: str, query_list: list, serializerSettings: dict) -> None:
 
         body = {"queries": query_list, "serializerSettings": serializerSettings}
         # Workspace exists, lets add user:
         url = self.base_url + f"datasets/{dataset_id}/executeQueries"
@@ -570,7 +573,7 @@ def execute_queries(self, dataset_id: str, query_list: list, serializerSettings
         else:
             logging.error(f"Failed to execute_queries': {json}")
             self.force_raise_http_error(response)
-    
+
     @check_token
     def execute_queries_by_name(self, workspace_name: str, dataset_name: str, query_list: list, serializerSettings: dict) -> None:
         datasets = self.get_datasets_in_workspace(workspace_name)
@@ -588,4 +591,4 @@ def bind_to_gateway(self, dataset_Id: str, gateway_id: str) -> None:
             logging.info(f"Takeover of dataset {dataset_Id} Complete")
         else:
             logging.error(f"Takeover of dataset {dataset_Id} failed!")
-            self.force_raise_http_error(response)
\ No newline at end of file
+            self.force_raise_http_error(response)

From 4264b202c3027b50d45ffdb07e034948ad5ebf8e Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 30 Aug 2021 15:42:36 +0300
Subject: [PATCH 06/23] credential update driver

---
 credentials_update_driver.py | 34 ++++++++++++++++++++++++++++++++++
 1 file changed, 34 insertions(+)
 create mode 100644 credentials_update_driver.py

diff --git a/credentials_update_driver.py b/credentials_update_driver.py
new file mode 100644
index 0000000..bc66de2
--- /dev/null
+++ b/credentials_update_driver.py
@@ -0,0 +1,34 @@
+from pbiapi import PowerBIAPIClient
+import os
+import argparse
+from pathlib import Path
+from requests.exceptions import HTTPError
+import json
+azure_tenant_id=os.environ.get('AZURE_TENANT_ID')
+azure_client_id=os.environ.get('AZURE_CLIENT_ID')
+azure_client_secret=os.environ.get('AZURE_CLIENT_SECRET')
+
+
+#def setDBConnection(pbi_client, ws, ):
+
+def main():
+    pbi_client = PowerBIAPIClient(
+        azure_tenant_id,
+        azure_client_id,
+        azure_client_secret,
+    )
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--workspace_name",dest="workspace_name", help="workspace name")
+    parser.add_argument("--ds_name",dest="ds_name", help="ds_name")
+    parser.add_argument("--username",dest="username", help="db username")
+    parser.add_argument("--password",dest="password", help="db password")
+    args =parser.parse_args()
+    print(args)
+    pbi_client.take_over_dataset(args.workspace_name, args.ds_name)
+    gw_id=pbi_client.get_dataset_datasources_by_name(args.workspace_name, args.ds_name)
+    gatewayId=gw_id[0]['gatewayId']
+    datasourceId=gw_id[0]['datasourceId']
+    pbi_client.update_datasource( gatewayId, datasourceId, user_name=args.username, password=args.password)
+if __name__ == "__main__":
+    main()
+
From 8624ae683aa7b9362bae6d8f145ac3dec1ae3288 Mon Sep 17 00:00:00 2001
From: amit cahanovich
Date: Mon, 30 Aug 2021 15:45:10 +0300
Subject: [PATCH 07/23] Update rebuild.sh

---
 rebuild.sh | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/rebuild.sh b/rebuild.sh
index fd08fcd..9560b02 100755
--- a/rebuild.sh
+++ b/rebuild.sh
@@ -1,6 +1,3 @@
 pip uninstall pbiapi --y
 poetry build
 pip install ./dist/pbiapi-0.2.2-py3-none-any.whl
-#cp ./dist/pbiapi-0.2.4-py3-none-any.whl ../../devops/powerbi_cicd_template/lib/pbiapi-0.2.4-py3-none-any.whl
-#cd ../../devops/powerbi_cicd_template/
-#git add lib/pbiapi-0.2.4-py3-none-any.whl

From 0edd3ca29f30135b5e373804d30f9c16df7601a5 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Fri, 10 Sep 2021 11:42:56 +0300
Subject: [PATCH 08/23] formatting files by executing black and isort (poetry
 run black . poetry run isort -rc .)

---
 credentials_update_driver.py |  45 +++++-----
 execute_query_driver.py      |  46 ++++++-----
 pbiapi/pbiapi.py             | 154 +++++++++++++++++++++++------------
 3 files changed, 151 insertions(+), 94 deletions(-)

diff --git a/credentials_update_driver.py b/credentials_update_driver.py
index bc66de2..cd7a5f1 100644
--- a/credentials_update_driver.py
+++ b/credentials_update_driver.py
@@ -1,34 +1,35 @@
-from pbiapi import PowerBIAPIClient
-import os
 import argparse
+import json
+import os
 from pathlib import Path
+
 from requests.exceptions import HTTPError
-import json
-azure_tenant_id=os.environ.get('AZURE_TENANT_ID')
-azure_client_id=os.environ.get('AZURE_CLIENT_ID')
-azure_client_secret=os.environ.get('AZURE_CLIENT_SECRET')
+
+from pbiapi import PowerBIAPIClient
+
+azure_tenant_id = os.environ.get("AZURE_TENANT_ID")
+azure_client_id = os.environ.get("AZURE_CLIENT_ID")
+azure_client_secret = os.environ.get("AZURE_CLIENT_SECRET")
 
 
-#def setDBConnection(pbi_client, ws, ):
+# def setDBConnection(pbi_client, ws, ):
+
 
 def main():
-    pbi_client = PowerBIAPIClient(
-        azure_tenant_id,
-        azure_client_id,
-        azure_client_secret,
-    )
+    pbi_client = PowerBIAPIClient(azure_tenant_id, azure_client_id, azure_client_secret,)
     parser = argparse.ArgumentParser()
-    parser.add_argument("--workspace_name",dest="workspace_name", help="workspace name")
-    parser.add_argument("--ds_name",dest="ds_name", help="ds_name")
-    parser.add_argument("--username",dest="username", help="db username")
-    parser.add_argument("--password",dest="password", help="db password")
-    args =parser.parse_args()
+    parser.add_argument("--workspace_name", dest="workspace_name", help="workspace name")
+    parser.add_argument("--ds_name", dest="ds_name", help="ds_name")
+    parser.add_argument("--username", dest="username", help="db username")
+    parser.add_argument("--password", dest="password", help="db password")
+    args = parser.parse_args()
     print(args)
     pbi_client.take_over_dataset(args.workspace_name, args.ds_name)
-    gw_id=pbi_client.get_dataset_datasources_by_name(args.workspace_name, args.ds_name)
-    gatewayId=gw_id[0]['gatewayId']
-    datasourceId=gw_id[0]['datasourceId']
-    pbi_client.update_datasource( gatewayId, datasourceId, user_name=args.username, password=args.password)
+    gw_id = pbi_client.get_dataset_datasources_by_name(args.workspace_name, args.ds_name)
+    gatewayId = gw_id[0]["gatewayId"]
+    datasourceId = gw_id[0]["datasourceId"]
+    pbi_client.update_datasource(gatewayId, datasourceId, user_name=args.username, password=args.password)
+
+
 if __name__ == "__main__":
     main()
-
diff --git a/execute_query_driver.py b/execute_query_driver.py
index cf4ee04..a7ac2e0 100644
--- a/execute_query_driver.py
+++ b/execute_query_driver.py
@@ -1,35 +1,39 @@
-from pbiapi import PowerBIAPIClient
-import os
 import argparse
+import os
 from pathlib import Path
+
 from requests.exceptions import HTTPError
-azure_tenant_id=os.environ.get('AZURE_TENANT_ID')
-azure_client_id=os.environ.get('AZURE_CLIENT_ID')
-azure_client_secret=os.environ.get('AZURE_CLIENT_SECRET')
+
+from pbiapi import PowerBIAPIClient
+
+azure_tenant_id = os.environ.get("AZURE_TENANT_ID")
+azure_client_id = os.environ.get("AZURE_CLIENT_ID")
+azure_client_secret = os.environ.get("AZURE_CLIENT_SECRET")
+
 
 def main():
-    pbi_client = PowerBIAPIClient(
-        azure_tenant_id,
-        azure_client_id,
-        azure_client_secret,
-    )
+    pbi_client = PowerBIAPIClient(azure_tenant_id, azure_client_id, azure_client_secret,)
     parser = argparse.ArgumentParser()
-    parser.add_argument("--workspace_name",dest="workspace_name", help="workspace name")
-    parser.add_argument("--ds_name",dest="ds_name", help="ds_name")
-    parser.add_argument("--query",dest="query", help="dax query")
-    args =parser.parse_args()
+    parser.add_argument("--workspace_name", dest="workspace_name", help="workspace name")
+    parser.add_argument("--ds_name", dest="ds_name", help="ds_name")
+    parser.add_argument("--query", dest="query", help="dax query")
+    args = parser.parse_args()
     print(args)
-    query={}
-    query['query']=args.query
-    queries=[]
+    query = {}
+    query["query"] = args.query
+    queries = []
     queries.append(query)
-    serializerSettings={}
-    serializerSettings['includeNulls']= 'true'
-    res=pbi_client.execute_queries_by_name(workspace_name=args.workspace_name, dataset_name=args.ds_name, query_list=queries, serializerSettings=serializerSettings)
+    serializerSettings = {}
+    serializerSettings["includeNulls"] = "true"
+    res = pbi_client.execute_queries_by_name(
+        workspace_name=args.workspace_name,
+        dataset_name=args.ds_name,
+        query_list=queries,
+        serializerSettings=serializerSettings,
+    )
 
     print(res)
 
 
 if __name__ == "__main__":
     main()
-
diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index dc9d238..52ca290 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -1,9 +1,10 @@
 import datetime
+import json
 import logging
 import os
 from typing import Callable, Dict, List, NoReturn, Union
 from urllib import parse
-import json
+
 import requests
 
 from pbiapi.utils import partition
@@ -70,10 +71,17 @@ def get_workspaces(self) -> List:
         self.force_raise_http_error(response)
 
     @staticmethod
-    def find_entity_id_by_name(entity_list: List, name: str, entity_type: str , raise_if_missing: bool = False ,attribute_name_alias: str = "name") -> str:
+    def find_entity_id_by_name(
+        entity_list: List,
+        name: str,
+        entity_type: str,
+        raise_if_missing: bool = False,
+        attribute_name_alias: str = "name",
+        attribute_alias: str = "id",
+    ) -> str:
         for item in entity_list:
             if item[attribute_name_alias] == name:
-                return item["id"]
+                return item[attribute_alias]
         if raise_if_missing:
             raise RuntimeError(f"No {entity_type} was found with the name: '{name}'")
@@ -166,8 +174,6 @@ def get_datasets(self) -> List:
         if response.status_code == HTTP_OK_CODE:
             return response.json()["value"]
 
-
-
     @check_token
     def refresh_dataset_by_id(self, workspace_name: str, dataset_id: str) -> None:
         workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
@@ -430,9 +436,9 @@ def force_raise_http_error(
 
     def get_workspace_and_dataset_id(self, workspace_name: str, dataset_name: str) -> Union:
         workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
-        print ('workspace_id=%s' % workspace_id)
+        print("workspace_id=%s" % workspace_id)
         datasets = self.get_datasets_in_workspace(workspace_name)
-        print ('datasets=%s' % datasets)
+        print("datasets=%s" % datasets)
         dataset_id = self.find_entity_id_by_name(datasets, dataset_name, "dataset", raise_if_missing=True)
 
         return workspace_id, dataset_id
@@ -449,9 +455,8 @@ def get_pipelines(self) -> List:
             logging.error("Failed to fetch pipelines!")
             self.force_raise_http_error(response)
 
-
     @check_token
-    def get_pipeline(self, pipeline_id: str ) -> List:
+    def get_pipeline(self, pipeline_id: str) -> List:
         url = self.base_url + f"pipelines/{pipeline_id}"
         response = requests.get(url, headers=self.headers)
         print(response.json())
@@ -464,10 +469,12 @@ def get_pipeline(self, pipeline_id: str ) -> List:
 
     @check_token
     def get_pipeline_by_name(self, pipeline_name) -> List:
-        pipelines_list=self.get_pipelines()
-        pipeline_id = self.find_entity_id_by_name(pipelines_list, pipeline_name, "pipelines", raise_if_missing=True,attribute_name_alias='displayName' )
-        print('pipeline id: %s' % pipeline_id)
-        return (self.get_pipeline(pipeline_id))
+        pipelines_list = self.get_pipelines()
+        pipeline_id = self.find_entity_id_by_name(
+            pipelines_list, pipeline_name, "pipelines", raise_if_missing=True, attribute_name_alias="displayName"
+        )
+        print("pipeline id: %s" % pipeline_id)
+        return self.get_pipeline(pipeline_id)
 
     @check_token
     def get_pipeline_operations(self, pipeline_id: str) -> List:
@@ -479,39 +486,50 @@ def get_pipeline_operations(self, pipeline_id: str) -> List:
         else:
             logging.error("Failed to fetch pipeline operations!")
             self.force_raise_http_error(response)
+
     @check_token
     def get_pipeline_operations_by_name(self, pipeline_name: str) -> List:
-        pipelines_list=self.get_pipelines()
-        pipeline_id = self.find_entity_id_by_name(pipelines_list, pipeline_name, "pipelines", raise_if_missing=True,attribute_name_alias='displayName' )
-        print('pipeline id: %s' % pipeline_id)
-        return (self.get_pipeline_operations(pipeline_id))
-
+        pipelines_list = self.get_pipelines()
+        pipeline_id = self.find_entity_id_by_name(
+            pipelines_list, pipeline_name, "pipelines", raise_if_missing=True, attribute_name_alias="displayName"
+        )
+        print("pipeline id: %s" % pipeline_id)
+        return self.get_pipeline_operations(pipeline_id)
 
     @check_token
-    def clone_report_by_name(self, workspace_name: str, report_name: str, new_report_name: str , target_work_space_name: str=None, target_model_id: str=None) -> None:
+    def clone_report_by_name(
+        self,
+        workspace_name: str,
+        report_name: str,
+        new_report_name: str,
+        target_work_space_name: str = None,
+        target_model_id: str = None,
+    ) -> None:
         workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
-        workspace_reports=self.get_reports_in_workspace(workspace_name)
-        report_id=self.find_entity_id_by_name(workspace_reports, report_name, "reports", raise_if_missing=True)
+        workspace_reports = self.get_reports_in_workspace(workspace_name)
+        report_id = self.find_entity_id_by_name(workspace_reports, report_name, "reports", raise_if_missing=True)
         url = self.base_url + f"groups/{workspace_id}/reports/{report_id}/Clone"
-        data={}
-        data['Name']=new_report_name
-        if (target_work_space_name != None):
-            target_workspace_id = self.find_entity_id_by_name(self.workspaces, target_work_space_name, "workspace", raise_if_missing=True)
-            data['targetWorkspaceId']= target_workspace_id
-        if (target_model_id != None):
-            data['targetModelId']= target_model_id
-        # data="Name=" + new_report_name
+        data = {}
+        data["Name"] = new_report_name
+        if target_work_space_name != None:
+            target_workspace_id = self.find_entity_id_by_name(
+                self.workspaces, target_work_space_name, "workspace", raise_if_missing=True
+            )
+            data["targetWorkspaceId"] = target_workspace_id
+        if target_model_id != None:
+            data["targetModelId"] = target_model_id
+        # data="Name=" + new_report_name
         response = requests.post(url, data=data, headers=self.headers)
 
         if response.status_code == 200:
             logging.info(f"report {report_id} from workspace {workspace_name}) was cloned ")
-            return (response.json())
+            return response.json()
         else:
             logging.error("Dataset refresh failed!")
             self.force_raise_http_error(response, expected_codes=200)
 
     @check_token
-    def get_dataset_datasources(self, workspace_id,dataset_id) -> List:
+    def get_dataset_datasources(self, workspace_id, dataset_id) -> List:
         url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/datasources"
         response = requests.get(url, headers=self.headers)
@@ -523,12 +541,10 @@ def get_dataset_datasources(self, workspace_id,dataset_id) -> List:
             self.force_raise_http_error(response)
 
     @check_token
-    def get_dataset_datasources_by_name(self, workspace_name,dataset_name) -> List:
+    def get_dataset_datasources_by_name(self, workspace_name, dataset_name) -> List:
         workspace_id, dataset_id = self.get_workspace_and_dataset_id(workspace_name, dataset_name)
-        print('workspace_id: %s , dataset id: %s' % (workspace_id, dataset_id ))
-        return(self.get_dataset_datasources(workspace_id,dataset_id))
-
-
+        print("workspace_id: %s , dataset id: %s" % (workspace_id, dataset_id))
+        return self.get_dataset_datasources(workspace_id, dataset_id)
 
     @check_token
     def update_datasource(self, gateway_id: str, datasource_id: str, user_name: str, password: str):
@@ -536,16 +552,21 @@ def update_datasource(self, gateway_id: str, datasource_id: str, user_name: str,
         url = self.base_url + f"gateways/{gateway_id}/datasources/{datasource_id}"
         headers = {"Content-Type": "application/json", **self.get_auth_header()}
 
-        credentialDetails={"credentialType": "Basic",
+        credentialDetails = {
+            "credentialType": "Basic",
             "encryptedConnection": "Encrypted",
             "encryptionAlgorithm": "None",
             "privacyLevel": "None",
-            "useEndUserOAuth2Credentials": "False"}
+            "useEndUserOAuth2Credentials": "False",
+        }
 
-        credentials={}
-        credentials['credentialData']=[{'name': 'username' , 'value': user_name} ,{'name': 'password', 'value': password} ]
-        credentialDetails["credentials"]=str(credentials)
-        data={'credentialDetails': credentialDetails}
+        credentials = {}
+        credentials["credentialData"] = [
+            {"name": "username", "value": user_name},
+            {"name": "password", "value": password},
+        ]
+        credentialDetails["credentials"] = str(credentials)
+        data = {"credentialDetails": credentialDetails}
         print(data)
@@ -556,35 +577,37 @@ def update_datasource(self, gateway_id: str, datasource_id: str, user_name: str,
             self.force_raise_http_error(response)
 
     @check_token
-    def execute_queries(self, dataset_id: str, query_list: list, serializerSettings: dict) -> None:
+    def execute_queries(self, dataset_id: str, query_list: list, serializerSettings: dict) -> None:
 
         body = {"queries": query_list, "serializerSettings": serializerSettings}
         # Workspace exists, lets add user:
         url = self.base_url + f"datasets/{dataset_id}/executeQueries"
-        print ('url=%s' % url)
+        print("url=%s" % url)
         headers = {"Content-Type": "application/json", **self.get_auth_header()}
-        print ('headers=%s' % headers)
-        print ('json=%s' % json)
+        print("headers=%s" % headers)
+        print("json=%s" % json)
         response = requests.post(url, json=body, headers=headers)
 
         if response.status_code == HTTP_OK_CODE:
             logging.info(f"success execute_queries")
-            return(json.loads(response.text.encode('utf8')))
+            return json.loads(response.text.encode("utf8"))
         else:
             logging.error(f"Failed to execute_queries': {json}")
             self.force_raise_http_error(response)
 
     @check_token
-    def execute_queries_by_name(self, workspace_name: str, dataset_name: str, query_list: list, serializerSettings: dict) -> None:
+    def execute_queries_by_name(
+        self, workspace_name: str, dataset_name: str, query_list: list, serializerSettings: dict
+    ) -> None:
         datasets = self.get_datasets_in_workspace(workspace_name)
         dataset_id = self.find_entity_id_by_name(datasets, dataset_name, "dataset", True)
-        return(self.execute_queries(dataset_id=dataset_id, query_list=query_list, serializerSettings=serializerSettings))
+        return self.execute_queries(dataset_id=dataset_id, query_list=query_list, serializerSettings=serializerSettings)
 
     @check_token
     def bind_to_gateway(self, dataset_Id: str, gateway_id: str) -> None:
-#403: {"Message":"API is not accessible for application"}
+        # 403: {"Message":"API is not accessible for application"}
         url = self.base_url + f"datasets/{dataset_Id}/Default.BindToGateway"
-        gatewayObject={"gatewayObjectId": gateway_id}
+        gatewayObject = {"gatewayObjectId": gateway_id}
         response = requests.post(url, json=gatewayObject, headers=self.headers)
 
         if response.status_code == HTTP_OK_CODE:
@@ -592,3 +615,32 @@ def bind_to_gateway(self, dataset_Id: str, gateway_id: str) -> None:
         else:
             logging.error(f"Takeover of dataset {dataset_Id} failed!")
             self.force_raise_http_error(response)
+
+    @check_token
+    def get_workspace_and_report_id(self, workspace_name: str, report_name: str) -> Union:
+        workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
+        print("workspace_id=%s" % workspace_id)
+        reports = self.get_reports_in_workspace(workspace_name)
+        print("datasets=%s" % reports)
+        report_id = self.find_entity_id_by_name(
+            reports, report_name, "report", raise_if_missing=True, attribute_name_alias="name", attribute_alias="id"
+        )
+        dataset_id = self.find_entity_id_by_name(
+            reports,
+            report_name,
+            "report",
+            raise_if_missing=True,
+            attribute_name_alias="name",
+            attribute_alias="datasetId",
+        )
+
+        return workspace_id, report_id, dataset_id
+
+    def get_dataset_in_workspace(self, workspace_name: str, dataset_id: str) -> List:
+        workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
+
+        datasets_url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}"
+        response = requests.get(datasets_url, headers=self.headers)
+        response.raise_for_status()
+        if response.status_code == HTTP_OK_CODE:
+            return response.json()

From ac0fcd5849c3859e8fb32eec69baab4e6af63d6c Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 13 Sep 2021 10:01:33 +0300
Subject: [PATCH 09/23] remove case sensitivity from entity name comparison

---
 pbiapi/pbiapi.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index 52ca290..42badec 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -80,7 +80,7 @@ def find_entity_id_by_name(
         attribute_alias: str = "id",
     ) -> str:
         for item in entity_list:
-            if item[attribute_name_alias] == name:
+            if item[attribute_name_alias].lower() == name.lower():
                 return item[attribute_alias]
         if raise_if_missing:
             raise RuntimeError(f"No {entity_type} was found with the name: '{name}'")

From dcf2b26721891cb227297e1885f4f2dfb9780f38 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 13 Sep 2021 11:04:36 +0300
Subject: [PATCH 10/23] adding prints for lower space

---
 pbiapi/pbiapi.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index 42badec..75bb564 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -79,7 +79,9 @@ def find_entity_id_by_name(
         attribute_name_alias: str = "name",
         attribute_alias: str = "id",
     ) -> str:
+        print('lower name=%s' % name.lower())
         for item in entity_list:
+            print('item[attribute_name_alias].lower()=%s' % item[attribute_name_alias].lower())
             if item[attribute_name_alias].lower() == name.lower():
                 return item[attribute_alias]
         if raise_if_missing:
From 5ca9bcea95ce5044e757d72a01ca222c548514e3 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 13 Sep 2021 11:11:12 +0300
Subject: [PATCH 11/23] changing the version

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index d4d970c..87c210a 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@
 
 setuptools.setup(
     name="pbiapi",
-    version="0.2.2",
+    version="0.2.4",
     author="Scott Melhop",
     author_email="scott.melhop@gmail.com",
     description="A Python library for working with the Power BI API",

From e01fdbf2af6ece78f08e658b978172ec008fa67f Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 13 Sep 2021 11:13:06 +0300
Subject: [PATCH 12/23] change version

---
 rebuild.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/rebuild.sh b/rebuild.sh
index 9560b02..71aef09 100755
--- a/rebuild.sh
+++ b/rebuild.sh
@@ -1,3 +1,3 @@
 pip uninstall pbiapi --y
 poetry build
-pip install ./dist/pbiapi-0.2.2-py3-none-any.whl
+pip install ./dist/pbiapi-0.2.4-py3-none-any.whl

From 2b5d8d8e4e82210826d53beed6d7561ef96b0059 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Mon, 13 Sep 2021 11:27:36 +0300
Subject: [PATCH 13/23] change version

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 9af0d0f..08c8c32 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "pbiapi"
-version = "0.2.2"
+version = "0.2.4"
 description = "A Python library for working with the Power BI API"
 authors = ["Scott Melhop "]
 repository = "https://github.com/scottmelhop/PowerBI-API-Python"

From f58e326604b2981b95489957c07cb573371137ac Mon Sep 17 00:00:00 2001
From: amitca71
Date: Tue, 14 Sep 2021 13:23:37 +0300
Subject: [PATCH 14/23] add print_all_datasources

---
 pbiapi/pbiapi.py | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)

diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index 75bb564..7c6f446 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -646,3 +646,20 @@ def get_dataset_in_workspace(self, workspace_name: str, dataset_id: str) -> List
         response.raise_for_status()
         if response.status_code == HTTP_OK_CODE:
             return response.json()
+
+    @check_token
+    def get_datasets_in_workspace_by_id(self, workspace_id: str) -> List:
+        datasets_url = self.base_url + f"groups/{workspace_id}/datasets"
+        response = requests.get(datasets_url, headers=self.headers)
+        response.raise_for_status()
+        if response.status_code == HTTP_OK_CODE:
+            return response.json()["value"]
+    def print_all_datasources(self):
+        for ws in self.workspaces:
+            wsname=ws['name']
+            dss=self.get_datasets_in_workspace_by_id(ws['id'])
+            print ('ws name: %s, ws id: %s' % (wsname, ws['id']) )
+            for ds in dss:
+                print ('   dataset=%s datasetId=%s' % (ds['name'], ds['id']))
+                datasource=self.get_dataset_datasources(ws['id'], ds['id'])
+                print ('       datasource: %s' % datasource)
\ No newline at end of file

From c7498b23b9053a7b2fc6d5970ca9ed0229ec2c5b Mon Sep 17 00:00:00 2001
From: amitca71
Date: Thu, 16 Sep 2021 12:43:39 +0300
Subject: [PATCH 15/23] switching to use entity ids when possible

---
 pbiapi/pbiapi.py | 42 ++++++++++++++++++++++++++++++------------
 1 file changed, 32 insertions(+), 10 deletions(-)

diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index 7c6f446..de84a6c 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -79,9 +79,9 @@ def find_entity_id_by_name(
         attribute_name_alias: str = "name",
         attribute_alias: str = "id",
     ) -> str:
-        print('lower name=%s' % name.lower())
+        # print('lower name=%s' % name.lower())
         for item in entity_list:
-            print('item[attribute_name_alias].lower()=%s' % item[attribute_name_alias].lower())
+            # print('item[attribute_name_alias].lower()=%s' % item[attribute_name_alias].lower())
             if item[attribute_name_alias].lower() == name.lower():
                 return item[attribute_alias]
         if raise_if_missing:
@@ -99,7 +99,8 @@ def create_workspace(self, name: str) -> None:
 
         if response.json()["@odata.count"] > 0:
             logging.info("Workspace already exists, no changes made!")
-            return
+#            print(response.json())
+            return response.json()["value"][0]['id']
 
         # Workspace does not exist, lets create it:
         logging.info(f"Trying to create a workspace with name: {name}...")
@@ -109,6 +110,8 @@ def create_workspace(self, name: str) -> None:
         if response.status_code == HTTP_OK_CODE:
             logging.info("Workspace created successfully!")
             self.get_workspaces()  # Update internal state
+            # print(response.json())
+            return response.json()['id']
         else:
             logging.error(f"Failed to create the new workspace: '{name}':")
             self.force_raise_http_error(response)
@@ -167,6 +170,7 @@ def get_datasets_in_workspace(self, workspace_name: str) -> List:
         if response.status_code == HTTP_OK_CODE:
             return response.json()["value"]
 
+
     @check_token
     def get_datasets(self) -> List:
 
@@ -177,8 +181,7 @@ def get_datasets(self) -> List:
             return response.json()["value"]
 
     @check_token
-    def refresh_dataset_by_id(self, workspace_name: str, dataset_id: str) -> None:
-        workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
+    def refresh_dataset_by_id(self, workspace_id: str, dataset_id: str) -> None:
 
         url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/refreshes"
         response = requests.post(url, data="notifyOption=NoNotification", headers=self.headers)
@@ -190,9 +193,10 @@ def refresh_dataset_by_id(self, workspace_name: str, dataset_id: str) -> None:
 
     @check_token
     def refresh_dataset_by_name(self, workspace_name: str, dataset_name: str) -> None:
+        workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
         datasets = self.get_datasets_in_workspace(workspace_name)
         dataset_id = self.find_entity_id_by_name(datasets, dataset_name, "dataset", True)
-        self.refresh_dataset_by_id(workspace_name, dataset_id)
+        self.refresh_dataset_by_id(workspace_id, dataset_id)
 
     @check_token
     def create_push_dataset(self, workspace_name: str, retention_policy: str) -> None:
@@ -386,6 +390,25 @@ def update_parameters_in_dataset(self, workspace_name: str, dataset_name: str, p
             logging.error(f"Parameter update failed for dataset {dataset_name}!")
             self.force_raise_http_error(response)
 
+
+    @check_token
+    def update_parameters_in_dataset_by_id(self, workspace_id: str, dataset_id: str, parameters: list):
+        update_details = {"updateDetails": parameters}
+        url = self.base_url + f"groups/{workspace_id}/datasets/{dataset_id}/UpdateParameters"
+        headers = {"Content-Type": "application/json", **self.get_auth_header()}
+        response = requests.post(url, json=update_details, headers=headers)
+
+        if response.status_code == HTTP_OK_CODE:
+            for parameter in parameters:
+                logging.info(
+                    f"Parameter \"{parameter['name']}\"",
+                    f" updated to \"{parameter['newValue']}\"",
+                    f" in Dataset named '{dataset_id}' in workspace '{workspace_id}'!",
+                )
+        else:
+            logging.error(f"Parameter update failed for dataset {dataset_id}!")
+            self.force_raise_http_error(response)
+
     @check_token
     def get_parameters_in_dataset(self, workspace_name: str, dataset_name: str) -> List:
         workspace_id, dataset_id = self.get_workspace_and_dataset_id(workspace_name, dataset_name)
@@ -438,9 +461,9 @@ def force_raise_http_error(
 
     def get_workspace_and_dataset_id(self, workspace_name: str, dataset_name: str) -> Union:
         workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True)
-        print("workspace_id=%s" % workspace_id)
+        # print("workspace_id=%s" % workspace_id)
         datasets = self.get_datasets_in_workspace(workspace_name)
-        print("datasets=%s" % datasets)
+        # print("datasets=%s" % datasets)
         dataset_id = self.find_entity_id_by_name(datasets, dataset_name, "dataset", raise_if_missing=True)
 
         return workspace_id, dataset_id
@@ -461,7 +484,7 @@ def get_pipelines(self) -> List:
     def get_pipeline(self, pipeline_id: str) -> List:
         url = self.base_url + f"pipelines/{pipeline_id}"
         response = requests.get(url, headers=self.headers)
-        print(response.json())
+        # print(response.json())
         if response.status_code == HTTP_OK_CODE:
             self._workspaces = response.json()
             return self._workspaces
@@ -569,7 +592,6 @@ def update_datasource(self, gateway_id: str, datasource_id: str, user_name: str,
         ]
         credentialDetails["credentials"] = str(credentials)
         data = {"credentialDetails": credentialDetails}
-        print(data)
 
         response = requests.patch(url, headers=headers, json=data)
         if response.status_code == HTTP_OK_CODE:
From b9df0035534b432c6ed4c80fcb1d710abea746da Mon Sep 17 00:00:00 2001
From: amitca71
Date: Thu, 23 Sep 2021 08:30:46 +0300
Subject: [PATCH 16/23] adding functionality

---
 pbiapi/pbiapi.py | 82 ++++++++++++++++++++++++++++++++++++++++++++++--
 1 file changed, 80 insertions(+), 2 deletions(-)

diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index de84a6c..e8e72cb 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -292,6 +292,14 @@ def get_reports_in_workspace(self, workspace_name: str) -> List:
         url = self.base_url + f"groups/{workspace_id}/reports"
         response = requests.get(url, headers=self.headers)
 
+        if response.status_code == HTTP_OK_CODE:
+            return response.json()["value"]
+    @check_token
+    def get_reports_in_workspace_by_id(self, workspace_id: str) -> List:
+
+        url = self.base_url + f"groups/{workspace_id}/reports"
+        response = requests.get(url, headers=self.headers)
+
         if response.status_code == HTTP_OK_CODE:
             return response.json()["value"]
 
@@ -366,7 +374,7 @@ def import_file_into_workspace(
 
             if response.json()["importState"] == "Succeeded":
                 logging.info("Import complete")
-                return
+                return response
             else:
                 logging.info("Import in progress...")
 
@@ -684,4 +692,74 @@ def print_all_datasources(self):
             for ds in dss:
                 print ('   dataset=%s datasetId=%s' % (ds['name'], ds['id']))
                 datasource=self.get_dataset_datasources(ws['id'], ds['id'])
-                print ('       datasource: %s' % datasource)
\ No newline at end of file
+                print ('       datasource: %s' % datasource)
+
+    @staticmethod
+    def find_entity_by_name(
+        entity_list: List,
+        name: str,
+        entity_type: str,
+        raise_if_missing: bool = False,
+        attribute_name_alias: str = "name",
+        attribute_alias: str = "id",
+    ) -> str:
+        for item in entity_list:
+            if item[attribute_name_alias].lower() == name.lower():
+                return item
+        if raise_if_missing:
+            raise RuntimeError(f"No {entity_type} was found with the name: '{name}'")
+
+    def get_report_by_workspace_id_and_report_id(self, workspace_id: str, report_id: str) -> dict:
+
+        url = self.base_url + f"groups/{workspace_id}/reports/{report_id}"
+        response = requests.get(url, headers=self.headers)
+
+        if response.status_code == HTTP_OK_CODE:
+            return response.json()
+
+    @check_token
+    def rebind_report_in_workspace_by_id(self, workspace_id: str, dataset_id: str, report_id: str) -> None:
+
+        url = self.base_url + f"groups/{workspace_id}/reports/{report_id}/Rebind"
+        headers = {"Content-Type": "application/json", **self.get_auth_header()}
+        payload = {"datasetId": dataset_id}
+
+        response = requests.post(url, json=payload, headers=headers)
+        if response.status_code == HTTP_OK_CODE:
+            logging.info(f"Report named '{report_name}' rebound to dataset with name '{dataset_name}'")
+        else:
+            logging.error(f"Failed to rebind report with name '{report_name}' to dataset with name '{dataset_name}'")
+            self.force_raise_http_error(response)
+
+
+    @check_token
+    def clone_report_by_id(
+        self,
+        workspace_id: str,
+        report_id: str,
+        new_report_name: str,
+        target_workspace_id: str = None,
+        target_model_id: str = None,
+    ) -> None:
+        url = self.base_url + f"groups/{workspace_id}/reports/{report_id}/Clone"
+        data = {}
+        data["Name"] = new_report_name
+        if target_workspace_id != None:
+            data["targetWorkspaceId"] = target_workspace_id
+        if target_model_id != None:
+            data["targetModelId"] = target_model_id
+        response = requests.post(url, data=data, headers=self.headers)
+
+        if response.status_code == 200:
+            logging.info(f"report {report_id} from workspace {workspace_id}) was cloned ")
+            return response.json()
+        else:
+            logging.error("Dataset refresh failed!")
+            self.force_raise_http_error(response, expected_codes=200)
+
+
+    @check_token
+    def get_dataset_by_ws_id_and_ds_name(self, workspace_id: str, dataset_name: str) -> None:
+        datasets = self.get_datasets_in_workspace_by_id(workspace_id)
+        dataset = self.find_entity_by_name(datasets, dataset_name, "dataset", True)
+        return (dataset)
\ No newline at end of file

From dd23f85d47e57f86fda719a10b8656d609f2d993 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Wed, 29 Sep 2021 13:07:27 +0300
Subject: [PATCH 17/23] fix rebind_report_in_workspace_by_id

---
 pbiapi/pbiapi.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index e8e72cb..42fecb4 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -726,9 +726,9 @@ def rebind_report_in_workspace_by_id(self, workspace_id: str, dataset_id: str, r
 
         response = requests.post(url, json=payload, headers=headers)
         if response.status_code == HTTP_OK_CODE:
-            logging.info(f"Report named '{report_name}' rebound to dataset with name '{dataset_name}'")
+            logging.info(f"Report named '{report_id}' rebound to dataset with name '{dataset_id}'")
         else:
-            logging.error(f"Failed to rebind report with name '{report_name}' to dataset with name '{dataset_name}'")
+            logging.error(f"Failed to rebind report with name '{report_id}' to dataset with name '{dataset_id}'")
             self.force_raise_http_error(response)

From 4e7582e095efc470f35a64fe7d6dd7e8b9db8ca9 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Thu, 30 Sep 2021 14:03:20 +0300
Subject: [PATCH 18/23] add with id

---
 execute_query_by_id_driver.py | 37 +++++++++++++++++++++++++++++++++++
 1 file changed, 37 insertions(+)
 create mode 100644 execute_query_by_id_driver.py

diff --git a/execute_query_by_id_driver.py b/execute_query_by_id_driver.py
new file mode 100644
index 0000000..0ebf963
--- /dev/null
+++ b/execute_query_by_id_driver.py
@@ -0,0 +1,37 @@
+import argparse
+import os
+from pathlib import Path
+
+from requests.exceptions import HTTPError
+
+from pbiapi import PowerBIAPIClient
+
+azure_tenant_id = os.environ.get("AZURE_TENANT_ID")
+azure_client_id = os.environ.get("AZURE_CLIENT_ID")
+azure_client_secret = os.environ.get("AZURE_CLIENT_SECRET")
+
+
+def main():
+    pbi_client = PowerBIAPIClient(azure_tenant_id, azure_client_id, azure_client_secret,)
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--ds_id", dest="ds_id", help="ds_id")
+    parser.add_argument("--query", dest="query", help="dax query")
+    args = parser.parse_args()
+    print(args)
+    query = {}
+    query["query"] = args.query
+    queries = []
+    queries.append(query)
+    serializerSettings = {}
+    serializerSettings["includeNulls"] = "true"
+    res = pbi_client.execute_queries(
+        dataset_id=args.ds_id,
+        query_list=queries,
+        serializerSettings=serializerSettings,
+    )
+
+    print(res)
+
+
+if __name__ == "__main__":
+    main()
self.base_url + f"groups/{workspace_id}/reports/{report_id}" + response = requests.delete(url, headers=self.headers) + if response.status_code == HTTP_OK_CODE: + logging.info(f"Report named '{report_name}' with id '{report_id}' in workspace '{workspace_id}' deleted successfully!") + else: + logging.error("Report deletion failed!") + self.force_raise_http_error(response) \ No newline at end of file From 651fc97dd1dc4f79208832288c61fa619be69790 Mon Sep 17 00:00:00 2001 From: amit cahanovich Date: Sun, 24 Oct 2021 15:22:07 +0300 Subject: [PATCH 21/23] Update pbiapi.py --- pbiapi/pbiapi.py | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py index 06ed5cf..a949e97 100644 --- a/pbiapi/pbiapi.py +++ b/pbiapi/pbiapi.py @@ -300,7 +300,11 @@ def truncate_table(self, workspace_name: str, dataset_id: str, table_name: str) else: logging.error("Table truncation failed!") self.force_raise_http_error(response) - + @check_token + def is_report_in_workspace(self, workspace_id: str, report_name: str): + reports = self.get_reports_in_workspace_by_id(workspace_id) + report_id_list = self.find_entities_list_id_by_name(reports, report_name, "report") + return (len (report_id_list)>0 ) @check_token def get_reports_in_workspace(self, workspace_name: str) -> List: workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) @@ -355,14 +359,12 @@ def delete_report(self, workspace_name: str, report_name: str) -> None: @check_token def import_file_into_workspace( - self, workspace_name: str, skip_report: bool, file_path: str, display_name: str + self, workspace_name: str, skip_report: bool, file_path: str, display_name: str, name_conflict: str ='CreateOrOverwrite' ) -> None: workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) if not os.path.isfile(file_path): - raise FileNotFoundError(2, f"No such file or directory: '{file_path}'") - - name_conflict = "CreateOrOverwrite" + raise FileNotFoundError(2, f"No such file or directory: '{file_path}'") url = ( self.base_url + f"groups/{workspace_id}/imports?datasetDisplayName={display_name}&nameConflict=" @@ -790,4 +792,4 @@ def delete_reports_by_workspace_id(self, workspace_id: str, report_name: str) -> logging.info(f"Report named '{report_name}' with id '{report_id}' in workspace '{workspace_id}' deleted successfully!") else: logging.error("Report deletion failed!") - self.force_raise_http_error(response) \ No newline at end of file + self.force_raise_http_error(response) From 928a2ec915291e74753950b52281ee99e8b95275 Mon Sep 17 00:00:00 2001 From: amitca71 Date: Sun, 24 Oct 2021 19:07:30 +0300 Subject: [PATCH 22/23] add get_reports_id_by_name_in_workspace update_workspace_report_content --- pbiapi/pbiapi.py | 50 ++++++++++++++++++++++++++++++++++++++++++------ 1 file changed, 44 insertions(+), 6 deletions(-) diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py index a949e97..1dc37ce 100644 --- a/pbiapi/pbiapi.py +++ b/pbiapi/pbiapi.py @@ -300,11 +300,7 @@ def truncate_table(self, workspace_name: str, dataset_id: str, table_name: str) else: logging.error("Table truncation failed!") self.force_raise_http_error(response) - @check_token - def is_report_in_workspace(self, workspace_id: str, report_name: str): - reports = self.get_reports_in_workspace_by_id(workspace_id) - report_id_list = self.find_entities_list_id_by_name(reports, report_name, "report") - return (len (report_id_list)>0 ) + @check_token def 
get_reports_in_workspace(self, workspace_name: str) -> List: workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) @@ -322,6 +318,18 @@ def get_reports_in_workspace_by_id(self, workspace_id: str) -> List: if response.status_code == HTTP_OK_CODE: return response.json()["value"] + @check_token + def is_report_in_workspace(self, workspace_id: str, report_name: str): + reports = self.get_reports_in_workspace_by_id(workspace_id) + report_id_list = self.find_entities_list_id_by_name(reports, report_name, "report") + return (len (report_id_list)>0 ) + + @check_token + def get_reports_id_by_name_in_workspace(self, workspace_id: str, report_name: str): + reports = self.get_reports_in_workspace_by_id(workspace_id) + report_id_list = self.find_entities_list_id_by_name(reports, report_name, "report") + return (report_id_list) + @check_token def rebind_report_in_workspace(self, workspace_name: str, dataset_name: str, report_name: str) -> None: @@ -364,7 +372,8 @@ def import_file_into_workspace( workspace_id = self.find_entity_id_by_name(self.workspaces, workspace_name, "workspace", raise_if_missing=True) if not os.path.isfile(file_path): - raise FileNotFoundError(2, f"No such file or directory: '{file_path}'") + raise FileNotFoundError(2, f"No such file or directory: '{file_path}'") + url = ( self.base_url + f"groups/{workspace_id}/imports?datasetDisplayName={display_name}&nameConflict=" @@ -793,3 +802,32 @@ def delete_reports_by_workspace_id(self, workspace_id: str, report_name: str) -> else: logging.error("Report deletion failed!") self.force_raise_http_error(response) + @check_token + def delete_reports_by_report_id_and_workspace_id(self, workspace_id: str, report_id: str) -> None: + url = self.base_url + f"groups/{workspace_id}/reports/{report_id}" + response = requests.delete(url, headers=self.headers) + if response.status_code == HTTP_OK_CODE: + logging.info(f"Report with id '{report_id}' in workspace '{workspace_id}' deleted successfully!") + else: + logging.error("Report deletion failed!") + self.force_raise_http_error(response) + @check_token + def update_workspace_report_content(self, workspace_id: str, report_id: str, source_report_id: str, source_workspace_id:str , source_type: str= "ExistingReport"): + + source_report_details = { \ + "sourceReport": { \ + "sourceReportId": source_report_id, \ + "sourceWorkspaceId": source_workspace_id \ + }, \ + "sourceType": source_type \ + } + url = self.base_url + f"groups/{workspace_id}/reports/{report_id}/UpdateReportContent" + headers = {"Content-Type": "application/json", **self.get_auth_header()} + response = requests.post(url, json=source_report_details, headers=headers) + + if response.status_code == HTTP_OK_CODE: + logging.info("UpdateReportContent success") + else: + logging.error(f"UpdateReportContent failed for report_id {report_id}!") + self.force_raise_http_error(response) + From 005ffddb3fb4c458ac57a625629d634f40112d93 Mon Sep 17 00:00:00 2001 From: amitca71 Date: Tue, 2 Nov 2021 13:45:07 +0200 Subject: [PATCH 23/23] adding generate_token --- pbiapi/pbiapi.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py index 1dc37ce..4c69103 100644 --- a/pbiapi/pbiapi.py +++ b/pbiapi/pbiapi.py @@ -831,3 +831,23 @@ def update_workspace_report_content(self, workspace_id: str, report_id: str, sou logging.error(f"UpdateReportContent failed for report_id {report_id}!") self.force_raise_http_error(response) + + @check_token + def 
From 005ffddb3fb4c458ac57a625629d634f40112d93 Mon Sep 17 00:00:00 2001
From: amitca71
Date: Tue, 2 Nov 2021 13:45:07 +0200
Subject: [PATCH 23/23] adding generate_token

---
 pbiapi/pbiapi.py | 20 ++++++++++++++++++++
 1 file changed, 20 insertions(+)

diff --git a/pbiapi/pbiapi.py b/pbiapi/pbiapi.py
index 1dc37ce..4c69103 100644
--- a/pbiapi/pbiapi.py
+++ b/pbiapi/pbiapi.py
@@ -831,3 +831,23 @@ def update_workspace_report_content(self, workspace_id: str, report_id: str, sou
             logging.error(f"UpdateReportContent failed for report_id {report_id}!")
             self.force_raise_http_error(response)
 
+
+    @check_token
+    def generate_token(self, dataset_list=None, reports_list=None, target_workspace_list=None,identities_list=None ):
+        scope = { \
+            "datasets": dataset_list, \
+            "reports": reports_list, \
+            "targetWorkspaces": target_workspace_list, \
+            "identities": identities_list
+        }
+
+        url = self.base_url + f"GenerateToken"
+        headers = {"Content-Type": "application/json", **self.get_auth_header()}
+        response = requests.post(url, json=scope, headers=headers)
+
+        if response.status_code == HTTP_OK_CODE:
+            logging.info("GenerateToken success")
+        else:
+            logging.error(f"GenerateToken failed")
+            self.force_raise_http_error(response)
+        return (response)
\ No newline at end of file
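
Taken together, patches 15-22 give id-based building blocks (workspace creation that returns the id, report lookup and cloning by id, and in-place report content updates). The sketch below is a minimal promotion driver in the style of execute_query_driver.py, composed only of methods introduced in this series; it assumes the same AZURE_* environment variables as the other drivers, and the workspace and report names are placeholders rather than values from this repository.

import os
from pbiapi import PowerBIAPIClient

pbi_client = PowerBIAPIClient(
    os.environ.get("AZURE_TENANT_ID"),
    os.environ.get("AZURE_CLIENT_ID"),
    os.environ.get("AZURE_CLIENT_SECRET"),
)

# Resolve ids once, then stick to the id-based calls added in this series.
src_ws_id, report_id, dataset_id = pbi_client.get_workspace_and_report_id("Dev", "Sales Report")
# After patch 15, create_workspace returns the workspace id whether it creates or finds the workspace.
tgt_ws_id = pbi_client.create_workspace("Test")

if pbi_client.is_report_in_workspace(tgt_ws_id, "Sales Report"):
    # Overwrite the existing copy in place instead of cloning a duplicate (patch 22).
    tgt_report_id = pbi_client.get_reports_id_by_name_in_workspace(tgt_ws_id, "Sales Report")[0]
    pbi_client.update_workspace_report_content(tgt_ws_id, tgt_report_id, report_id, src_ws_id)
else:
    # First deployment: clone into the target workspace, bound to the chosen dataset (patch 16).
    pbi_client.clone_report_by_id(
        src_ws_id, report_id, "Sales Report",
        target_workspace_id=tgt_ws_id, target_model_id=dataset_id,
    )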
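
The generate_token wrapper from patch 23 simply posts its scope object to the GenerateToken endpoint and hands back the raw requests.Response. A hedged usage sketch follows; it assumes the request shape where each list entry is an object carrying an id, and the GUIDs and the "token" field of the response body are assumptions about the service contract rather than something this patch series verifies.

# Placeholder GUIDs; real values would come from the lookup helpers above.
datasets = [{"id": "<dataset-guid>"}]
reports = [{"id": "<report-guid>"}]
target_workspaces = [{"id": "<workspace-guid>"}]

response = pbi_client.generate_token(
    dataset_list=datasets,
    reports_list=reports,
    target_workspace_list=target_workspaces,
)
# Assumption: the response body carries the embed token under "token".
embed_token = response.json()["token"]

Because failures go through force_raise_http_error, callers can rely on the raised HTTPError rather than inspecting the returned response for error states.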