From aea15c024e253b1cf8aa9dc7c28855952c1fc4fb Mon Sep 17 00:00:00 2001
From: botanical
Date: Thu, 22 Feb 2024 23:36:13 +0000
Subject: [PATCH] =?UTF-8?q?Deploy=20preview=20for=20PR=20125=20?=
 =?UTF-8?q?=F0=9F=9B=AB?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../dataset-ingestion/catalog-ingestion.html |  2 +-
 pr-preview/pr-125/search.json                |  2 +-
 pr-preview/pr-125/sitemap.xml                | 88 +++++++++----
 3 files changed, 46 insertions(+), 46 deletions(-)

diff --git a/pr-preview/pr-125/contributing/dataset-ingestion/catalog-ingestion.html b/pr-preview/pr-125/contributing/dataset-ingestion/catalog-ingestion.html
index 3802f0d2..e58a3506 100644
--- a/pr-preview/pr-125/contributing/dataset-ingestion/catalog-ingestion.html
+++ b/pr-preview/pr-125/contributing/dataset-ingestion/catalog-ingestion.html
@@ -769,7 +769,7 @@

3. Get token

# base url for the workflows api # experimental / subject to change in the future -base_url = "https://dev-api.delta-backend.com" +base_url = "https://dev.openveda.cloud/api/workflows" # endpoint to get the token from token_url = f"{base_url}/token" diff --git a/pr-preview/pr-125/search.json b/pr-preview/pr-125/search.json index e9200c88..b8813e25 100644 --- a/pr-preview/pr-125/search.json +++ b/pr-preview/pr-125/search.json @@ -110,7 +110,7 @@ "href": "contributing/dataset-ingestion/catalog-ingestion.html#step-iv-publication", "title": "Catalog Ingestion", "section": "STEP IV: Publication", - "text": "STEP IV: Publication\nThe publication process involves 3 steps:\n\n[VEDA] Publishing to the development STAC catalog https://dev.openveda.cloud/api/stac\n[EIS] Reviewing the collection/items published to the dev STAC catalog\n[VEDA] Publishing to the staging STAC catalog https://staging-stac.delta-backend.com\n\nTo use the VEDA Ingestion API to schedule ingestion/publication of the data follow these steps:\n\n1. Obtain credentials from a VEDA team member\nAsk a VEDA team member to create Cognito credentials (username and password) for VEDA authentication.\n\n\n2. Export username and password\nexport username=\"johndoe\"\nexport password=\"xxxx\"\n\n\n3. Get token\n# Required imports\nimport os\nimport requests\n\n# Pull username and password from environment variables\nusername = os.environ.get(\"username\")\npassword = os.environ.get(\"password\")\n\n# base url for the workflows api\n# experimental / subject to change in the future\nbase_url = \"https://dev-api.delta-backend.com\"\n\n# endpoint to get the token from\ntoken_url = f\"{base_url}/token\"\n\n# authentication credentials to be passed to the token_url\nbody = {\n \"username\": username,\n \"password\": password,\n}\n\n# request token\nresponse = requests.post(token_url, data=body)\nif not response.ok:\n raise Exception(\"Couldn't obtain the token. Make sure the username and password are correct.\")\nelse:\n # get token from response\n token = response.json().get(\"AccessToken\")\n # prepare headers for requests\n headers = {\n \"Authorization\": f\"Bearer {token}\"\n }\n\n\n4. 
Ingest the dataset\nThen, use the code snippet below to publish the dataset.\n# url for dataset validation / publication\nvalidate_url = f\"{base_url}/dataset/validate\"\n\npublish_url = f\"{base_url}/dataset/publish\"\n\n# prepare the body of the request,\nbody = json.load(open(\"dataset-definition.json\"))\n\n# Validate the data definition using the /validate endpoint\nvalidation_response = requests.post(\n validate_url,\n headers=headers,\n json=body\n)\n\n# look at the response\nvalidation_response.raise_for_status()\n\n# If the validation is successful, publish the dataset using /publish endpoint\npublish_response = requests.post(\n publish_url,\n headers=headers,\n json=body\n)\n\nif publish_response.ok:\n print(\"Success\")\n\n\nCheck the status of the execution\n# the id of the execution\n# should be available in the response of workflow execution request\nexecution_id = \"xxx\"\n# url for execution status\nexecution_status_url = f\"{workflow_execution_url}/{execution_id}\"\n# make the request\nresponse = requests.get(\n execution_status_url,\n headers=headers,\n)\nif response.ok:\n print(response.json())", + "text": "STEP IV: Publication\nThe publication process involves 3 steps:\n\n[VEDA] Publishing to the development STAC catalog https://dev.openveda.cloud/api/stac\n[EIS] Reviewing the collection/items published to the dev STAC catalog\n[VEDA] Publishing to the staging STAC catalog https://staging-stac.delta-backend.com\n\nTo use the VEDA Ingestion API to schedule ingestion/publication of the data follow these steps:\n\n1. Obtain credentials from a VEDA team member\nAsk a VEDA team member to create Cognito credentials (username and password) for VEDA authentication.\n\n\n2. Export username and password\nexport username=\"johndoe\"\nexport password=\"xxxx\"\n\n\n3. Get token\n# Required imports\nimport os\nimport requests\n\n# Pull username and password from environment variables\nusername = os.environ.get(\"username\")\npassword = os.environ.get(\"password\")\n\n# base url for the workflows api\n# experimental / subject to change in the future\nbase_url = \"https://dev.openveda.cloud/api/workflows\"\n\n# endpoint to get the token from\ntoken_url = f\"{base_url}/token\"\n\n# authentication credentials to be passed to the token_url\nbody = {\n \"username\": username,\n \"password\": password,\n}\n\n# request token\nresponse = requests.post(token_url, data=body)\nif not response.ok:\n raise Exception(\"Couldn't obtain the token. Make sure the username and password are correct.\")\nelse:\n # get token from response\n token = response.json().get(\"AccessToken\")\n # prepare headers for requests\n headers = {\n \"Authorization\": f\"Bearer {token}\"\n }\n\n\n4. 
Ingest the dataset\nThen, use the code snippet below to publish the dataset.\n# url for dataset validation / publication\nvalidate_url = f\"{base_url}/dataset/validate\"\n\npublish_url = f\"{base_url}/dataset/publish\"\n\n# prepare the body of the request,\nbody = json.load(open(\"dataset-definition.json\"))\n\n# Validate the data definition using the /validate endpoint\nvalidation_response = requests.post(\n validate_url,\n headers=headers,\n json=body\n)\n\n# look at the response\nvalidation_response.raise_for_status()\n\n# If the validation is successful, publish the dataset using /publish endpoint\npublish_response = requests.post(\n publish_url,\n headers=headers,\n json=body\n)\n\nif publish_response.ok:\n print(\"Success\")\n\n\nCheck the status of the execution\n# the id of the execution\n# should be available in the response of workflow execution request\nexecution_id = \"xxx\"\n# url for execution status\nexecution_status_url = f\"{workflow_execution_url}/{execution_id}\"\n# make the request\nresponse = requests.get(\n execution_status_url,\n headers=headers,\n)\nif response.ok:\n print(response.json())", "crumbs": [ "Contributing", "Dataset Ingestion", diff --git a/pr-preview/pr-125/sitemap.xml b/pr-preview/pr-125/sitemap.xml index 399a8fdf..353c9fb0 100644 --- a/pr-preview/pr-125/sitemap.xml +++ b/pr-preview/pr-125/sitemap.xml @@ -2,178 +2,178 @@ https://nasa-impact.github.io/veda-docs/contributing/docs-and-notebooks.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/contributing/dashboard-configuration/discovery-configuration.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/contributing/dataset-ingestion/file-preparation.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/contributing/dataset-ingestion/catalog-ingestion.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/contributing/dataset-ingestion/index.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/notebooks/veda-operations/stac-item-creation.html - 2024-02-22T16:43:39.109Z + 2024-02-22T23:33:21.175Z https://nasa-impact.github.io/veda-docs/notebooks/veda-operations/stac-collection-creation.html - 2024-02-22T16:43:39.081Z + 2024-02-22T23:33:21.147Z https://nasa-impact.github.io/veda-docs/notebooks/datasets/air-quality-covid.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/notebooks/datasets/volcano-so2-monitoring.html - 2024-02-22T16:43:38.449Z + 2024-02-22T23:33:20.515Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/downsample-zarr.html - 2024-02-22T16:43:38.485Z + 2024-02-22T23:33:20.551Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/open-and-plot.html - 2024-02-22T16:43:38.685Z + 2024-02-22T23:33:20.751Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/visualize-multiple-times.html - 2024-02-22T16:43:38.721Z + 2024-02-22T23:33:20.791Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/timeseries-stac-api.html - 2024-02-22T16:43:38.689Z + 2024-02-22T23:33:20.755Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/hls-visualization.html - 2024-02-22T16:43:38.485Z + 2024-02-22T23:33:20.555Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/no2-map-plot.html - 2024-02-22T16:43:38.649Z + 2024-02-22T23:33:20.719Z 
https://nasa-impact.github.io/veda-docs/notebooks/templates/template-using-the-raster-api.html - 2024-02-22T16:43:38.733Z + 2024-02-22T23:33:20.799Z https://nasa-impact.github.io/veda-docs/notebooks/tutorials/mapping-fires.html - 2024-02-22T16:43:39.077Z + 2024-02-22T23:33:21.143Z https://nasa-impact.github.io/veda-docs/notebooks/tutorials/zonal-statistics-validation.html - 2024-02-22T16:43:39.081Z + 2024-02-22T23:33:21.147Z https://nasa-impact.github.io/veda-docs/notebooks/tutorials/gif-generation.html - 2024-02-22T16:43:38.733Z + 2024-02-22T23:33:20.799Z https://nasa-impact.github.io/veda-docs/services/data-store.html - 2024-02-22T16:43:39.109Z + 2024-02-22T23:33:21.175Z https://nasa-impact.github.io/veda-docs/services/dashboard.html - 2024-02-22T16:43:39.109Z + 2024-02-22T23:33:21.175Z https://nasa-impact.github.io/veda-docs/external-resources.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/index.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/services/index.html - 2024-02-22T16:43:39.109Z + 2024-02-22T23:33:21.175Z https://nasa-impact.github.io/veda-docs/services/apis.html - 2024-02-22T16:43:39.109Z + 2024-02-22T23:33:21.175Z https://nasa-impact.github.io/veda-docs/services/jupyterhub.html - 2024-02-22T16:43:39.109Z + 2024-02-22T23:33:21.175Z https://nasa-impact.github.io/veda-docs/notebooks/tutorials/netcdf-to-cog-cmip6.html - 2024-02-22T16:43:39.077Z + 2024-02-22T23:33:21.143Z https://nasa-impact.github.io/veda-docs/notebooks/tutorials/stac_ipyleaflet.html - 2024-02-22T16:43:39.077Z + 2024-02-22T23:33:21.143Z https://nasa-impact.github.io/veda-docs/notebooks/templates/template-accessing-the-data-directly.html - 2024-02-22T16:43:38.733Z + 2024-02-22T23:33:20.799Z https://nasa-impact.github.io/veda-docs/notebooks/index.html - 2024-02-22T16:43:38.449Z + 2024-02-22T23:33:20.515Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/list-collections.html - 2024-02-22T16:43:38.649Z + 2024-02-22T23:33:20.719Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/download-assets.html - 2024-02-22T16:43:38.449Z + 2024-02-22T23:33:20.515Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/visualize-zarr.html - 2024-02-22T16:43:38.733Z + 2024-02-22T23:33:20.799Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/timeseries-rioxarray-stackstac.html - 2024-02-22T16:43:38.685Z + 2024-02-22T23:33:20.755Z https://nasa-impact.github.io/veda-docs/notebooks/quickstarts/intake.html - 2024-02-22T16:43:38.649Z + 2024-02-22T23:33:20.719Z https://nasa-impact.github.io/veda-docs/notebooks/datasets/nceo-biomass-statistics.html - 2024-02-22T16:43:38.445Z + 2024-02-22T23:33:20.515Z https://nasa-impact.github.io/veda-docs/notebooks/datasets/ocean-npp-timeseries-analysis.html - 2024-02-22T16:43:38.449Z + 2024-02-22T23:33:20.515Z https://nasa-impact.github.io/veda-docs/notebooks/veda-operations/generate-cmip6-kerchunk-historical.html - 2024-02-22T16:43:39.081Z + 2024-02-22T23:33:21.147Z https://nasa-impact.github.io/veda-docs/notebooks/veda-operations/publish-cmip6-kerchunk-stac.html - 2024-02-22T16:43:39.081Z + 2024-02-22T23:33:21.147Z https://nasa-impact.github.io/veda-docs/contributing/index.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/contributing/dataset-ingestion/stac-item-conventions.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z 
https://nasa-impact.github.io/veda-docs/contributing/dataset-ingestion/stac-collection-conventions.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/contributing/dashboard-configuration/index.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z https://nasa-impact.github.io/veda-docs/contributing/dashboard-configuration/dataset-configuration.html - 2024-02-22T16:43:38.417Z + 2024-02-22T23:33:20.487Z
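
Note on the change above: the only substantive edit in this preview is the workflows API base URL, which moves from https://dev-api.delta-backend.com to https://dev.openveda.cloud/api/workflows in both the rendered catalog-ingestion page and the search index; the sitemap entries only pick up new lastmod timestamps. For reference, the token / validate / publish / status-check snippets embedded in the updated page can be combined into one short script against the new base URL. The sketch below is a minimal consolidation, not the documented page verbatim: it assumes the endpoints behave exactly as described there, it adds the json import that the documented snippets omit, and, because the page never defines workflow_execution_url, the status-check URL used here is a hypothetical placeholder that would need to be replaced with the real execution-status endpoint.

# Consolidated sketch of the documented ingestion workflow (assumptions noted above).
import json
import os

import requests

# base url for the workflows api (the value introduced by this PR)
# experimental / subject to change in the future
base_url = "https://dev.openveda.cloud/api/workflows"

# 1. exchange the exported Cognito credentials for a token
username = os.environ.get("username")
password = os.environ.get("password")
response = requests.post(f"{base_url}/token", data={"username": username, "password": password})
if not response.ok:
    raise Exception("Couldn't obtain the token. Make sure the username and password are correct.")
token = response.json().get("AccessToken")
headers = {"Authorization": f"Bearer {token}"}

# 2. validate the dataset definition, then publish it
with open("dataset-definition.json") as f:
    body = json.load(f)
requests.post(f"{base_url}/dataset/validate", headers=headers, json=body).raise_for_status()
publish_response = requests.post(f"{base_url}/dataset/publish", headers=headers, json=body)
if publish_response.ok:
    print("Success")

# 3. optionally poll the execution status; the execution id should come back in the
#    workflow execution response. The page does not define workflow_execution_url,
#    so the value below is a placeholder, not a documented endpoint.
workflow_execution_url = f"{base_url}/<execution-endpoint>"  # hypothetical placeholder
execution_id = "xxx"
status_response = requests.get(f"{workflow_execution_url}/{execution_id}", headers=headers)
if status_response.ok:
    print(status_response.json())

Running the sketch end to end requires the username and password environment variables from step 2 of the page and a dataset-definition.json file in the working directory.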