[PECO-1803] Databricks sqlalchemy is split into this folder #40
Workflow file for this run

name: Integration
on:
  pull_request:
    types: [ opened, synchronize, reopened ]
    branches: [ main, PECO-1803 ]
  workflow_dispatch:
jobs:
  build_and_test:
    runs-on: ubuntu-latest
    environment: azure-prod
    env:
      DATABRICKS_SERVER_HOSTNAME: ${{ secrets.DATABRICKS_SERVER_HOSTNAME }}
      DATABRICKS_HTTP_PATH: ${{ secrets.DATABRICKS_HTTP_PATH }}
      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
      DATABRICKS_CATALOG: ${{ secrets.DATABRICKS_CATALOG }}
      DATABRICKS_SCHEMA: ${{ secrets.DATABRICKS_SCHEMA }}
      DATABRICKS_USER: ${{ secrets.DATABRICKS_USER }}
    steps:
      # Check out this repository
      - name: Checkout Repository
        uses: actions/checkout@v3
      # Check out the dependency repository at the PECO-1803 branch
      - name: Checkout Dependency Repository
        uses: actions/checkout@v3
        with:
          repository: jprakash-db/databricks-sql-python
          path: databricks_sql_python
          ref: jprakash-db/PECO-1803
      # Set up Python
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.9'
      # Install Poetry and create a virtual environment
      - name: Install Poetry
        run: |
          python -m pip install --upgrade pip
          pip3 install poetry
          python3 -m venv venv
      # Build this repository's package and install it into the virtual environment
      - name: Install Dependencies
        run: |
          source venv/bin/activate
          poetry build
          pip3 install dist/*.whl
      # Install the pre-built .whl from the dependency repository checkout
      - name: Install Dependency Package
        run: |
          source venv/bin/activate
          pip3 install databricks_sql_python/databricks_sql_connector_core/dist/*.whl
      # Install pytest and list the installed packages for debugging
      - name: Install Test Dependencies
        run: |
          source venv/bin/activate
          pip3 list
          pip3 install pytest
      # Run the local test suite with pytest
      - name: Main Tests
        run: |
          source venv/bin/activate
          pytest src/databricks_sqlalchemy/test_local
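For reference, the secrets exported in the env block above are the values an integration test would need to open a live connection through the SQLAlchemy dialect. The sketch below is illustrative only, not part of this repository's test_local suite, and it assumes the token-based connection URL format documented for databricks-sqlalchemy:

import os

from sqlalchemy import create_engine, text

# Read the connection details that the workflow exposes as environment variables.
host = os.environ["DATABRICKS_SERVER_HOSTNAME"]
http_path = os.environ["DATABRICKS_HTTP_PATH"]
token = os.environ["DATABRICKS_TOKEN"]
catalog = os.environ["DATABRICKS_CATALOG"]
schema = os.environ["DATABRICKS_SCHEMA"]

# Build the token-authenticated URL understood by the databricks dialect.
engine = create_engine(
    f"databricks://token:{token}@{host}"
    f"?http_path={http_path}&catalog={catalog}&schema={schema}"
)

# Run a trivial query to confirm the dialect and connector are wired up.
with engine.connect() as connection:
    assert connection.execute(text("SELECT 1")).scalar() == 1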