Add oneMKL for CI and align environment setup script with bundle
CuiYifeng committed Jul 4, 2024
1 parent 1e838be commit 797a737
Showing 3 changed files with 8 additions and 8 deletions.
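Every hunk below makes the same substitution: the compiler-only vars.sh is replaced by the bundle-level oneapi-vars.sh so that oneMKL is also picked up in CI. As a rough, optional sanity check one could run interactively, assuming the pytorch-gpu-dev-0.5 bundle exports the usual MKLROOT variable (this check is not part of the commit):

    # Hedged sketch: confirm the bundle script brings oneMKL into the environment.
    # Assumes the bundle sets MKLROOT, as the standalone oneMKL vars scripts do.
    source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
    if [ -n "${MKLROOT:-}" ]; then
        echo "oneMKL environment found at: ${MKLROOT}"
    else
        echo "MKLROOT not set; oneMKL may be missing from the bundle" >&2
    fi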
2 changes: 1 addition & 1 deletion .github/workflows/inductor_xpu_e2e_ci.yml
@@ -66,7 +66,7 @@ jobs:
cd ../pytorch
pip install -r requirements.txt
export USE_XPU=1
- source /opt/intel/oneapi/compiler/latest/env/vars.sh
+ source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"}
python setup.py bdist_wheel
pip install --force-reinstall dist/*.whl
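The unchanged CMAKE_PREFIX_PATH line in this hunk relies on bash default expansion: CONDA_PREFIX is used when it is set, otherwise the prefix is derived from the location of the conda binary. A tiny standalone illustration of the ${VAR:-fallback} form (the variable name here is hypothetical):

    # ${VAR:-fallback} expands to $VAR when set and non-empty, else to the fallback.
    unset DEMO_PREFIX
    echo "${DEMO_PREFIX:-/fallback/path}"    # prints /fallback/path
    DEMO_PREFIX=/opt/conda
    echo "${DEMO_PREFIX:-/fallback/path}"    # prints /opt/conda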
4 changes: 2 additions & 2 deletions .github/workflows/inductor_xpu_e2e_nightly.yml
@@ -113,7 +113,7 @@ jobs:
echo "TRANSFORMERS_VERSION=$(<.ci/docker/ci_commit_pins/huggingface.txt)" |tee -a "${GITHUB_OUTPUT}" >> "${GITHUB_ENV}"
echo "TIMM_COMMIT_ID=$(<.ci/docker/ci_commit_pins/timm.txt)" |tee -a "${GITHUB_OUTPUT}" >> "${GITHUB_ENV}"
echo "MODEL_ONLY_NAME=${{ inputs.model }}" |tee -a "${GITHUB_OUTPUT}" >> "${GITHUB_ENV}"
- source /opt/intel/oneapi/compiler/latest/env/vars.sh
+ source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
echo "DRIVER_VERSION=$(dkms status 2>&1 |grep 'intel-i915-dkms' |sed 's/.*\///;s/,.*//')" |tee -a "${GITHUB_OUTPUT}" >> "${GITHUB_ENV}"
echo "BUNDLE_VERSION=$(dpcpp --version 2>&1 |grep 'DPC++/C++' |sed 's/.*(//;s/).*//')" |tee -a "${GITHUB_OUTPUT}" >> "${GITHUB_ENV}"
. /etc/os-release
@@ -133,7 +133,7 @@ jobs:
cd ../pytorch
pip install -r requirements.txt
export USE_XPU=1
- source /opt/intel/oneapi/compiler/latest/env/vars.sh
+ source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"}
python setup.py bdist_wheel
pip install --force-reinstall dist/*.whl
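Besides sourcing the new bundle script, this nightly workflow records BUNDLE_VERSION by slicing the dpcpp --version banner with grep and sed. A rough illustration of that pipeline using a made-up sample line (the real compiler output may differ):

    # Hypothetical sample of `dpcpp --version` output, for illustration only.
    sample='Intel(R) oneAPI DPC++/C++ Compiler 2024.1.0 (2024.1.0.20240308)'
    # Greedy `.*(` strips everything up to the last '(' ; `).*` drops the trailing text.
    echo "$sample" | grep 'DPC++/C++' | sed 's/.*(//;s/).*//'
    # -> 2024.1.0.20240308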
10 changes: 5 additions & 5 deletions .github/workflows/pull.yml
@@ -48,7 +48,7 @@ jobs:
cd ../pytorch
pip install -r requirements.txt
export USE_XPU=1
- source /opt/intel/oneapi/compiler/latest/env/vars.sh
+ source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"}
python setup.py bdist_wheel
pip install --force-reinstall dist/*.whl
@@ -58,23 +58,23 @@ jobs:
if: ${{ hashFiles('examples/') != '' }}
run: |
xpu-smi discovery
- source /opt/intel/oneapi/compiler/latest/env/vars.sh
+ source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
source activate xpu_op_${ZE_AFFINITY_MASK}
cd examples
pip install pytest
timeout 8000 pytest -v
- name: Run XPU OP Extended UT
if: ${{ hashFiles('test/xpu/') != '' }}
run: |
- source /opt/intel/oneapi/compiler/latest/env/vars.sh
+ source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
source activate xpu_op_${ZE_AFFINITY_MASK}
export PYTORCH_TEST_WITH_SLOW=1
cd ../pytorch/third_party/torch-xpu-ops/test/xpu/extended/
timeout 10000 python run_test_with_skip.py
- name: Run XPU OP UT
if: ${{ hashFiles('test/xpu/') != '' }}
run: |
- source /opt/intel/oneapi/compiler/latest/env/vars.sh
+ source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
source activate xpu_op_${ZE_AFFINITY_MASK}
export PYTORCH_ENABLE_XPU_FALLBACK=1
export PYTORCH_TEST_WITH_SLOW=1
@@ -87,7 +87,7 @@ jobs:
timeout 10000 python run_test_with_only.py
- name: Run Torch XPU UT
run: |
- source /opt/intel/oneapi/compiler/latest/env/vars.sh
+ source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
source activate xpu_op_${ZE_AFFINITY_MASK}
cd ../pytorch
TEST_REPORTS_DIR=$(pwd)/test/test-reports
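All of the test steps touched above wrap their runners in timeout so a hung suite cannot stall the job indefinitely. A quick reminder of how coreutils timeout behaves when the limit is hit:

    # `timeout DURATION COMMAND` kills COMMAND after DURATION seconds
    # and exits with status 124 when the time limit is reached.
    timeout 2 sleep 10
    echo "exit code: $?"    # prints 124 because sleep was cut short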
