Skip to content

Commit

Permalink
mkl abi=1
Browse files Browse the repository at this point in the history
  • Loading branch information
mengfei25 committed Jul 26, 2024
1 parent 813403b commit 120cdec
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 7 deletions.
6 changes: 5 additions & 1 deletion .github/workflows/_linux_ut.yml
Original file line number Diff line number Diff line change
@@ -33,7 +33,10 @@ on:
jobs:
Torch-XPU-UT-Tests:
runs-on: ${{ inputs.runner }}
timeout-minutes: 900
timeout-minutes: 9000
env:
USE_STATIC_MKL: 1
ZE_AFFINITY_MASK: 0
steps:
- name: Checkout torch-xpu-ops
uses: actions/checkout@v4
@@ -67,6 +70,7 @@ jobs:
cd ../pytorch
pip install -r requirements.txt
export USE_XPU=1
export _GLIBCXX_USE_CXX11_ABI=1
source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"}
WERROR=1 python setup.py bdist_wheel
Expand Down
14 changes: 8 additions & 6 deletions .github/workflows/nightly_ondemand.yml
Original file line number Diff line number Diff line change
@@ -9,7 +9,7 @@ on:
pytorch:
required: false
type: string
default: 'main'
default: 'gh/guangyey/48/orig'
description: Pytorch branch/commit
keep_torch_xpu_ops:
required: false
@@ -19,7 +19,7 @@ on:
ut:
required: false
type: string
default: 'torch_xpu'
default: 'op_example,op_extended,op_ut,torch_xpu'
description: UT scope. `op_example,op_extended,op_ut,torch_xpu`. Delimiter is comma
triton:
required: false
@@ -72,18 +72,19 @@ jobs:
ut: ${{ github.event_name == 'schedule' && 'op_example,op_extended,op_ut,torch_xpu' || inputs.ut }}
pytorch: ${{ github.event_name == 'schedule' && 'main' || inputs.pytorch }}
python: ${{ github.event_name == 'schedule' && '3.10' || inputs.python }}
runner: linux.idc.xpu
runner: e2e_internal

Linux-Nightly-Ondemand-E2E-Tests:
runs-on: pvc_e2e
runs-on: e2e_internal
# Don't run on forked repos
if: github.repository_owner == 'intel'
timeout-minutes: 900
timeout-minutes: 9000
env:
pytorch: ${{ github.event_name == 'schedule' && 'main' || inputs.pytorch }}
keep_torch_xpu_ops: ${{ github.event_name == 'schedule' && 'false' || inputs.keep_torch_xpu_ops }}
ut: ${{ github.event_name == 'schedule' && 'op_example,op_extended,op_ut,torch_xpu' || inputs.ut }}
python: ${{ github.event_name == 'schedule' && '3.10' || inputs.python }}
USE_STATIC_MKL: 1
outputs:
TORCH_BRANCH_ID: ${{ steps.pinned.outputs.TORCH_BRANCH_ID }}
TORCH_COMMIT_ID: ${{ steps.pinned.outputs.TORCH_COMMIT_ID }}
@@ -165,6 +166,7 @@ jobs:
cd ../pytorch
pip install -r requirements.txt
export USE_XPU=1
export _GLIBCXX_USE_CXX11_ABI=1
source /opt/intel/oneapi/pytorch-gpu-dev-0.5/oneapi-vars.sh
export CMAKE_PREFIX_PATH=${CONDA_PREFIX:-"$(dirname $(which conda))/../"}
python setup.py bdist_wheel
@@ -245,7 +247,7 @@ jobs:

Tests-Failure-And-Report:
if: always()
runs-on: pvc_e2e
runs-on: e2e_internal
permissions:
issues: write
env:
Expand Down

0 comments on commit 120cdec

Please sign in to comment.