From 95ead2a055afe0387b18eb7af1c1b973304b7553 Mon Sep 17 00:00:00 2001
From: Logan Adams <114770087+loadams@users.noreply.github.com>
Date: Thu, 5 Dec 2024 12:16:46 -0800
Subject: [PATCH] Pin transformers version in cpu-torch-latest due to
 multiprocessing error. (#6823)

This is a copy of https://github.com/microsoft/DeepSpeed/pull/6820 for
the cpu-torch-latest tests.

This PR will revert/fix these:
https://github.com/microsoft/DeepSpeed/pull/6822
---
 .github/workflows/cpu-torch-latest.yml | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/.github/workflows/cpu-torch-latest.yml b/.github/workflows/cpu-torch-latest.yml
index 0125fa50bc14..51bc60c2c2ae 100644
--- a/.github/workflows/cpu-torch-latest.yml
+++ b/.github/workflows/cpu-torch-latest.yml
@@ -37,6 +37,15 @@ jobs:
           python -c "import torch; print('torch:', torch.__version__, torch)"
           python -c "import torch; print('CUDA available:', torch.cuda.is_available())"
 
+      - name: Install transformers
+        run: |
+          git clone https://github.com/huggingface/transformers
+          cd transformers
+          # if needed switch to the last known good SHA until transformers@master is fixed
+          git checkout 6c3f168b3
+          git rev-parse --short HEAD
+          pip install .
+
       - name: Install deepspeed
         run: |
           pip install .[dev,autotuning]
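
For reference, the new step above pins transformers by cloning the repository and
checking out the known-good SHA. A minimal sketch of an equivalent pin using pip's
VCS install syntax (an alternative illustration, not what this patch does; the step
name is hypothetical, the SHA is the one from the patch):

      - name: Install transformers (pinned)
        run: |
          # Install transformers directly at the pinned commit in one step.
          pip install git+https://github.com/huggingface/transformers@6c3f168b3

Once transformers@master is fixed, dropping the @<sha> suffix (or restoring the
original step) would return the job to tracking the latest commit.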