[TPU][Bugfix] Use XLA rank for persistent cache path (#8137)

This commit is contained in:
Woosuk Kwon
2024-09-03 18:35:33 -07:00
committed by GitHub
parent d4db9f53c8
commit 61f4a93d14
2 changed files with 3 additions and 2 deletions

View File

@@ -59,7 +59,7 @@ First, install the dependencies:
$ export DATE="20240828"
$ export TORCH_VERSION="2.5.0"
$ pip install https://storage.googleapis.com/pytorch-xla-releases/wheels/tpuvm/torch-${TORCH_VERSION}.dev${DATE}-cp310-cp310-linux_x86_64.whl
-$ pip3 install https://storage.googleapis.com/pytorch-xla-releases/wheels/tpuvm/torch_xla-${TORCH_VERSION}.dev${DATE}-cp310-cp310-linux_x86_64.whl
+$ pip install https://storage.googleapis.com/pytorch-xla-releases/wheels/tpuvm/torch_xla-${TORCH_VERSION}.dev${DATE}-cp310-cp310-linux_x86_64.whl
$ # Install JAX and Pallas.
$ pip install torch_xla[tpu] -f https://storage.googleapis.com/libtpu-releases/index.html