[TPU] Avoid initializing TPU runtime in is_tpu (#7763)

Woosuk Kwon 2024-08-21 21:31:49 -07:00 committed by GitHub
parent aae74ef95c
commit eeee1c3b1a
1 changed file with 4 additions and 2 deletions

@@ -8,8 +8,10 @@ current_platform: Platform
 is_tpu = False
 try:
-    import torch_xla.core.xla_model as xm
-    xm.xla_device(devkind="TPU")
+    # While it's technically possible to install libtpu on a non-TPU machine,
+    # this is a very uncommon scenario. Therefore, we assume that libtpu is
+    # installed if and only if the machine has TPUs.
+    import libtpu  # noqa: F401
     is_tpu = True
 except Exception:
     pass
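
For context, a minimal sketch of how the detection block reads after this change. The surrounding module layout is assumed; only the lines touched by the diff are taken from the patch itself.

    # Sketch of the TPU-detection block after this commit (surrounding
    # module contents assumed). Importing libtpu only checks that the
    # package is installed; it does not initialize the TPU runtime the
    # way xm.xla_device(devkind="TPU") did.
    is_tpu = False
    try:
        # While it's technically possible to install libtpu on a non-TPU
        # machine, this is a very uncommon scenario. Therefore, we assume
        # that libtpu is installed if and only if the machine has TPUs.
        import libtpu  # noqa: F401
        is_tpu = True
    except Exception:
        pass

The previous check had to construct an XLA device, which starts the TPU runtime as a side effect; probing for the libtpu package keeps the check import-only and avoids that initialization cost.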