Created June 8, 2025 07:17
#!/bin/bash
set -e

# colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# logging helpers
log() {
    echo -e "${BLUE}[$(date +'%Y-%m-%d %H:%M:%S')] $1${NC}"
}

error() {
    echo -e "${RED}[ERROR] $1${NC}"
}

success() {
    echo -e "${GREEN}[SUCCESS] $1${NC}"
}

warning() {
    echo -e "${YELLOW}[WARNING] $1${NC}"
}
# detect whether we are on a GCE VM by probing the metadata server
check_gcp_environment() {
    if curl -s -H "Metadata-Flavor: Google" http://metadata.google.internal/computeMetadata/v1/instance/ > /dev/null 2>&1; then
        success "Running on Google Cloud Platform"
        return 0
    else
        warning "Not running on Google Cloud Platform"
        return 1
    fi
}
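
# Optional sketch (an addition, not called from main): when the metadata server
# is reachable, it can also report instance details such as the machine type,
# which is handy for confirming you are actually on a TPU VM. The metadata path
# below is the standard GCE one.
show_machine_type() {
    curl -s -H "Metadata-Flavor: Google" \
        "http://metadata.google.internal/computeMetadata/v1/instance/machine-type"
    echo
}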
# report the XLA devices visible to torch_xla, if any
check_tpu_availability() {
    log "Checking for TPUs"
    if python3 -c "import torch_xla.runtime as xr; import torch_xla.core.xla_model as xm; print(f'TPU devices: {xm.get_xla_supported_devices()}'); print(f'Device count: {xr.world_size()}')" 2>/dev/null; then
        success "TPU detected"
    else
        warning "No TPU found or PyTorch/XLA not properly configured"
    fi
}
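
# Optional sketch (an addition, not called from main): run a tiny tensor op on
# the XLA device as an end-to-end smoke test. Assumes torch and torch_xla are
# already installed.
run_tpu_smoke_test() {
    python3 - << 'PYEOF'
import torch
import torch_xla.core.xla_model as xm

device = xm.xla_device()             # first available XLA device
x = torch.randn(2, 2, device=device)
y = x @ x                            # forces a real computation on the device
print(f"Smoke test OK, result shape: {tuple(y.cpu().shape)}")
PYEOF
}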
# install torch_xla with TPU extras (quoted so the brackets survive shell globbing)
install_pytorch_xla() {
    log "Installing PyTorch with TPU support"
    pip3 install --upgrade 'torch_xla[tpu]' -f https://storage.googleapis.com/libtpu-releases/index.html > /dev/null 2>&1
    success "PyTorch with TPU support installed"
}
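
# Note (a suggestion, not from the original script): in practice torch and
# torch_xla should usually be pinned to matching releases; the version number
# below is illustrative only:
#   pip3 install 'torch~=2.4.0' 'torch_xla[tpu]~=2.4.0' \
#       -f https://storage.googleapis.com/libtpu-releases/index.html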
setup_environment() {
    log "Setting up TPU environment variables"
    # append only once, so repeated runs don't duplicate the block in ~/.bashrc
    if ! grep -qs "PyTorch TPU Environment Variables" ~/.bashrc; then
        cat >> ~/.bashrc << 'EOF'
# PyTorch TPU Environment Variables
export TPU_NAME="local"
export XLA_USE_BF16=1
export PYTHONPATH="${PYTHONPATH}:${HOME}"
EOF
    fi
    success "Environment variables configured"
}
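
# Note (an addition): XLA_USE_BF16=1 forces bf16 for float tensors; on recent
# torch_xla releases this variable is deprecated in favor of converting the
# model explicitly, e.g. model.to(torch.bfloat16).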
create_aliases() {
    log "Setting up aliases"
    # append only once; the alias imports torch_xla.runtime explicitly, since
    # `import torch_xla.core.xla_model as xm` alone does not bind `torch_xla`
    if ! grep -qs "pytorch-tpu-info" ~/.bashrc; then
        cat >> ~/.bashrc << 'EOF'
alias pytorch-tpu-info="python3 -c 'import torch_xla.runtime as xr; import torch_xla.core.xla_model as xm; print(f\"XLA devices: {xm.get_xla_supported_devices()}\"); print(f\"World size: {xr.world_size()}\")'"
EOF
    fi
    success "Aliases created"
}
main() {
    log "Starting PyTorch TPU setup"

    # Check if we're on GCP
    if check_gcp_environment; then
        log "Proceeding with GCP TPU setup"
    else
        warning "Not on GCP - some TPU features may not work"
        read -p "Continue anyway? (y/n): " -n 1 -r
        echo
        if [[ ! $REPLY =~ ^[Yy]$ ]]; then
            exit 1
        fi
    fi

    install_pytorch_xla
    setup_environment
    create_aliases

    log "Checking TPU availability"
    check_tpu_availability

    success "TPU setup completed"
    echo
    log "Next steps:"
    echo "1. Restart your shell or run: source ~/.bashrc"
    echo "2. Check TPU info: pytorch-tpu-info"
}

main "$@"
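
To run the setup, save the script on the TPU VM and execute it; the filename here is just an example:

    bash tpu_setup.sh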
The gist also includes a small Python snippet that verifies the torch_xla installation and device visibility:
try:
    import torch_xla
    import torch_xla.runtime as xr
    import torch_xla.core.xla_model as xm
    print('`torch_xla` imported successfully')

    try:
        device = xm.xla_device()
        print(f'TPU device: {device}')
        print(f'TPU device type: {xm.xla_device_hw(device)}')
        print(f'World size: {xr.world_size()}')
    except Exception as e:
        print(f'TPU device not available: {e}')
        print('This is normal if not running on TPU hardware')
except ImportError as e:
    print(f'Failed to import torch_xla: {e}')
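
The snippet can be saved and run directly to check an environment (the filename is illustrative):

    python3 check_tpu.py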