apiVersion: v1
kind: Pod
metadata:
  name: pytorch-gpu-pod-example
spec:
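  # With restartPolicy: Never, the test script runs to completion once and is not restarted.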
  restartPolicy: Never
  containers:
  - name: pytorch-gpu-container
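    # ROCm-enabled PyTorch image from Docker Hub; provides python3 with torch preinstalled.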
    image: rocm/pytorch:latest
    command:
    - python3
    - "-c"
    - |
      import torch

      # On ROCm builds of PyTorch, the torch.cuda API is backed by HIP, so AMD GPUs
      # are detected and addressed through the same 'cuda' device string.
      if torch.cuda.is_available():
          print(f"GPU is available. Device count: {torch.cuda.device_count()}")
          print(f"Device name: {torch.cuda.get_device_name(0)}")
          # Small tensor addition on the GPU to confirm compute works end to end.
          x = torch.ones(3, 3, device='cuda')
          y = torch.ones(3, 3, device='cuda') * 2
          z = x + y
          print(f"Result of tensor addition on GPU: {z}")
      else:
          print("No GPU available.")
    resources:
      limits:
        amd.com/gpu: 1  # Request 1 AMD GPU
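# Usage sketch (assuming the manifest is saved locally as, e.g., pytorch-gpu-pod.yaml):
#   kubectl apply -f pytorch-gpu-pod.yaml
#   kubectl logs pytorch-gpu-pod-example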