Check whether PyTorch can use a GPU
# Diagnostic script: report which device PyTorch will use and, when CUDA
# is available, show basic GPU memory statistics.
import torch

# Select the compute device: GPU when CUDA is available, else fall back to CPU.
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print('Using device:', device)
print()

# Additional info is only meaningful when actually running on CUDA.
if device.type == 'cuda':
    gib = 1024 ** 3  # bytes per GiB, hoisted for the two conversions below
    print(torch.cuda.get_device_name(0))
    print('Memory Usage:')
    print('Allocated:', round(torch.cuda.memory_allocated(0) / gib, 1), 'GB')
    print('Cached: ', round(torch.cuda.memory_reserved(0) / gib, 1), 'GB')
Check whether TensorFlow can use a GPU
# Diagnostic script: report whether TensorFlow sees a GPU and its device name.
import tensorflow as tf

# `tf.test.is_gpu_available()` is deprecated in TF2; the supported replacement
# is to list physical GPU devices via tf.config. `bool(...)` keeps the printed
# output a True/False value, matching the old behavior.
print(bool(tf.config.list_physical_devices('GPU')))
print(tf.test.gpu_device_name())
# Diagnostic script: enumerate local devices and print only the GPUs.
from tensorflow.python.client import device_lib

# List all devices on the local machine.
local_device_protos = device_lib.list_local_devices()

# Print only the GPU devices. A plain loop replaces the original
# side-effect-only list comprehension, which built a throwaway list of None.
for device_proto in local_device_protos:
    if device_proto.device_type == 'GPU':
        print(device_proto)