-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathtest.py
40 lines (34 loc) · 1.22 KB
/
test.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
import tensorflow as tf
import torch
import onnxruntime
def check_tensorflow_gpu():
    """Report whether TensorFlow can see a GPU, printing each device's name.

    Prints a fallback message when no GPU is visible. Returns None.
    """
    # tf.test.is_gpu_available() is deprecated (TF >= 2.1) and creates a
    # session just to probe devices; tf.config.list_physical_devices is the
    # supported replacement and already yields the list we print below.
    physical_devices = tf.config.list_physical_devices('GPU')
    if physical_devices:
        print("TensorFlow GPU support is available.")
        for device in physical_devices:
            print(f"GPU Name: {device.name}")
    else:
        print("TensorFlow GPU not found. Using CPU.")
def check_pytorch_gpu():
    """Print whether PyTorch has a usable CUDA device and, if so, its name."""
    if not torch.cuda.is_available():
        print("PyTorch GPU not found. Using CPU.")
        return
    print("PyTorch GPU support is available.")
    # Device index 0 is the default CUDA device.
    print(f"GPU Name: {torch.cuda.get_device_name(0)}")
def check_onnx_gpu():
    """Print the device the installed onnxruntime build targets and, for GPU
    builds, the available execution providers."""
    build_device = onnxruntime.get_device()
    print(build_device)
    if build_device == 'GPU':
        print("ONNX GPU support is available.")
        # Providers list shows which execution backends this build can use.
        providers = onnxruntime.get_available_providers()
        print(f"Available Execution Providers: {providers}")
    else:
        print("ONNX GPU not found. Using CPU.")
if __name__ == "__main__":
    # Run each framework probe in turn; every probe after the first is
    # preceded by a blank line, matching the original output exactly.
    probes = (
        ("TensorFlow", check_tensorflow_gpu),
        ("PyTorch", check_pytorch_gpu),
        ("ONNX", check_onnx_gpu),
    )
    for index, (label, probe) in enumerate(probes):
        prefix = "\n" if index else ""
        print(f"{prefix}Checking {label} GPU support:")
        probe()