-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path01d_test_tf2_GPU.py
More file actions
32 lines (22 loc) · 1.19 KB
/
01d_test_tf2_GPU.py
File metadata and controls
32 lines (22 loc) · 1.19 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
import tensorflow as tf
# If you want to use tensorflow with GPU computations you have to install compatible version of CUDA and cuDNN
# for tensorflow 2.11 you need CUDA 11.2 and cuDNN 8.1
# https://www.tensorflow.org/install/source#gpu
# HOW TO INSTALL CUDA IN WINDOWS 10
# https://towardsdatascience.com/how-to-finally-install-tensorflow-gpu-on-windows-10-63527910f255
if __name__ == '__main__':
    # Smoke-test that TensorFlow can see a GPU and run a computation on it.
    #
    # tf.test.is_gpu_available() is deprecated since TF 2.1 (and removed in
    # newer releases); tf.config.list_physical_devices('GPU') is the
    # documented replacement, so query the device list once and reuse it.
    gpus = tf.config.list_physical_devices('GPU')
    print(bool(gpus))  # True when at least one GPU is visible to TensorFlow
    print("Num GPUs Available: ", len(gpus))
    # Create some tensors and multiply them; TensorFlow places the matmul
    # on the GPU automatically when one is available.
    a = tf.constant([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
    b = tf.constant([[1.0, 2.0], [3.0, 4.0], [5.0, 6.0]])
    c = tf.matmul(a, b)
    print(c)
#import tensorflow as tf
#from tensorflow.python.client import device_lib
#print("Num GPUs Available: ", len(tf.config.list_physical_devices('GPU')))
#device_lib.list_local_devices()
# docker run -it --rm --gpus all peterpirogtf/rllib210:gpu python -c "import tensorflow as tf;print(tf.test.is_gpu_available())"