GPU memory available on Google Colab
# Symlink nvidia-smi onto the PATH and install the helper libraries.
!ln -sf /opt/bin/nvidia-smi /usr/bin/nvidia-smi
!pip install gputil psutil humanize

import psutil
import humanize
import os
import GPUtil as GPU

GPUs = GPU.getGPUs()
# XXX: Colab assigns at most one GPU, and even that isn't guaranteed.
gpu = GPUs[0]

def printm():
    """Print free system RAM, this process's resident set size, and GPU memory stats."""
    process = psutil.Process(os.getpid())
    print("Gen RAM Free: " + humanize.naturalsize(psutil.virtual_memory().available),
          " | Proc size: " + humanize.naturalsize(process.memory_info().rss))
    print("GPU RAM Free: {0:.0f}MB | Used: {1:.0f}MB | Util {2:3.0f}% | Total {3:.0f}MB".format(
        gpu.memoryFree, gpu.memoryUsed, gpu.memoryUtil * 100, gpu.memoryTotal))

printm()
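
Because a Colab session may have no GPU attached at all (see the XXX note above), a minimal sketch that guards against an empty getGPUs() list before indexing, assuming gputil is already installed:

import GPUtil

gpus = GPUtil.getGPUs()
if not gpus:
    # No GPU allocated to this session (e.g. runtime type set to "None").
    print("No GPU available; switch the Colab runtime type to GPU.")
else:
    gpu = gpus[0]
    print("{}: {:.0f}MB free of {:.0f}MB".format(gpu.name, gpu.memoryFree, gpu.memoryTotal))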