@@ -17,14 +17,14 @@ def _manage_memory_units(data_in_bytes, units):
 
 def get_object_size(obj, units="Mb"):
     """Calculate the size of an object.
-    
+
     Args:
         obj (obj or str or array): Object.
         units (str): Units [bytes, Kb, Mb, Gb]
-    
+
     Returns:
         float: Size of the object.
-    
+
     Examples:
         >>> get_object_size(7, "bytes")
         28
@@ -41,30 +41,31 @@ def get_pandas_df_size(obj, units="Mb"):
     Args:
         obj (pd.DataFrame): Dataframe.
         units (str): Units [bytes, Kb, Mb, Gb]
-    
+
     Returns:
         float: Size of the object.
-    
+
     Examples:
-        >>> df = pd.DataFrame({"a":[1]*100, "b":[0.5]*100})
-        >>> get_pandas_df_size(df, "Kb")
-        1.6875
+        >>> import pandas as pd
+        >>> df = pd.DataFrame({"a":[1]*100, "b":[0.5]*100})
+        >>> get_pandas_df_size(df, "Kb")  # doctest: +ELLIPSIS
+        1.6...
     """
-    obj_bytes = obj.memory_usage(deep=True).sum()
+    obj_bytes = obj.memory_usage(deep=False).sum()
     return _manage_memory_units(obj_bytes, units)
 
 
 def get_ram_memory(units="Mb"):
     """Get the RAM memory of the current machine.
-    
+
     Args:
         units (str): Units [bytes, Kb, Mb, Gb]
-    
+
     Returns:
         float: Memory size.
-    
+
     Examples:
-        >>> num = get_ram_memory("Gb")  
+        >>> num = get_ram_memory("Gb")
         >>> num >= 2
         True
     """
@@ -74,82 +75,88 @@ def get_ram_memory(units="Mb"):
 
 def get_total_gpu_memory(units="Mb"):
     """Get the memory of the GPUs in the system
-    
+
     Returns:
         list: List of strings with the GPU memory in Mb
-    
+
     Examples:
         >>> get_total_gpu_memory() #doctest: +SKIP
         [16280.875]
     """
     try:
         import numba
+
         memory_list = []
         for gpu in numba.cuda.gpus:
             with gpu:
                 meminfo = numba.cuda.current_context().get_memory_info()
                 memory_list.append(_manage_memory_units(meminfo[1], units))
         return memory_list
-    except Exception: # numba.cuda.cudadrv.error.CudaSupportError:
+    except Exception:  # numba.cuda.cudadrv.error.CudaSupportError:
         return []
 
 
 def get_free_gpu_memory(units="Mb"):
     """Get the memory of the GPUs in the system
-    
+
     Returns:
         list: List of strings with the GPU memory in Mb
-    
+
     Examples:
         >>> get_free_gpu_memory() #doctest: +SKIP
         [15987.8125]
 
     """
     try:
         import numba
+
         memory_list = []
         for gpu in numba.cuda.gpus:
             with gpu:
                 meminfo = numba.cuda.current_context().get_memory_info()
                 memory_list.append(_manage_memory_units(meminfo[0], units))
         return memory_list
-    except Exception: # numba.cuda.cudadrv.error.CudaSupportError:
+    except Exception:  # numba.cuda.cudadrv.error.CudaSupportError:
         return []
 
 
 def clear_memory_all_gpus():
     """Clear memory of all GPUs.
-    
+
     Examples:
         >>> clear_memory_all_gpus() #doctest: +SKIP
         No CUDA available
 
     """
     try:
         import numba
+
         for gpu in numba.cuda.gpus:
             with gpu:
                 numba.cuda.current_context().deallocations.clear()
-    except Exception: # numba.cuda.cudadrv.errorCudaSupportError:
+    except Exception:  # numba.cuda.cudadrv.errorCudaSupportError:
         print("No CUDA available")
 
 
 def clear_memory_gpu_id(id):
     """Clear memory of all GPUs.
-    
+
     Args:
         id (int): GPU id.
-    
+
     Examples:
         >>> clear_memory_gpu_id(0) #doctest: +SKIP
         No CUDA available
     """
     try:
         import numba
+
         for gpu in numba.cuda.gpus:
             numba.cuda.select_device(gpu.id)
             numba.cuda.close()
-    except Exception: # numba.cuda.cudadrv.error.CudaSupportError:
+    except Exception:  # numba.cuda.cudadrv.error.CudaSupportError:
         print("No CUDA available")
     except IndexError:
-        raise ValueError("GPU id should be between 0 and {}".format(len(numba.cuda.gpus) - 1))
+        raise ValueError(
+            "GPU id should be between 0 and {}".format(len(numba.cuda.gpus) - 1)
+        )
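For reference, a minimal usage sketch of the helpers touched in this commit. The import path `memory` is an assumption for illustration only (the actual module name is not shown in this diff), and the printed values will vary by machine.

# Hypothetical usage sketch; "memory" as the module name is an assumption.
import pandas as pd

from memory import (
    get_object_size,
    get_pandas_df_size,
    get_ram_memory,
    get_total_gpu_memory,
    get_free_gpu_memory,
)

# Size of an in-memory Python object, reported in bytes.
print(get_object_size(7, "bytes"))  # e.g. 28

# Shallow memory footprint of a DataFrame, reported in kilobytes.
df = pd.DataFrame({"a": [1] * 100, "b": [0.5] * 100})
print(get_pandas_df_size(df, "Kb"))  # e.g. 1.6...

# Total system RAM, reported in gigabytes.
print(get_ram_memory("Gb"))

# Per-GPU total and free memory in megabytes; both return [] when CUDA is unavailable.
print(get_total_gpu_memory("Mb"))
print(get_free_gpu_memory("Mb"))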