Example #1
0
def gpu_mem_free():
    """
    Return the number of megabytes of memory currently free on the
    GPU that Theano is using.
    """
    global cuda
    if cuda is None:
        # Lazy import: only touch the CUDA backend when first queried.
        from theano.sandbox import cuda
    free_bytes = cuda.mem_info()[0]
    return free_bytes / 1024. / 1024
Example #2
0
def gpu_mem_free():
    """
    Query the CUDA backend for the amount of free GPU memory.

    Returns
    -------
    float
        Free device memory, in megabytes.
    """
    global cuda
    if cuda is None:
        from theano.sandbox import cuda
    megs = cuda.mem_info()[0] / 1024. / 1024
    return megs
Example #3
0
def gpu_mem_free():
    """
    Returns
    -------
    megs_free : float
        Number of megabytes of memory free on the GPU used by Theano
    """
    global cuda
    # Resolve the CUDA module lazily the first time we are called.
    if cuda is None:
        from theano.sandbox import cuda
    free_bytes = cuda.mem_info()[0]
    megs_free = free_bytes / 1024. / 1024
    return megs_free
Example #4
0
def get_gpu_fit_size(X, already_alloc_mem=0):
    """
    Compute how many rows of each data split fit in GPU memory.

    Parameters
    ----------
    X : dict
        Maps 'train'/'valid'/'test' to array-likes exposing ``shape``
        and ``nbytes`` (e.g. numpy arrays).
    already_alloc_mem : int, optional
        Bytes already allocated for this data that can be reused; they
        are counted on top of the reported free memory.

    Returns
    -------
    dict
        Maps each split name to the number of rows that fit on the
        GPU. When not running on a GPU device, all rows are reported
        as fitting.
    """
    d_types = ['train', 'valid', 'test']
    # Default: everything fits.
    gpu_size = dict((d_type, X[d_type].shape[0]) for d_type in d_types)
    # Was ``theano.theano.config`` — the config object lives directly
    # on the ``theano`` module; the doubled attribute raises
    # AttributeError unless an accidental self-import happens to exist.
    if theano.config.device.startswith('gpu'):
        total_mem_required = sum(X[d_type].nbytes for d_type in d_types)
        free_mem, total_size = cuda.mem_info()
        # Memory we already hold for this data counts as reusable.
        free_mem += already_alloc_mem
        # Keep a 10% safety margin; also covers annotations that are
        # not counted here.
        free_mem *= 0.9
        if free_mem < total_mem_required:
            red_ratio = float(free_mem) / total_mem_required
            for d_type in d_types:
                gpu_size[d_type] = int(X[d_type].shape[0] * red_ratio)
    return gpu_size
Example #5
0
def gpu_mem_free():
    """Return the free memory on Theano's GPU, in megabytes."""
    global cuda
    if cuda is None:
        from theano.sandbox import cuda
    bytes_free = cuda.mem_info()[0]
    return bytes_free / 1024. / 1024
Example #6
0
#!/usr/bin/python -i
"""
This does manage to store data on the GPU.
"""

import cPickle
import gzip
import numpy
import os
import theano
import theano.sandbox.cuda as cuda
import theano.tensor as T
import urllib

# Report free GPU memory once at import time (2**20 bytes per MB);
# Python 2 print statement.
print "%.1fMB free GPU memory" % (cuda.mem_info()[0] / (2.0**20))

# Contains pickled Theano data for digit recognition
MNIST = "http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz"
# NOTE(review): the string below is a stray module-level string, not a
# docstring for anything; it appears intended to describe data_path().
"""
Gives us a local data path for the data available at the particular URL source.
"""


def data_path(source):
    _, fname = os.path.split(source)
    answer = os.path.abspath(os.path.expanduser("~/data/" + fname))
    if not os.path.isfile(answer):
        # We need to download it
        print "Downloading data from " + source
        urllib.urlretrieve(source, answer)
    return answer
Example #7
0
def gpu_mem_free():
    """How many megabytes are free on the GPU Theano is using."""
    global cuda
    if cuda is None:
        # Deferred import keeps CPU-only runs from loading CUDA.
        from theano.sandbox import cuda
    mem_info = cuda.mem_info()
    return mem_info[0] / 1024. / 1024
Example #8
0
#!/usr/bin/python -i
"""
This does manage to store data on the GPU.
"""

import cPickle
import gzip
import numpy
import os
import theano
import theano.sandbox.cuda as cuda
import theano.tensor as T
import urllib
 
# Report free GPU memory once at import time (2**20 bytes per MB);
# Python 2 print statement.
print "%.1fMB free GPU memory" % (cuda.mem_info()[0] / (2.0 ** 20))


# Contains pickled Theano data for digit recognition
MNIST = "http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz"

# NOTE(review): the string below is a stray module-level string, not a
# docstring for anything; it appears intended to describe data_path().
"""
Gives us a local data path for the data available at the particular URL source.
"""
def data_path(source):
  _, fname = os.path.split(source)
  answer = os.path.abspath(os.path.expanduser("~/data/" + fname))
  if not os.path.isfile(answer):
    # We need to download it
    print "Downloading data from " + source
    urllib.urlretrieve(source, answer)
  return answer