# Exemplo n.º 1
# 0
def memoize(fn, paths):
    """Cache the result of ``fn()`` on disk as an HDF5 file.

    paths: sequence of path components; every element but the last is a
    directory (created on demand), the last element is the cache filename.
    On a cache hit the stored data is read back; on a miss ``fn()`` is
    computed and written atomically (pid-suffixed temp file, then rename).
    Returns the data wrapped in ``jsdict`` in both cases.
    """
    cwd = os.getcwd()

    def change_to_target_dir():
        # Descend into (creating as needed) each directory component.
        for dirname in paths[:-1]:  # renamed from ``dir`` (shadowed builtin)
            try:
                os.mkdir(dirname)
            except OSError:
                # Most likely the directory already exists; best-effort create.
                pass
            os.chdir(dirname)

    filename = paths[-1]
    change_to_target_dir()
    try:
        if os.path.exists(filename):
            # FIX: the cache-hit path previously returned the raw payload
            # while the compute path returned jsdict(data); wrap both the
            # same way so callers see a consistent type.
            return jsdict(hdf5.read(filename))
    finally:
        # Always restore the original working directory, even on read errors.
        os.chdir(cwd)

    data = fn()
    change_to_target_dir()
    try:
        # Atomic publish: write to a pid-keyed temp file, then rename over
        # the target so concurrent readers never see a partial file.
        tmp = '%s.pid.%d.tmp' % (filename, os.getpid())
        hdf5.write(tmp, data)
        os.rename(tmp, filename)
    finally:
        os.chdir(cwd)

    return jsdict(data)
# Exemplo n.º 2
# 0
def process_data_sub_job(settings, filename_in_fmt, filename_out_fmt, id,
                         num_jobs):
    """Process worker ``id``'s share of the input files.

    Sweeps indices id+1, id+1+num_jobs, id+1+2*num_jobs, ... (1-based input
    numbering, 0-based output numbering), reading each input via the mat
    reader, running process_data on it, and writing the result atomically to
    filename_out_fmt % (i - 1).  Stops at the first missing input file.
    Returns the number of files counted as processed (including files that
    already existed and were skipped), or 0 if the very first input for this
    worker is missing.
    """
    pid = os.getpid()
    # NOTE(review): ``target`` is not defined in this scope (not a parameter
    # and not visible in this file) -- presumably supplied by the original
    # module context; confirm before relying on this function as-is.
    reader = mat_reader(target, settings.data_dir)

    num_processed = 0
    # Strided sweep so num_jobs workers partition the index space disjointly.
    for i in range(id + 1, sys.maxsize, num_jobs):
        out_index = i - 1
        filename_in = filename_in_fmt % i
        # Output naming is optional: callers may pass filename_out_fmt=None
        # to process without persisting results.
        filename_out = filename_out_fmt % out_index if filename_out_fmt is not None else None
        # pid-suffixed temp name enables an atomic rename-into-place below.
        filename_out_temp = '%s.pid.%d.tmp' % (
            filename_out, pid) if filename_out is not None else None

        # Skip (but still count) outputs that already exist from a prior run.
        if filename_out is not None and os.path.exists(filename_out):
            num_processed += 1
            continue

        if not reader.exists(filename_in):
            if i == id + 1:
                # This worker's very first input is missing: treat as failure.
                print('Could not find file', reader.filename(filename_in))
                return 0
            # Otherwise we have simply run past the end of the input files.
            break

        print('Runner %d processing %s' % (id, reader.filename(filename_in)))

        segment = reader.read(filename_in)
        data = process_data(segment)
        hdf5.write(filename_out_temp, data)

        # Atomic publish: readers never observe a partially written output.
        os.rename(filename_out_temp, filename_out)

        num_processed += 1

    return num_processed
def process_data_sub_job(settings, filename_in_fmt, filename_out_fmt, id, num_jobs):
    """Process worker ``id``'s share of the input files (Python 3 port).

    FIX: this copy used Python 2-only constructs -- ``print`` statements,
    ``xrange`` and ``sys.maxint`` -- which are syntax/name errors on
    Python 3.  Converted to ``print(...)``, ``range`` and ``sys.maxsize``,
    matching the other copy of this function in the file.  Behavior is
    otherwise unchanged: sweep indices id+1, id+1+num_jobs, ... and stop at
    the first missing input.  Returns the number of files counted as
    processed, or 0 if this worker's first input is missing.
    """
    pid = os.getpid()
    # NOTE(review): ``target`` is not defined in this scope (inherited from
    # the original snippet) -- confirm where it comes from before use.
    reader = mat_reader(target, settings.data_dir)

    num_processed = 0
    # Strided sweep so num_jobs workers partition the index space disjointly.
    for i in range(id + 1, sys.maxsize, num_jobs):
        out_index = i - 1
        filename_in = filename_in_fmt % i
        filename_out = filename_out_fmt % out_index if filename_out_fmt is not None else None
        # pid-suffixed temp name enables an atomic rename-into-place below.
        filename_out_temp = '%s.pid.%d.tmp' % (filename_out, pid) if filename_out is not None else None

        # Skip (but still count) outputs that already exist from a prior run.
        if filename_out is not None and os.path.exists(filename_out):
            num_processed += 1
            continue

        if not reader.exists(filename_in):
            if i == id + 1:
                # This worker's very first input is missing: treat as failure.
                print('Could not find file', reader.filename(filename_in))
                return 0
            # Otherwise we have simply run past the end of the input files.
            break

        print('Runner %d processing %s' % (id, reader.filename(filename_in)))

        segment = reader.read(filename_in)
        data = process_data(segment)
        hdf5.write(filename_out_temp, data)

        # Atomic publish: readers never observe a partially written output.
        os.rename(filename_out_temp, filename_out)

        num_processed += 1

    return num_processed
# Exemplo n.º 4
# 0
            except OSError, e:
                pass
            os.chdir(dir)

    change_to_target_dir()
    filename = paths[-1]
    if os.path.exists(filename):
        data = hdf5.read(filename)
        os.chdir(cwd)
        return data

    os.chdir(cwd)
    data = fn()
    change_to_target_dir()
    tmp = '%s.pid.%d.tmp' % (filename, os.getpid())
    hdf5.write(tmp, data)
    os.rename(tmp, filename)
    os.chdir(cwd)

    return jsdict(data)


# Fast process-if-not-yet-processed method for training data
def check_training_data_loaded(settings, target, pipeline, quiet=False):
    if not load_pipeline_data(
            settings, target, 'preictal', pipeline, check_only=True,
            quiet=quiet):
        load_pipeline_data(settings,
                           target,
                           'preictal',
                           pipeline,
# Exemplo n.º 5
# 0
            except OSError, e:
                pass
            os.chdir(dir)

    change_to_target_dir()
    filename = paths[-1]
    if os.path.exists(filename):
        data = hdf5.read(filename)
        os.chdir(cwd)
        return data

    os.chdir(cwd)
    data = fn()
    change_to_target_dir()
    tmp = '%s.pid.%d.tmp' % (filename, os.getpid())
    hdf5.write(tmp, data)
    os.rename(tmp, filename)
    os.chdir(cwd)

    return jsdict(data)


# Fast process-if-not-yet-processed method for training data
def check_training_data_loaded(settings, target, pipeline, quiet=False):
    """Ensure both preictal and interictal training data have been processed.

    For each ictal class, first probe with check_only=True; only if the data
    is not already available is the full (check_only=False) load performed.
    """
    for ictal_class in ('preictal', 'interictal'):
        already_loaded = load_pipeline_data(settings, target, ictal_class,
                                            pipeline, check_only=True,
                                            quiet=quiet)
        if not already_loaded:
            load_pipeline_data(settings, target, ictal_class, pipeline,
                               check_only=False, quiet=quiet)


# Fast process-if-not-yet-processed method for test data