def task_validate_rawdata_file(pk, **kwargs):
    """validates rawdata file - that is an archive holding voltage traces from
    extracellular recordings to be a data to be analysed with spike sorting methods

    :type pk: int
    :param pk: pk of `Asset` entity

    :returns: bool -- True if file validates, False else. Processing
    log, including errors, will be written to the `Datafile` entity.
    """

    # init and checks
    valid = False
    logger = Logger.get_logger(StringIO())
    try:
        obj = Asset.objects.get(id=pk)
        assert obj.kind == "rd_file"
        # NOTE(review): `df` is unused in the visible code -- presumably the
        # processing log is attached to it downstream; confirm against caller.
        df = obj.content_object
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate
        logger.log("ERROR")
        return valid

    try:
        logger.log("looking at raw data file with pk: %s" % pk)
        rd, sr = read_hdf5_arc(obj.data.path)
        logger.log("found rd_file: %s" % obj.name)
        len_rd_sec = rd.shape[0] / sr
        logger.log("found data in %d channels, for %d sec" % (rd.shape[1], len_rd_sec))

        # TODO: more checks?

        logger.log("rd_file passed all checks")
        valid = True
    except Exception as ex:
        # `except Exception, ex` is Python-2-only; `as` works on 2.6+ and 3.x
        logger.log("ERROR: rawdata file check: %s" % str(ex))

    # BUG FIX: the original fell off the end and implicitly returned None,
    # contradicting the documented bool return value.
    return valid
# --- Exemple #2 (0) --- scraped-page separator, commented out so the file parses
def task_validate_rawdata_file(pk):
    """validates a rawdata file - that is an archive holding data to be analysed

    :type pk: int
    :param pk: pk for Data entity

    :returns: bool -- True if Data validates, False else. Processing
    log, including errors, will be written to the Data entity.
    """

    # init and checks
    valid = False
    logger = Logger.get_logger(StringIO())
    try:
        df = Data.objects.get(id=pk)
        assert df.kind == 'rd_file'
        # NOTE(review): `tr` is unused in the visible code -- presumably the
        # log is written to it downstream; confirm against caller.
        tr = df.content_object
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate
        logger.log('ERROR')
        return valid

    try:
        logger.log('looking at raw data file with pk: %s' % pk)
        rd, sr = read_hdf5_arc(df.file.path)
        logger.log('found rd_file: %s' % df.name)
        len_rd_sec = rd.shape[0] / sr
        logger.log('found data in %d channels, for %d sec' % (
            rd.shape[1], len_rd_sec))

        # TODO: more checks?

        logger.log('rd_file passed all checks')
        valid = True
    except Exception as ex:
        # `except Exception, ex` is Python-2-only; `as` works on 2.6+ and 3.x
        logger.log('ERROR: rawdata file check: %s' % str(ex))

    # BUG FIX: the original never returned, implicitly yielding None instead
    # of the documented bool.
    return valid
def task_run_modules(ana_pk, **kwargs):
    """run all enabled modules for an analysis

    :type ana_pk: Analysis
    :param ana_pk: Analysis entity
    :keyword: any, will be passed to modules as parameters

    :returns: True on success, False on error
    """

    ana = None
    success = None
    logger = Logger.get_logger(StringIO(""))
    try:
        # get analysis
        ana = Analysis.objects.get(pk=ana_pk.pk)
        logger.log_delimiter_line()
        logger.log("processing %s" % ana)
        ana.status = Analysis.STATUS.running
        ana.save()

        # get module list
        mod_list = ana.datafile.dataset.module_set.all()
        assert mod_list, "module list is empty!"

        # get file handles
        logger.log("reading input files")
        rd_file = ana.datafile.rd_file
        gt_file = ana.datafile.gt_file
        st_file = ana.st_file
        rd, sampling_rate = read_hdf5_arc(rd_file.data.path)
        if sampling_rate is not None:
            kwargs.update(sampling_rate=sampling_rate)
        ev_sts = read_gdf_sts(st_file.data.path)
        gt_sts = read_gdf_sts(gt_file.data.path)
        logger.log("done reading input files")

        # apply modules
        _tick_all = datetime.now()
        for mod in mod_list:
            logger.log_delimiter_line()
            logger.log("starting {}".format(mod))
            module_cls = mod.get_module_cls()
            _tick_ = datetime.now()
            module = module_cls(rd, gt_sts, ev_sts, logger, **kwargs)
            module.apply()
            module.save(mod, ana)
            _tock_ = datetime.now()
            logger.log("finished in {}".format(str(_tock_ - _tick_)))
            del module, module_cls, _tick_, _tock_
        _tock_all = datetime.now()
        logger.log_delimiter_line()
        logger.log("finished all module in {}".format(str(_tock_all - _tick_all)))
    except Exception as ex:
        # `except Exception, ex` is Python-2-only; `as` works on 2.6+ and 3.x
        logger.log_delimiter_line()
        logger.log("ERROR ({}) :: {}".format(ex.__class__.__name__, ex))
        success = False
    else:
        # BUG FIX: the original (truncated) never set success on the happy path,
        # so the documented True-on-success was unreachable.
        success = True
    # NOTE(review): the original scrape is cut off here -- it presumably also
    # updated ana.status to a success/failure state and persisted the log;
    # confirm against the full upstream source.
    return success
# --- Exemple #4 (0) --- scraped-page separator, commented out so the file parses
def task_run_modules(ev, **kwargs):
    """core function to run all modules for an evaluation

    :type ev: Evaluation
    :param ev: Evaluation entity
    :keyword: any, will be passed to modules as parameters

    :returns: True on success, False on error
    """

    success = None
    try:
        ev.status = ev.STATUS.running
        ev.save()
        mod_list = ev.trial.benchmark.module_set.all()
        logger = Logger.get_logger(StringIO())
    except Exception:
        # narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate
        success = False
    else:
        try:
            logger.log_delimiter_line()

            rd_file = ev.trial.rd_file
            gt_file = ev.trial.gt_file
            ev_file = ev.ev_file
            logger.log('processing: %s' % ev)

            logger.log('reading input files')
            rd, sampling_rate = read_hdf5_arc(rd_file.file.path)
            if sampling_rate is not None:
                kwargs.update(sampling_rate=sampling_rate)
            ev_sts = read_gdf_sts(ev_file.file.path)
            gt_sts = read_gdf_sts(gt_file.file.path)
            logger.log('done reading input files')

            logger.log_delimiter_line()

            # modules
            assert len(mod_list), 'Module list is empty!'
            for mod in mod_list:
                logger.log('starting module: %s' % mod)
                module_pkg = importlib.import_module('spike.module.%s' % mod.path)
                _tick_ = datetime.now()
                module = module_pkg.module_cls(rd, gt_sts, ev_sts, logger, **kwargs)
                module.apply()
                module.save(mod, ev)
                _tock_ = datetime.now()
                logger.log('finished: %s' % str(_tock_ - _tick_))
                logger.log_delimiter_line()
                del module, module_pkg
        except Exception as ex:
            # `except Exception, ex` is Python-2-only; `as` works on 2.6+ and 3.x
            logger.log('ERROR: (%s) %s' % (ex.__class__.__name__, str(ex)))
            success = False
            ev.status = ev.STATUS.failure
        else:
            # BUG FIX: the original scrape ended on a dangling `else:` with an
            # empty body (a syntax error). Minimal completion mirroring the
            # failure branch; assumes ev.STATUS.success exists -- TODO confirm
            # against the full upstream source.
            ev.status = ev.STATUS.success
            success = True
        # persist the terminal status set by either branch above
        ev.save()
    return success
def task_validate_rawdata_file(pk, **kwargs):
    """validates rawdata file - that is an archive holding voltage traces from
    extracellular recordings to be a data to be analysed with spike sorting methods

    :type pk: int
    :param pk: pk of `Asset` entity

    :returns: bool -- True if file validates, False else. Processing
    log, including errors, will be written to the `Datafile` entity.
    """

    # init and checks
    valid = False
    logger = Logger.get_logger(StringIO())
    try:
        # look up the Asset and require it to be a raw-data file
        obj = Asset.objects.get(id=pk)
        assert obj.kind == "rd_file"
        # NOTE(review): `df` is unused in the visible lines -- presumably the
        # log is attached to it further down; confirm against the full source.
        df = obj.content_object
    except:
        # bare except: any failure (missing asset, wrong kind) aborts early
        logger.log("ERROR")
        return valid

    try:
        logger.log("looking at raw data file with pk: %s" % pk)
        # rd: trace array, sr: sampling rate read from the HDF5 archive
        rd, sr = read_hdf5_arc(obj.data.path)
        logger.log("found rd_file: %s" % obj.name)
        # duration in seconds = samples / sampling rate
        len_rd_sec = rd.shape[0] / sr
        logger.log("found data in %d channels, for %d sec" %
                   (rd.shape[1], len_rd_sec))

        # TODO: more checks?

        logger.log("rd_file passed all checks")
        valid = True
    except Exception, ex:
        # NOTE(review): Python-2-only except syntax; the trailing
        # `return valid` appears cut off in this copy -- confirm upstream.
        logger.log("ERROR: rawdata file check: %s" % str(ex))