Example #1
0
def task_validate_rawdata_file(pk, **kwargs):
    """validates rawdata file - that is an archive holding voltage traces from
    extracellular recordings to be a data to be analysed with spike sorting methods

    :type pk: int
    :param pk: pk of `Asset` entity

    :returns: bool -- True if file validates, False else. Processing
    log, including errors, will be written to the `Datafile` entity.
    """

    # init and checks
    valid = False
    logger = Logger.get_logger(StringIO())
    try:
        obj = Asset.objects.get(id=pk)
        assert obj.kind == "rd_file"
        # content_object resolves the generic relation; kept even though `df`
        # is unused below -- presumably the lookup itself is the check (TODO confirm)
        df = obj.content_object
    except Exception:
        # was a bare "except:" -- narrowed so SystemExit/KeyboardInterrupt
        # still propagate instead of being swallowed
        logger.log("ERROR")
        return valid

    try:
        logger.log("looking at raw data file with pk: %s" % pk)
        # rd: data array (rows = samples, cols = channels), sr: sampling rate
        rd, sr = read_hdf5_arc(obj.data.path)
        logger.log("found rd_file: %s" % obj.name)
        len_rd_sec = rd.shape[0] / sr
        logger.log("found data in %d channels, for %d sec" % (rd.shape[1], len_rd_sec))

        # TODO: more checks?

        logger.log("rd_file passed all checks")
        valid = True
    except Exception as ex:  # "as" form is valid on both Py2.6+ and Py3 (was "Exception, ex")
        logger.log("ERROR: rawdata file check: %s" % str(ex))
    # BUG FIX: the success path never returned; honour the documented contract
    return valid
Example #2
0
 def test_logger_file(self):
     """Logger writes to a real on-disk file handle and reads the content back."""
     fname = os.path.join(os.path.dirname(__file__), 'test_logger.txt')
     if os.path.exists(fname):
         os.remove(fname)
     canvas = open(fname, 'a+')
     try:
         logger = Logger.get_logger(canvas)
         logger.log(*self.str_test)
         self.assertEqual(logger.get_content().split(), self.str_test)
     finally:
         # BUG FIX: the handle was never closed -- os.remove on an open file
         # fails on Windows and leaks a descriptor elsewhere; the finally also
         # guarantees cleanup even when the assertion fails
         canvas.close()
         os.remove(fname)
Example #3
0
 def test_logger_file(self):
     """Logger writes to a real on-disk file handle and reads the content back."""
     fname = os.path.join(os.path.dirname(__file__), 'test_logger.txt')
     if os.path.exists(fname):
         os.remove(fname)
     canvas = open(fname, 'a+')
     try:
         logger = Logger.get_logger(canvas)
         logger.log(*self.str_test)
         self.assertEqual(logger.get_content().split(), self.str_test)
     finally:
         # BUG FIX: the handle was never closed -- os.remove on an open file
         # fails on Windows and leaks a descriptor elsewhere; the finally also
         # guarantees cleanup even when the assertion fails
         canvas.close()
         os.remove(fname)
Example #4
0
def task_run_modules(ana_pk, **kwargs):
    """run all enabled modules for an analysis

    :type ana_pk: Analysis
    :param ana_pk: Analysis entity
    :keyword: any, will be passed to modules as parameters

    :returns: True on success, False on error
    """

    ana = None
    success = None
    logger = Logger.get_logger(StringIO(""))
    try:
        # get analysis (re-fetch by pk so we work on a fresh instance)
        ana = Analysis.objects.get(pk=ana_pk.pk)
        logger.log_delimiter_line()
        logger.log("processing %s" % ana)
        ana.status = Analysis.STATUS.running
        ana.save()

        # get module list
        mod_list = ana.datafile.dataset.module_set.all()
        assert mod_list, "module list is empty!"

        # get file handles: raw data, ground-truth spike trains, evaluated spike trains
        logger.log("reading input files")
        rd_file = ana.datafile.rd_file
        gt_file = ana.datafile.gt_file
        st_file = ana.st_file
        rd, sampling_rate = read_hdf5_arc(rd_file.data.path)
        if sampling_rate is not None:
            # forward the recording's sampling rate to every module
            kwargs.update(sampling_rate=sampling_rate)
        ev_sts = read_gdf_sts(st_file.data.path)
        gt_sts = read_gdf_sts(gt_file.data.path)
        logger.log("done reading input files")

        # apply modules, timing each one and the whole batch
        _tick_all = datetime.now()
        for mod in mod_list:
            logger.log_delimiter_line()
            logger.log("starting {}".format(mod))
            module_cls = mod.get_module_cls()
            _tick_ = datetime.now()
            module = module_cls(rd, gt_sts, ev_sts, logger, **kwargs)
            module.apply()
            module.save(mod, ana)
            _tock_ = datetime.now()
            logger.log("finished in {}".format(str(_tock_ - _tick_)))
            # release the (potentially large) module before the next iteration
            del module, module_cls, _tick_, _tock_
        _tock_all = datetime.now()
        logger.log_delimiter_line()
        logger.log("finished all module in {}".format(str(_tock_all - _tick_all)))
        # BUG FIX: success was never set on the happy path, so the documented
        # "True on success" was impossible
        success = True
    except Exception as ex:  # "as" form is valid on both Py2.6+ and Py3 (was "Exception, ex")
        logger.log_delimiter_line()
        logger.log("ERROR ({}) :: {}".format(ex.__class__.__name__, ex))
        success = False
    # BUG FIX: return per the documented contract: True on success, False on error
    return success
Example #5
0
def task_validate_spiketrain_file(pk, **kwargs):
    """validate spike train file - that is a text file in gdf format (space separated, 2col, [key,time])

    :type pk: int
    :param pk: pk of `Datafile` entity

    :returns: bool -- True if file validates, False else. Processing
    log, including errors, will be written to the `Datafile` entity.
    """

    # init and checks
    valid = False
    logger = Logger.get_logger(StringIO())
    try:
        obj = Asset.objects.get(id=pk)
        assert obj.kind == "st_file"
        # content_object resolves the generic relation; kept even though `df`
        # is unused below -- presumably the lookup itself is the check (TODO confirm)
        df = obj.content_object
    except Exception:
        # was a bare "except:" -- narrowed so SystemExit/KeyboardInterrupt
        # still propagate instead of being swallowed
        logger.log("ERROR")
        return valid

    try:
        logger.log("looking at spike train file with pk: %s" % obj.id)
        # sts maps unit key -> 1d array of spike times
        sts = read_gdf_sts(obj.data.path)
        logger.log("found st_file: %s" % obj.name)
        for st in sts:
            if not isinstance(sts[st], sp.ndarray):
                raise TypeError("spike train %s not ndarray" % st)
            if not sts[st].ndim == 1:
                raise ValueError("spike trains have to be ndim==1")

        # TODO: more checks?

        logger.log("st_file passed all checks")
        valid = True
    except Exception as ex:  # "as" form is valid on both Py2.6+ and Py3 (was "Exception, ex")
        logger.log('ERROR: spike train file check: %s' % str(ex))
    # BUG FIX: the success path never returned; honour the documented contract
    return valid
Example #6
0
def task_validate_spiketrain_file(pk, **kwargs):
    """validate spike train file - that is a text file in gdf format (space separated, 2col, [key,time])

    :type pk: int
    :param pk: pk of `Datafile` entity

    :returns: bool -- True if file validates, False else. Processing
    log, including errors, will be written to the `Datafile` entity.
    """

    # init and checks
    valid = False
    logger = Logger.get_logger(StringIO())
    try:
        obj = Asset.objects.get(id=pk)
        assert obj.kind == "st_file"
        # content_object resolves the generic relation; kept even though `df`
        # is unused below -- presumably the lookup itself is the check (TODO confirm)
        df = obj.content_object
    except Exception:
        # was a bare "except:" -- narrowed so SystemExit/KeyboardInterrupt
        # still propagate instead of being swallowed
        logger.log("ERROR")
        return valid

    try:
        logger.log("looking at spike train file with pk: %s" % obj.id)
        # sts maps unit key -> 1d array of spike times
        sts = read_gdf_sts(obj.data.path)
        logger.log("found st_file: %s" % obj.name)
        for st in sts:
            if not isinstance(sts[st], sp.ndarray):
                raise TypeError("spike train %s not ndarray" % st)
            if not sts[st].ndim == 1:
                raise ValueError("spike trains have to be ndim==1")

        # TODO: more checks?

        logger.log("st_file passed all checks")
        valid = True
    except Exception as ex:  # "as" form is valid on both Py2.6+ and Py3 (was "Exception, ex")
        logger.log('ERROR: spike train file check: %s' % str(ex))
    # BUG FIX: the success path never returned; honour the documented contract
    return valid
Example #7
0
def task_validate_rawdata_file(pk, **kwargs):
    """validates rawdata file - that is an archive holding voltage traces from
    extracellular recordings to be a data to be analysed with spike sorting methods

    :type pk: int
    :param pk: pk of `Asset` entity

    :returns: bool -- True if file validates, False else. Processing
    log, including errors, will be written to the `Datafile` entity.
    """

    # init and checks
    valid = False
    logger = Logger.get_logger(StringIO())
    try:
        obj = Asset.objects.get(id=pk)
        assert obj.kind == "rd_file"
        # content_object resolves the generic relation; kept even though `df`
        # is unused below -- presumably the lookup itself is the check (TODO confirm)
        df = obj.content_object
    except Exception:
        # was a bare "except:" -- narrowed so SystemExit/KeyboardInterrupt
        # still propagate instead of being swallowed
        logger.log("ERROR")
        return valid

    try:
        logger.log("looking at raw data file with pk: %s" % pk)
        # rd: data array (rows = samples, cols = channels), sr: sampling rate
        rd, sr = read_hdf5_arc(obj.data.path)
        logger.log("found rd_file: %s" % obj.name)
        len_rd_sec = rd.shape[0] / sr
        logger.log("found data in %d channels, for %d sec" %
                   (rd.shape[1], len_rd_sec))

        # TODO: more checks?

        logger.log("rd_file passed all checks")
        valid = True
    except Exception as ex:  # "as" form is valid on both Py2.6+ and Py3 (was "Exception, ex")
        logger.log("ERROR: rawdata file check: %s" % str(ex))
    # BUG FIX: the success path never returned; honour the documented contract
    return valid
Example #8
0
 def test_delimiter_line(self):
     """log_delimiter_line emits a line of twenty asterisks."""
     logger = Logger(StringIO())
     logger.log_delimiter_line()
     expected = '*' * 20
     self.assertEqual(logger.get_content().strip(), expected)
Example #9
0
 def test_logger_strio(self):
     """Logging into an in-memory stream round-trips the message tokens."""
     sink = StringIO()
     log = Logger.get_logger(sink)
     log.log(*self.str_test)
     self.assertEqual(log.get_content().split(), self.str_test)
Example #10
0
 def test_is_file_like(self):
     """A plain object is rejected; an in-memory stream is accepted."""
     self.assertFalse(Logger.is_file_like(self))
     self.assertTrue(Logger.is_file_like(StringIO()))
Example #11
0
 def test_delimiter_line(self):
     """log_delimiter_line emits a line of twenty asterisks."""
     logger = Logger(StringIO())
     logger.log_delimiter_line()
     expected = '*' * 20
     self.assertEqual(logger.get_content().strip(), expected)
Example #12
0
 def test_logger_strio(self):
     """Logging into an in-memory stream round-trips the message tokens."""
     sink = StringIO()
     log = Logger.get_logger(sink)
     log.log(*self.str_test)
     self.assertEqual(log.get_content().split(), self.str_test)
Example #13
0
 def test_is_file_like(self):
     """A plain object is rejected; an in-memory stream is accepted."""
     self.assertFalse(Logger.is_file_like(self))
     self.assertTrue(Logger.is_file_like(StringIO()))