Code example #1
 def execute(self):
   print('Dataset: '+self._dataset['name'])
   # An existing value means another run has already claimed or finished this job.
   val=pa.get(self._key)
   if val:
     if val.startswith('in-process'):
       print('In progress, skipping...')
     else:
       print('Completed, skipping...')
     return
   # Claim the job; with overwrite=False this fails if another process got there first.
   if not pa.set(self._key,'in-process-'+self._code,overwrite=False):
     print('Problem setting in-process value, skipping...')
     return
   try:
     result=self.run()
   except:
     # Record the error, but only if we still own the in-process marker.
     if pa.get(self._key)=='in-process-'+self._code:
       pa.set(self._key,'error')
     else:
       print('Unexpected: not setting error value because existing value does not match')
     raise
   # Save the result only if our in-process marker is still in place.
   if pa.get(self._key)=='in-process-'+self._code:
     print('Saving result object.')
     kb.saveObject(key=self._key,object=result)
   else:
     print('Unexpected: not setting result because existing value does not match.')
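The method above assumes a few attributes (`self._dataset`, `self._key`, `self._code`) and a `run()` method defined elsewhere in the class. A minimal sketch of what that setup might look like, modeled on the dict-valued keys and random run codes used in the other examples on this page; the class name and key fields here are illustrative, not taken from the original project:

import random
import string

class ProcessDatasetJob:
    def __init__(self, dataset, *, batch_name):
        self._dataset = dataset
        # Hypothetical dict-valued key identifying this unit of work in the
        # pairio store (fields modeled on code examples #7 and #13).
        self._key = dict(name='process_dataset',
                         batch_name=batch_name,
                         dataset_name=dataset['name'])
        # Random code distinguishing this run's in-process marker from others'.
        self._code = ''.join(random.choice(string.ascii_uppercase) for _ in range(10))

    def run(self):
        # Placeholder for the real processing work.
        return dict(dataset_name=self._dataset['name'])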
Code example #2
File: sf_batch2.py Project: magland/spikeforest
def clear_job_result(job, *, incomplete_only=True):
    val = pa.get(key=job)
    if val:
        if (not incomplete_only) or (val.startswith('in-process')) or (
                val.startswith('error')):
            print('Clearing job: ' + job['label'])
            pa.set(key=job, value=None)
Code example #3
    def process(self):
        for dataset in self._datasets:
            for sorter in self._sorters:
                print('SORTER: {}     DATASET: {}'.format(sorter['processor'].NAME, dataset['name']))
                lock_obj=self._get_lock_object(sorter,dataset)

                if pa.set(key=lock_obj,value='running',overwrite=False):
                    try:
                        print('Running...')
                        result=sf.sortDataset(
                            sorter=sorter,
                            dataset=dataset
                        )
                        result['comparison_with_truth'] = sf.compareWithTruth(result)
                        result['summary'] = sf.summarizeSorting(result)
                        kb.saveObject(key=lock_obj,object=result)
                    except:
                        pa.set(key=lock_obj,value='error',overwrite=True)
                        raise
                else:
                    val0=pa.get(key=lock_obj)
                    if val0 == 'running':
                        print('Skipping (result is running)...')
                    else:
                        print('Skipping (result is locked)...')
Code example #4
  def saveFile(self,fname,*,key=None,share_id=None,upload_token=None,basename=None,remote=None):
    ret=self._save_file_helper(fname,share_id=share_id,upload_token=upload_token,basename=basename,remote=remote)

    if key:
      # Record the file's SHA-1 under the given key so it can be looked up later via pairio.
      sha1=self.computeFileSha1(fname)
      pairio.set(key,sha1)

    return ret
Code example #5
File: test_001.py Project: tjd2002/spikeforest2
 def test_001(self):
   key0='testkey'
   val0='testval000'
   pa.set(key0,val0)
   val=pa.get(key0)
   self.assertEqual(val,val0)
   pa.set(key0,val0+'abc')
   val=pa.get(key0)
   self.assertEqual(val,val0+'abc')
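A possible extra check that could be appended to this test, assuming `pa.set` returns a falsy value when `overwrite=False` and the key already exists (the behavior the other examples on this page rely on); the assertions below are not part of the original test:

   # Hypothetical follow-up: with overwrite=False, setting an existing key
   # should fail and leave the stored value unchanged.
   ok=pa.set(key0,'other-value',overwrite=False)
   self.assertFalse(ok)
   self.assertEqual(pa.get(key0),val0+'abc')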
Code example #6
def clear_result_for_key(*, key, in_process_only=False):
    val = pa.get(key=key)
    if val:
        if in_process_only:
            do_clear = ((val.startswith('in-process'))
                        or (val.startswith('error')))
        else:
            do_clear = True
        if do_clear:
            print('Clearing results for: {}'.format(json.dumps(key)))
            pa.set(key=key, value=None)
Code example #7
def sf_batch_run(config):
    login(config)
    study_obj = kb.loadObject(key=dict(name='spikeforest_recordings'))
    recordings = select_recordings(study_obj, config)
    sorters = config['sorters']

    code = ''.join(random.choice(string.ascii_uppercase) for x in range(10))
    for i, ds in enumerate(recordings):
        if config.get('summarize_recordings', None):
            key = dict(name='summarize_recording',
                       batch_name=config['name'],
                       study_name=ds['study'],
                       recording_name=ds['name'])
            if acquire_lock_for_key(key=key, code=code):
                try:
                    print(
                        '========= Summarizing recording {}/{}: {}/{}'.format(
                            i, len(recordings), ds['study'], ds['name']))
                    result0 = sf_summarize_recording(ds)
                except:
                    if check_consistent_code(key=key, code=code):
                        pa.set(key=key, value='error-' + code)
                    raise
                if check_consistent_code(key=key, code=code):
                    kb.saveObject(key=key, object=result0)
                else:
                    print('Warning: inconsistent code for {}'.format(
                        json.dumps(key)))

        for sorter in sorters:
            key = dict(name='sort_recording',
                       batch_name=config['name'],
                       study_name=ds['study'],
                       recording_name=ds['name'],
                       sorter_name=sorter['name'],
                       sorter_params=sorter['params'])
            if acquire_lock_for_key(key=key, code=code):
                try:
                    print(
                        '========= Sorting recording {}/{}: {} - {}/{}'.format(
                            i, len(recordings), sorter['name'], ds['study'],
                            ds['name']))
                    result0 = sf_sort_recording(sorter, ds)
                except:
                    if check_consistent_code(key=key, code=code):
                        pa.set(key=key, value='error-' + code)
                    raise
                if check_consistent_code(key=key, code=code):
                    kb.saveObject(key=key, object=result0)
                else:
                    print('Warning: inconsistent code for {}'.format(
                        json.dumps(key)))
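`acquire_lock_for_key` is shown in code example #10 below. `check_consistent_code` is not included in any of these snippets, but given the `in-process-<code>` convention used throughout, it presumably amounts to something like the following sketch (an assumption, not the project's actual implementation; `pa` is the pairio client used in all of these examples):

def check_consistent_code(*, key, code):
    # True only if the key still holds the in-process marker written with this
    # run's random code, i.e. no other process has taken over or finished the job.
    return pa.get(key=key) == 'in-process-' + code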
Code example #8
File: sf_batch2.py Project: magland/spikeforest
def run_job(job):
    val = pa.get(key=job)
    if val:
        return
    code = ''.join(random.choice(string.ascii_uppercase) for x in range(10))
    if not pa.set(key=job, value='in-process-' + code, overwrite=False):
        return
    print('Running job: ' + job['label'])
    result = do_run_job(job)
    val = pa.get(key=job)
    if val != 'in-process-' + code:
        return
    if 'error' in result:
        print('Error running job: ' + result['error'])
        pa.set(key=job, value='error-' + code)
        kb.save(key=dict(job=job, name='error'), value=result)
        return
    kb.saveObject(key=job, object=result)
Code example #9
File: impl.py Project: magland/spikeforest_batch_run
def _run_job(job):
    val = pa.get(key=job)
    if val:
        return
    code = ''.join(random.choice(string.ascii_uppercase) for x in range(10))
    if not pa.set(key=job, value='in-process-' + code, overwrite=False):
        return
    status = dict(time_started=_make_timestamp(), status='running')
    _set_job_status(job, status)

    print('Running job: ' + job['label'])
    try:
        result = _do_run_job(job)
    except:
        status['time_finished'] = _make_timestamp()
        status['status'] = 'error'
        status['error'] = 'Exception in _do_run_job'
        val = pa.get(key=job)
        if val == 'in-process-' + code:
            _set_job_status(job, status)
        raise

    val = pa.get(key=job)
    if val != 'in-process-' + code:
        print(
            'Not saving result because in-process code does not match {} <> {}.'
            .format(val, 'in-process-' + code))
        return

    status['time_finished'] = _make_timestamp()
    status['result'] = result
    if 'error' in result:
        print('Error running job: ' + result['error'])
        status['status'] = 'error'
        status['error'] = result['error']
        _set_job_status(job, status)
        pa.set(key=job, value='error-' + code)
        return
    status['status'] = 'finished'
    kb.saveObject(
        key=job, object=result
    )  # Not needed in future, because we should instead use the status object
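`_make_timestamp` and `_set_job_status` come from the same module and are not reproduced here. Plausible minimal stand-ins are sketched below purely for illustration; the storage key for the status object is an assumption modeled on the `kb.save(key=dict(job=job, name='error'), ...)` pattern in code example #8, and `kb` is the kbucket client used throughout these snippets:

import time

def _make_timestamp():
    # Wall-clock timestamp used for the time_started/time_finished fields above.
    return time.time()

def _set_job_status(job, status):
    # Hypothetical: store the status object under a key derived from the job.
    kb.saveObject(key=dict(job=job, name='job_status'), object=status)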
Code example #10
def acquire_lock_for_key(*, key, code):
    val = pa.get(key=key)
    if val:
        # Any existing value (in-process marker, error marker, or finished
        # result) means another run owns this key, so do not acquire the lock.
        if val.startswith('in-process'):
            return False
        if val.startswith('error'):
            return False
        return False
    # Atomically claim the key; overwrite=False fails if another process got there first.
    if not pa.set(key, 'in-process-' + code, overwrite=False):
        return False
    return True
Code example #11
 def clearResults(self,*,in_process_only):
   val=pa.get(self._key)
   if val:
     if (not in_process_only) or (val.startswith('in-process')) or (val.startswith('error')):
       print('Clearing results for: '+self._key['dataset_name'])
       pa.set(key=self._key,value=None)
Code example #12
 def clearResults(self):
     for dataset in self._datasets:
         for sorter in self._sorters:
             lock_obj=self._get_lock_object(sorter,dataset)
             pa.set(key=lock_obj,value=None)
Code example #13
def setBatchStatus(*,batch_name,status):
  pa.set(key=dict(name='spikeforest_batch_status',batch_name=batch_name),value=status)
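An illustrative usage sketch; the batch name below is a placeholder (in code example #7 the batch name comes from `config['name']`):

# Placeholder batch name, for illustration only.
setBatchStatus(batch_name='example_batch',status='running')
# ... run the batch ...
setBatchStatus(batch_name='example_batch',status='finished')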