def _clear_job_results(*, jobs, incomplete_only=True):
    """Clear stored results for the given jobs.

    With ``incomplete_only=True`` (the default) only jobs whose stored value
    marks an in-process or errored run are cleared; with ``incomplete_only=False``
    every job in ``jobs`` that has a stored value is cleared.
    """
    for j in jobs:
        stored = mt.getValue(key=j)
        if not stored:
            # Nothing recorded for this job -- leave it alone.
            continue
        if (not incomplete_only) or stored.startswith(('in-process', 'error')):
            print('Clearing job: ' + j['label'])
            mt.setValue(key=j, value=None)
def _run_job(job):
    """Claim and execute a single job, recording its status in the key/value store.

    The value stored under the job key doubles as a lock: a unique
    'in-process-<code>' marker is written with overwrite=False so only one
    worker can claim the job, and the marker is re-checked before anything is
    saved so a worker that lost its claim does not clobber another worker's
    result.
    """
    val = mt.getValue(key=job)
    if val:
        # Something is already stored for this job (a result, an error marker,
        # or another worker's in-process marker) -- nothing to do.
        return
    # Random code identifies this worker's claim on the job.
    code = ''.join(random.choice(string.ascii_uppercase) for x in range(10))
    if not mt.setValue(key=job, value='in-process-' + code, overwrite=False):
        # Another worker claimed the job between the getValue above and here.
        return
    status = dict(time_started=_make_timestamp(), status='running')
    _set_job_status(job, status)
    print('Running job: ' + job['label'])
    try:
        result = _do_run_job(job)
    except:
        status['time_finished'] = _make_timestamp()
        status['status'] = 'error'
        status['error'] = 'Exception in _do_run_job'
        # Record the error status only if we still hold the claim, then
        # re-raise so the caller sees the original exception.
        val = mt.getValue(key=job)
        if val == 'in-process-' + code:
            _set_job_status(job, status)
        raise
    # Re-check the claim: if another process overwrote our marker, discard
    # this worker's result rather than racing with the other one.
    val = mt.getValue(key=job)
    if val != 'in-process-' + code:
        print(
            'Not saving result because in-process code does not match {} <> {}.'
            .format(val, 'in-process-' + code))
        return
    status['time_finished'] = _make_timestamp()
    status['result'] = result
    if 'error' in result:
        print('Error running job: ' + result['error'])
        status['status'] = 'error'
        status['error'] = result['error']
        _set_job_status(job, status)
        # Replace the in-process marker with an error marker carrying the code.
        mt.setValue(key=job, value='error-' + code)
        return
    status['status'] = 'finished'
    # NOTE(review): the 'finished' status is never persisted via
    # _set_job_status on this success path -- presumably intentional given
    # the trailing comment below, but worth confirming.
    mt.saveObject(
        key=job, object=result
    )  # Not needed in future, because we should instead use the status object
def _test1(ii):
    """Write the string form of ``ii`` under a fixed test key and return
    whatever the store hands back on read."""
    key = dict(test='key3')
    written = '{}'.format(ii)
    mt.setValue(key=key, value=written)
    return mt.getValue(key=key)
# Admin token is required; this raises KeyError (fails fast) if it is missing
# from the environment.
CAIRIO_ADMIN_TOKEN = os.environ['CAIRIO_ADMIN_TOKEN']
mt.setPairioToken('admin', CAIRIO_ADMIN_TOKEN)

# Let's set the tokens for the remote collections
print('Setting tokens for the remote pairio collections')
mt.addRemoteCollection(collection='spikeforest',
                       token=os.environ['PAIRIO_SPIKEFOREST_TOKEN'],
                       admin_token=CAIRIO_ADMIN_TOKEN)
mt.addRemoteCollection(collection='morley',
                       token=os.environ['PAIRIO_MORLEY_TOKEN'],
                       admin_token=CAIRIO_ADMIN_TOKEN)

# Set up the kachery aliases
print('Setting up the kachery aliases')
mt.setValue(key='kbucket', value='http://kbucket.flatironinstitute.org',
            collection='spikeforest')
# mt.setValue(key='public', value='http://45.79.176.243:8080', collection='spikeforest')
mt.setValue(key='public', value='http://132.249.245.246:24341',
            collection='spikeforest')
## TODO:
# mt.setValue(key='public', value='http://spikeforestpublic.org:24341', collection='spikeforest')
mt.setValue(key='public1', value='http://132.249.245.246:24341',
            collection='spikeforest')
# NOTE(review): the call below appears truncated at this chunk boundary --
# its remaining arguments are outside the visible source.
mt.setValue(key='public2', value='http://132.249.245.246:24342',
def setBatchStatus(*, batch_name, status):
    """Record the status value for the named spikeforest batch in the
    key/value store."""
    batch_key = dict(name='spikeforest_batch_status', batch_name=batch_name)
    mt.setValue(key=batch_key, value=status)
# Demo script exercising the mountaintools client's local key/value store
# and content-addressed file storage.
from mountaintools import client as mt

print('===================')

# Local key/value store for associating relatively short strings
# (<=80 characters) with arbitrary keys (strings or dicts)

# Setting values (these should be short strings, <=80 characters)
mt.setValue(key='some-key1', value='hello 1')
mt.setValue(key=dict(name='some_name', number=2), value='hello 2')

# Getting values
val1 = mt.getValue(key='some-key1')
val2 = mt.getValue(key=dict(name='some_name', number=2))
print(val1)
print(val2)

print('===================')

# Setting password-protected values
mt.setValue(key='some_key2', password='******', value='the-secret-*y$#a')

# Retrieving password-protected values (same password must be supplied)
print(mt.getValue(key='some_key2', password='******'))

print('===================')

# Local storage of data and files, retrievable by SHA-1 hash
path = mt.saveText('This is some text', basename='test.txt')
print(path)
# Output: sha1://482cb0cfcbed6740a2bcb659c9ccc22a4d27b369/test.txt