def save_analysis(destination, recording, modelspecs, xfspec, figures, log,
                  add_tree_path=False):
    '''Save an analysis file collection to a particular destination.

    Parameters
    ----------
    destination : str
        Base URI (local directory or remote path) to save everything under.
    recording, modelspecs, xfspec :
        Analysis objects; also used to build the tree path when
        ``add_tree_path`` is True.
    figures : sequence of bytes
        Figure image data, saved as numbered ``figure.NNNN.png`` files.
    log : str or bytes
        Log text, saved as ``log.txt``.
    add_tree_path : bool, optional
        When True, append ``tree_path(recording, modelspecs, xfspec)``
        to ``destination`` before saving. Default False.

    Returns
    -------
    dict
        ``{'savepath': base_uri}`` — the base URI the collection was
        saved under (always ends with '/').
    '''
    if add_tree_path:
        treepath = tree_path(recording, modelspecs, xfspec)
        base_uri = os.path.join(destination, treepath)
    else:
        base_uri = destination

    # Ensure a trailing slash so the URI concatenations below are
    # well-formed. endswith() also tolerates an empty destination,
    # where the original `base_uri[-1]` check raised IndexError.
    if not base_uri.endswith('/'):
        base_uri += '/'

    xfspec_uri = base_uri + 'xfspec.json'  # For attaching to modelspecs

    for number, modelspec in enumerate(modelspecs):
        # Record where the xfspec lives so each saved modelspec can
        # locate it later.
        set_modelspec_metadata(modelspec, 'xfspec', xfspec_uri)
        save_resource(base_uri + 'modelspec.{:04d}.json'.format(number),
                      json=modelspec)
    for number, figure in enumerate(figures):
        save_resource(base_uri + 'figure.{:04d}.png'.format(number),
                      data=figure)
    save_resource(base_uri + 'log.txt', data=log)
    save_resource(xfspec_uri, json=xfspec)
    return {'savepath': base_uri}
cellid, batch, modelname)) #savefile = nw.fit_model_xforms_baphy(cellid, batch, modelname, saveInDB=True) savefile = xhelp.fit_model_xform(cellid, batch, modelname, saveInDB=True) log.info("Done with fit.") # Mark completed in the queue. Note that this should happen last thing! # Otherwise the job might still crash after being marked as complete. if db_exists & bool(queueid): nd.update_job_complete(queueid) if 'SLURM_JOB_ID' in os.environ: # need to copy the job log over to the queue log dir log_file_dir = Path.home() / 'job_history' log_file = list( log_file_dir.glob( f'*jobid{os.environ["SLURM_JOB_ID"]}_log.out')) if len(log_file) == 1: log_file = log_file[0] log.info(f'Found log file: "{str(log_file)}"') log.info('Copying log file to queue log repo.') with open(log_file, 'r') as f: log_data = f.read() dst_prefix = r'http://' + get_setting( 'NEMS_BAPHY_API_HOST') + ":" + str( get_setting('NEMS_BAPHY_API_PORT')) dst_loc = dst_prefix + '/queuelog/' + str(queueid) save_resource(str(dst_loc), data=log_data)