def main(what, calc_id: int = -1, webapi=False, local=False, *,
         extract_dir='.'):
    """
    Extract an output from the datastore and save it into a .csv or .npz
    file. By default uses the WebAPI, otherwise the extraction is done
    locally.
    """
    with performance.Monitor('extract', measuremem=True) as mon:
        if local:
            if calc_id == -1:  # -1 means the latest job in the database
                calc_id = logs.dbcmd('get_job', calc_id).id
            aw = WebExtractor(calc_id, 'http://localhost:8800', '').get(what)
        elif webapi:
            aw = WebExtractor(calc_id).get(what)
        else:
            aw = Extractor(calc_id).get(what)
        w = what.replace('/', '-').replace('?', '-')
        if hasattr(aw, 'array') and isinstance(aw.array, str):  # CSV string
            fname = os.path.join(extract_dir, '%s_%d.csv' % (w, calc_id))
            with open(fname, 'w', encoding='utf-8') as f:
                f.write(aw.array)
        else:  # save as npz
            fname = os.path.join(extract_dir, '%s_%d.npz' % (w, calc_id))
            hdf5.save_npz(aw, fname)
        print('Saved', fname)
    if mon.duration > 1:
        print(mon)
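
# A minimal usage sketch, not part of the original module: the output key
# 'hcurves', the calculation id 42 and the /tmp directory are assumed values
# for illustration. With the default webapi=False and local=False the data
# are read directly from the datastore via Extractor and saved as
# /tmp/hcurves_42.npz (or .csv when the extracted object is a plain string).
if __name__ == '__main__':
    main('hcurves', calc_id=42, extract_dir='/tmp')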
def extract(what, calc_id=-1, webapi=False, local=False, extract_dir='.'):
    """
    Extract an output from the datastore and save it into a .csv, .npz or
    .txt file. By default uses the WebAPI, otherwise the extraction is done
    locally.
    """
    with performance.Monitor('extract', measuremem=True) as mon:
        if local:
            if calc_id == -1:  # -1 means the latest job in the database
                calc_id = logs.dbcmd('get_job', calc_id).id
            aw = WebExtractor(calc_id, 'http://localhost:8800', '').get(what)
        elif webapi:
            aw = WebExtractor(calc_id).get(what)
        else:
            aw = Extractor(calc_id).get(what)
        w = what.replace('/', '-').replace('?', '-')
        if isinstance(aw.array, str):  # a big string
            fname = os.path.join(extract_dir, '%s_%d.csv' % (w, calc_id))
            with open(fname, 'w', encoding='utf-8') as f:
                f.write(aw.array)
        elif aw.is_good():  # a regular ArrayWrapper
            fname = os.path.join(extract_dir, '%s_%d.npz' % (w, calc_id))
            hdf5.save_npz(aw, fname)
        else:  # ArrayWrapper of strings, dictionaries or other types
            fname = os.path.join(extract_dir, '%s_%d.txt' % (w, calc_id))
            with open(fname, 'w') as f:
                f.write(aw.toml())
        print('Saved', fname)
    if mon.duration > 1:
        print(mon)
def extract(what, calc_id=-1, webapi=True, local=False):
    """
    Extract an output from the datastore and save it into an .npz file.
    By default uses the WebAPI, otherwise the extraction is done locally.
    """
    with performance.Monitor('extract', measuremem=True) as mon:
        if local:
            obj = WebExtractor(calc_id, 'http://localhost:8800', '').get(what)
        elif webapi:
            obj = WebExtractor(calc_id).get(what)
        else:
            obj = Extractor(calc_id).get(what)
        w = what.replace('/', '-').replace('?', '-')
        fname = '%s_%d.npz' % (w, calc_id)
        hdf5.save_npz(obj, fname)
        print('Saved', fname)
    if mon.duration > 1:
        print(mon)
def extract(request, calc_id, what):
    """
    Wrapper over the `oq extract` command. If `settings.LOCKDOWN` is true
    only calculations owned by the current user can be retrieved.
    """
    job = logs.dbcmd('get_job', int(calc_id))
    if job is None:
        return HttpResponseNotFound()
    if not utils.user_has_permission(request, job.user_name):
        return HttpResponseForbidden()

    path = request.get_full_path()
    n = len(request.path_info)
    query_string = unquote_plus(path[n:])
    try:
        # read the data and save them on a temporary .npz file
        with datastore.read(job.ds_calc_dir + '.hdf5') as ds:
            fd, fname = tempfile.mkstemp(
                prefix=what.replace('/', '-'), suffix='.npz')
            os.close(fd)
            obj = _extract(ds, what + query_string)
            hdf5.save_npz(obj, fname)
    except Exception as exc:
        tb = ''.join(traceback.format_tb(exc.__traceback__))
        return HttpResponse(
            content='%s: %s in %s\n%s' % (exc.__class__.__name__, exc,
                                          path, tb),
            content_type='text/plain', status=500)

    # stream the data back and remove the temporary file once it is consumed
    stream = FileWrapper(open(fname, 'rb'))
    stream.close = lambda: (FileWrapper.close(stream), os.remove(fname))
    response = FileResponse(stream, content_type='application/octet-stream')
    response['Content-Disposition'] = (
        'attachment; filename=%s' % os.path.basename(fname))
    response['Content-Length'] = str(os.path.getsize(fname))
    return response
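
# A minimal client-side sketch, not part of the original module: the host,
# port, calculation id and the URL pattern that routes to the view above are
# assumptions for illustration. It shows how the streamed .npz attachment
# returned by the view could be saved to disk.
import requests  # third-party HTTP client, assumed available


def download_extract(host, calc_id, what, dest):
    # the '/v1/calc/<id>/extract/<what>' route is an assumption about urls.py
    url = '%s/v1/calc/%d/extract/%s' % (host, calc_id, what)
    resp = requests.get(url, stream=True)
    resp.raise_for_status()
    with open(dest, 'wb') as f:
        for chunk in resp.iter_content(chunk_size=65536):
            f.write(chunk)
    return dest

# example:
# download_extract('http://localhost:8800', 42, 'hcurves', '/tmp/hcurves.npz')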