def _show_loss_models():
    """Write one line per available loss model (id, TAB, name) to stdout."""
    connections = db_connections(db_settings.db_confs)
    with connections['loss_contrib'].cursor() as cursor:
        # Print while the cursor is still open, in case the helper
        # yields rows lazily rather than returning a materialized list.
        for model in _list_loss_models(cursor):
            print('{0}\t{1}'.format(model.get('id'), model.get('name')))
def exposure_to_nrml(model_id):
    """
    Return a NRML XML tree for the exposure model with the specified id,
    or None if no model with that id exists.
    """
    connections = db_connections(db_settings.db_confs)
    with connections['ged4all_contrib'].cursor() as cursor:
        cursor.execute(MODEL_QUERY, [model_id])
        row = dictfetchone(cursor)
        # Guard clause: unknown id yields None rather than raising.
        if row is None:
            return None
        return _build_tree(model_id, row, cursor)
def import_exposure_model(ex, nrml_file):
    """
    Import exposure from an exposure model node; returns the new model id.
    """
    verbose_message("Model contains {0} assets\n".format(len(ex.assets)))
    # NOTE(review): connection key 'gedcontrib' differs from the
    # 'ged4all_contrib' key used by exposure_to_nrml — confirm intentional.
    connection = db_connections(db_settings.db_confs)['gedcontrib']
    with connection.cursor() as cursor:
        model_id = _import_model(cursor, ex)
        _import_contribution(cursor, ex, model_id)
        verbose_message('Inserted model, id={0}\n'.format(model_id))
        cost_type_dict = _import_cost_types(cursor, ex, model_id)
        _import_assets(cursor, ex, cost_type_dict, model_id, nrml_file)
    # Commit only after every insert has succeeded.
    connection.commit()
    return model_id
def import_event_set(es):
    """
    Import data from a scenario EventSet; returns the new event_set id.
    """
    verbose_message("Model contains {0} events\n".format(len(es.events)))
    # Bind the connection once so cursor creation and commit share one name.
    connection = db_connections(db_settings.db_confs)['hazard_contrib']
    with connection.cursor() as cursor:
        event_set_id = _import_event_set(cursor, es)
        _import_contribution(cursor, event_set_id, es.contribution)
        verbose_message('Inserted event_set, id={0}\n'.format(event_set_id))
        _import_events(cursor, event_set_id, es.events)
        verbose_message('Updating bounding box\n')
        _fix_bb_geometry(cursor, event_set_id)
    connection.commit()
    return event_set_id
def import_loss_model(loss_model):
    """
    Import loss_model into the loss DB, return id
    """
    verbose_message("Model contains {0} maps\n".format(
        len(loss_model.loss_maps)))
    verbose_message("Model contains {0} curve maps\n".format(
        len(loss_model.loss_curve_maps)))
    connection = db_connections(db_settings.db_confs)['loss_contrib']
    with connection.cursor() as cursor:
        # Insert the model row first so the children can reference its id.
        model_id = _import_loss_model(cursor, loss_model)
        _import_contribution(cursor, model_id, loss_model.contribution)
        _import_loss_maps(cursor, model_id, loss_model.loss_maps)
        _import_loss_curve_maps(cursor, model_id, loss_model.loss_curve_maps)
    connection.commit()
    verbose_message('Inserted loss model, id={0}\n'.format(model_id))
    return model_id
def _export_loss_model(model_id):
    """Export the given model_id to a JSON file.

    Loads the model through the 'loss_reader' connection.  On success the
    model is written to loss_model_<id>.json in the current directory;
    if the id is unknown an error is reported on stderr and the process
    exits with status 1.
    """
    connections = db_connections(db_settings.db_confs)
    verbose_message("Loading model {0}\n".format(model_id))
    with connections['loss_reader'].cursor() as cursor:
        loss_model = load_loss_model(cursor, model_id)
    if loss_model is None:
        sys.stderr.write("Model {0} not found\n".format(model_id))
        # BUG FIX: was the site-builtin exit(); that name is injected by the
        # `site` module and is absent under `python -S` or in frozen apps —
        # sys.exit() is the reliable spelling.
        sys.exit(1)
    jname = 'loss_model_{0}.json'.format(model_id)
    with open(jname, 'w') as fout:
        # `dumper` is the project-supplied serializer for model objects.
        json.dump(loss_model, fout, default=dumper, indent=2)
    verbose_message("Exported model id {0} to {1}\n".format(
        model_id, jname))