def get(self, project_db):
    """Return the project configuration row as a JSON-ready dict.

    Aborts with 400 when the project was built with a QSWAT+ version this
    editor does not support, and with 404 when no configuration row exists.
    """
    check_config(project_db)
    SetupProjectDatabase.init(project_db)
    try:
        config = Project_config.get()
        result = get_model_to_dict_dates(config, project_db)

        gis_ver = config.gis_version
        if gis_ver is not None:
            # Either the current or the legacy importer must recognize the version.
            supported = (import_gis.is_supported_version(gis_ver)
                         or import_gis_legacy.is_supported_version(gis_ver))
            if not supported:
                abort(
                    400,
                    message="This version of SWAT+ Editor does not support QSWAT+ {uv}."
                    .format(uv=gis_ver))

        # Older databases may lack the object_cnt table, so probe before reading.
        description = None
        conn = lib.open_db(project_db)
        if lib.exists_table(conn, 'object_cnt'):
            oc = Object_cnt.get_or_none()
            if oc is not None:
                description = oc.name
        result["project_description"] = description

        return result
    except Project_config.DoesNotExist:
        abort(404, message="Could not retrieve project configuration data.")
def __init__(self, project_db, editor_version, project_name=None, datasets_db=None, constant_ps=True, is_lte=False):
    """Open an existing project, run any pending schema updates, back up the
    database, refresh project settings, and insert GIS defaults.

    Parameters:
        project_db: path to the project SQLite database.
        editor_version: version of the running editor (used for updates).
        project_name: optional override; when None it is read from the
            existing project configuration.
        datasets_db: path to swatplus_datasets.sqlite; when None it is read
            from the project configuration (which must then exist).
        constant_ps: passed through to GisImport.
        is_lte: flag stored on the project configuration.

    Exits the process (sys.exit) on missing configuration or backup I/O errors.
    """
    self.__abort = False

    base_path = os.path.dirname(project_db)
    rel_project_db = os.path.relpath(project_db, base_path)

    if datasets_db is None:
        conn = lib.open_db(project_db)
        if not lib.exists_table(conn, 'project_config'):
            sys.exit(
                'No datasets database provided and the project_config table in your project database does not exist. Please provide either a datasets database file or an existing project database.'
            )

        SetupProjectDatabase.init(project_db)
        try:
            config = Project_config.get()
            datasets_db = utils.full_path(project_db, config.reference_db)
            # Only fall back to the stored name; don't clobber a caller-supplied
            # project_name (matches the create-project constructor's behavior).
            if project_name is None:
                project_name = config.project_name
        except Project_config.DoesNotExist:
            sys.exit('Could not retrieve project configuration data.')

    # Run updates if needed
    SetupProjectDatabase.init(project_db, datasets_db)
    config = Project_config.get()
    if config.editor_version in update_project.available_to_update:
        update_project.UpdateProject(project_db, editor_version, update_project_values=True)

    # Backup original db before beginning
    try:
        self.emit_progress(2, 'Backing up project database...')
        filename, file_extension = os.path.splitext(rel_project_db)
        bak_filename = filename + '_bak_' + time.strftime('%Y%m%d-%H%M%S') + file_extension
        bak_dir = os.path.join(base_path, 'DatabaseBackups')
        if not os.path.exists(bak_dir):
            os.makedirs(bak_dir)
        backup_db_file = os.path.join(bak_dir, bak_filename)
        copyfile(project_db, backup_db_file)
    except IOError as err:
        sys.exit(err)

    self.emit_progress(5, 'Updating project settings...')
    config = Project_config.get()  # Get again due to modification when updating
    config.imported_gis = False
    config.is_lte = is_lte
    config.save()

    api = GisImport(project_db, True, constant_ps, backup_db_file)
    api.insert_default()
def check_config(project_db):
    """Migrate an older project_config schema in place, if needed.

    Projects created before the output_last_imported column existed are
    detected by inspecting the live column list; when missing, the three
    newer columns are added via a peewee SqliteMigrator.
    """
    conn = lib.open_db(project_db)
    if lib.exists_table(conn, 'project_config'):
        config_cols = lib.get_column_names(conn, 'project_config')
        col_names = [v['name'] for v in config_cols]
        # Presence of output_last_imported marks an already-migrated database.
        if 'output_last_imported' not in col_names:
            migrator = SqliteMigrator(SqliteDatabase(project_db))
            migrate(
                migrator.add_column('project_config', 'output_last_imported',
                                    DateTimeField(null=True)),
                migrator.add_column('project_config', 'imported_gis',
                                    BooleanField(default=False)),
                migrator.add_column('project_config', 'is_lte',
                                    BooleanField(default=False)),
            )

            # NOTE(review): assumed to run only as part of this one-time
            # migration (dropping plants_plt unconditionally on every call
            # would destroy data) — confirm intended nesting.
            if lib.exists_table(conn, 'plants_plt'):
                lib.delete_table(project_db, 'plants_plt')
def check_version(datasets_db, editor_version, compatibility_versions=('1.1.0', '1.1.1', '1.1.2', '1.2.0')):
    """Check that the datasets database is usable with this editor version.

    Parameters:
        datasets_db: path to swatplus_datasets.sqlite.
        editor_version: version string of the running editor.
        compatibility_versions: dataset versions accepted in addition to an
            exact editor-version match. (Changed from a list literal to a
            tuple: mutable default arguments are a Python footgun, and only
            membership tests are performed, so callers are unaffected.)

    Returns:
        None when compatible, otherwise a user-facing message asking the
        user to update their datasets database.
    """
    conn = db_lib.open_db(datasets_db)
    if not db_lib.exists_table(conn, 'version'):
        # No version table at all: database predates versioning entirely.
        return 'Please update your swatplus_datasets.sqlite to the most recent version, {new_version}, before creating your project.'.format(new_version=editor_version)

    SetupDatasetsDatabase.init(datasets_db)
    m = definitions.Version.get()
    if m.value not in compatibility_versions and m.value != editor_version:
        return 'Please update your swatplus_datasets.sqlite to the most recent version: {new_version}. Your version is {current_version}.'.format(new_version=editor_version, current_version=m.value)
    return None
def __init__(self, project_db, editor_version, project_name=None, datasets_db=None, constant_ps=True, is_lte=False, project_description=None):
    """Create a new SWAT+ project database from the datasets database.

    Backs up any existing project database first, creates the project
    tables, copies the default data, ensures the plants_plt days_mat
    column exists, and (when a prior database existed) runs the GIS import.

    Parameters:
        project_db: path where the project SQLite database lives/will live.
        editor_version: version of the running editor.
        project_name: optional name; read from existing config when None.
        datasets_db: path to swatplus_datasets.sqlite; read from existing
            config when None (project_config must then exist).
        constant_ps: passed through to GisImport.
        is_lte: create the project in SWAT+ lte mode.
        project_description: optional description; defaults to project_name.

    Exits the process (sys.exit) on fatal setup errors; on any exception
    during table creation the backup is rolled back first.
    """
    self.__abort = False

    base_path = os.path.dirname(project_db)
    rel_project_db = os.path.relpath(project_db, base_path)

    if datasets_db is None:
        conn = lib.open_db(project_db)
        if not lib.exists_table(conn, 'project_config'):
            sys.exit(
                'No datasets database provided and the project_config table in your project database does not exist. Please provide either a datasets database file or an existing project database.'
            )

        SetupProjectDatabase.init(project_db)
        try:
            config = Project_config.get()
            datasets_db = utils.full_path(project_db, config.reference_db)
            if project_name is None:
                project_name = config.project_name
        except Project_config.DoesNotExist:
            sys.exit('Could not retrieve project configuration data.')

    rel_datasets_db = os.path.relpath(datasets_db, base_path)

    ver_check = SetupDatasetsDatabase.check_version(datasets_db, editor_version)
    if ver_check is not None:
        sys.exit(ver_check)

    # Backup original db before beginning.
    # BUG FIX: backup_db_file must exist even when no prior database does,
    # because the except handler below (and GisImport) read it unconditionally;
    # previously a failure in a fresh project raised NameError instead of the
    # intended error path.
    do_gis = False
    backup_db_file = None
    if os.path.exists(project_db):
        do_gis = True
        try:
            self.emit_progress(2, 'Backing up GIS database...')
            filename, file_extension = os.path.splitext(rel_project_db)
            bak_filename = filename + '_bak_' + time.strftime('%Y%m%d-%H%M%S') + file_extension
            bak_dir = os.path.join(base_path, 'DatabaseBackups')
            if not os.path.exists(bak_dir):
                os.makedirs(bak_dir)
            backup_db_file = os.path.join(bak_dir, bak_filename)
            copyfile(project_db, backup_db_file)
        except IOError as err:
            sys.exit(err)

    try:
        SetupProjectDatabase.init(project_db, datasets_db)
        self.emit_progress(10, 'Creating database tables...')
        SetupProjectDatabase.create_tables()

        self.emit_progress(50, 'Copying data from SWAT+ datasets database...')
        description = project_description if project_description is not None else project_name
        SetupProjectDatabase.initialize_data(description, is_lte, overwrite_plants=OVERWRITE_PLANTS)

        config = Project_config.get_or_create_default(
            editor_version=editor_version,
            project_name=project_name,
            project_db=rel_project_db,
            reference_db=rel_datasets_db,
            project_directory='',
            is_lte=is_lte)

        conn = lib.open_db(project_db)
        plant_cols = lib.get_column_names(conn, 'plants_plt')
        plant_col_names = [v['name'] for v in plant_cols]
        if 'days_mat' not in plant_col_names:
            # Older schema: plnt_hu was renamed to days_mat; after renaming,
            # refresh values from the datasets database.
            migrator = SqliteMigrator(SqliteDatabase(project_db))
            migrate(
                migrator.rename_column('plants_plt', 'plnt_hu', 'days_mat'))

            # NOTE(review): assumed this refresh only applies after the rename
            # migration — confirm intended nesting.
            for p in project_plants:
                dp = dataset_plants.get_or_none(dataset_plants.name == p.name)
                if dp is not None:
                    p.days_mat = dp.days_mat
                else:
                    p.days_mat = 0
                p.save()
    except Exception as ex:
        if backup_db_file is not None:
            self.emit_progress(50, "Error occurred. Rolling back database...")
            SetupProjectDatabase.rollback(project_db, backup_db_file)
            self.emit_progress(100, "Error occurred.")
        sys.exit(str(ex))

    if do_gis:
        api = GisImport(project_db, True, constant_ps, backup_db_file)
        api.insert_default()
def add_wgn_stations_db(self, start_prog, total_prog):
    """Import weather generator stations and their monthly statistics from
    the external wgn database into the project database.

    Stations are filtered to a bounding box (plus a 0.5-degree tolerance)
    around the project's routing units or lte channels when either exists;
    otherwise every station is imported. Stations whose name already exists
    in Weather_wgn_cli are skipped. Progress is reported from start_prog
    over a span of total_prog (first half: stations; second half: monthly
    values).
    """
    if self.__abort:
        return

    conn = sqlite3.connect(self.wgn_database)
    conn.row_factory = sqlite3.Row  # allow row['col'] access below
    monthly_table = "{}_mon".format(self.wgn_table)

    if not db_lib.exists_table(conn, self.wgn_table):
        raise ValueError("Table {table} does not exist in {file}.".format(
            table=self.wgn_table, file=self.wgn_database))

    if not db_lib.exists_table(conn, monthly_table):
        raise ValueError("Table {table} does not exist in {file}.".format(
            table=monthly_table, file=self.wgn_database))

    if Rout_unit_con.select().count() > 0:
        # Bounding box of all routing-unit connections.
        coords = Rout_unit_con.select(
            fn.Min(Rout_unit_con.lat).alias("min_lat"),
            fn.Max(Rout_unit_con.lat).alias("max_lat"),
            fn.Min(Rout_unit_con.lon).alias("min_lon"),
            fn.Max(Rout_unit_con.lon).alias("max_lon")).get()

        query = "select * from {table_name} where lat between ? and ? and lon between ? and ? order by name".format(table_name=self.wgn_table)
        tol = 0.5  # widen the box by half a degree in every direction
        cursor = conn.cursor().execute(query, (coords.min_lat - tol, coords.max_lat + tol, coords.min_lon - tol, coords.max_lon + tol))
    elif Chandeg_con.select().count() > 0:
        # No routing units; fall back to the lte channel connections.
        coords = Chandeg_con.select(
            fn.Min(Chandeg_con.lat).alias("min_lat"),
            fn.Max(Chandeg_con.lat).alias("max_lat"),
            fn.Min(Chandeg_con.lon).alias("min_lon"),
            fn.Max(Chandeg_con.lon).alias("max_lon")).get()

        query = "select * from {table_name} where lat between ? and ? and lon between ? and ? order by name".format(table_name=self.wgn_table)
        tol = 0.5
        cursor = conn.cursor().execute(query, (coords.min_lat - tol, coords.max_lat + tol, coords.min_lon - tol, coords.max_lon + tol))
    else:
        # No spatial information available; import every station.
        query = "select * from {table_name} order by name".format(table_name=self.wgn_table)
        cursor = conn.cursor().execute(query)

    wgns = []
    ids = []  # source-database ids; used below to fetch monthly rows
    data = cursor.fetchall()
    records = len(data)
    #print(records)
    i = 1
    print('\t - Preparing weather generator')
    for row in data:
        if self.__abort:
            return

        try:
            # Skip stations already present (matched by name).
            existing = Weather_wgn_cli.get(Weather_wgn_cli.name == row['name'])
        except Weather_wgn_cli.DoesNotExist:
            prog = round(i * (total_prog / 2) / records) + start_prog
            # self.emit_progress(prog, "Preparing weather generator {name}...".format(name=row['name']))
            i += 1

            ids.append(row['id'])
            wgn = {
                "id": row['id'],
                "name": row['name'],
                "lat": row['lat'],
                "lon": row['lon'],
                "elev": row['elev'],
                "rain_yrs": row['rain_yrs']
            }
            wgns.append(wgn)

    prog = start_prog if records < 1 else round(i * (total_prog / 2) / records) + start_prog
    # self.emit_progress(prog, "Inserting {total} weather generators...".format(total=len(ids)))
    db_lib.bulk_insert(project_base.db, Weather_wgn_cli, wgns)

    # Chunk the id array so we don't hit the SQLite parameter limit!
    max_length = 999
    id_chunks = [ids[i:i + max_length] for i in range(0, len(ids), max_length)]

    i = 1
    start_prog = start_prog + (total_prog / 2)  # second half of the progress span

    mon_count_query = "select count(*) from {table_name}".format(table_name=monthly_table)
    total_mon_rows = conn.cursor().execute(mon_count_query).fetchone()[0]
    current_total = 0

    for chunk in id_chunks:
        monthly_values = []
        mon_query = "select * from {table_name} where wgn_id in ({ids})".format(table_name=monthly_table, ids=",".join('?' * len(chunk)))
        mon_cursor = conn.cursor().execute(mon_query, chunk)
        mon_data = mon_cursor.fetchall()
        mon_records = len(mon_data)
        i = 1
        for row in mon_data:
            if self.__abort:
                return

            # Only report progress once per station (12 monthly rows each).
            if i == 1 or (i % 12 == 0):
                prog = round(i * (total_prog / 2) / mon_records) + start_prog
                # self.emit_progress(prog, "Preparing monthly values {i}/{total}...".format(i=i, total=mon_records))
            i += 1

            mon = {
                "weather_wgn_cli": row['wgn_id'],
                "month": row['month'],
                "tmp_max_ave": row['tmp_max_ave'],
                "tmp_min_ave": row['tmp_min_ave'],
                "tmp_max_sd": row['tmp_max_sd'],
                "tmp_min_sd": row['tmp_min_sd'],
                "pcp_ave": row['pcp_ave'],
                "pcp_sd": row['pcp_sd'],
                "pcp_skew": row['pcp_skew'],
                "wet_dry": row['wet_dry'],
                "wet_wet": row['wet_wet'],
                "pcp_days": row['pcp_days'],
                "pcp_hhr": row['pcp_hhr'],
                "slr_ave": row['slr_ave'],
                "dew_ave": row['dew_ave'],
                "wnd_ave": row['wnd_ave']
            }
            monthly_values.append(mon)

        prog = round(i * (total_prog / 2) / mon_records) + start_prog
        current_total = current_total + mon_records
        # self.emit_progress(prog, "Inserting monthly values {rec}/{total}...".format(rec=current_total, total=total_mon_rows))
        db_lib.bulk_insert(project_base.db, Weather_wgn_cli_mon, monthly_values)
def post(self):
    """Run SWAT+ Check analysis over a model's output database.

    Expects a JSON body with project_db and output_db paths. Verifies that
    all required average-annual output tables exist (400-level abort message
    tells the user to re-run the model with the right print options), then
    assembles each report section and returns them as a JSON object.
    """
    parser = reqparse.RequestParser()
    parser.add_argument('project_db', type=str, required=True, location='json')
    parser.add_argument('output_db', type=str, required=True, location='json')
    args = parser.parse_args(strict=False)

    SetupOutputDatabase.init(args.output_db)
    SetupProjectDatabase.init(args.project_db)

    # Every section below reads from at least one of these tables.
    required_tables = [
        'basin_wb_aa', 'basin_nb_aa', 'basin_pw_aa', 'basin_ls_aa',
        'basin_psc_aa', 'basin_aqu_aa', 'aquifer_aa', 'recall_aa',
        'basin_sd_cha_aa', 'channel_sd_aa', 'channel_sdmorph_aa',
        'hru_ls_aa', 'hru_wb_aa', 'hru_pw_aa', 'crop_yld_aa'
    ]
    conn = lib.open_db(args.output_db)
    for table in required_tables:
        if not lib.exists_table(conn, table):
            abort(
                500,
                message=
                'Could not load SWAT+ Check because the table "{}" does not exist in your output database. Re-run your model and check all yearly and average annual files under the print options, and keep the analyze output box checked.'
                .format(table))

    try:
        # Optional tables: sections degrade gracefully when these are absent.
        has_res = lib.exists_table(conn, 'basin_res_aa')
        has_yr_res = lib.exists_table(conn, 'reservoir_yr')
        has_project_config = lib.exists_table(conn, 'project_config')

        # Total watershed area from the project's routing units.
        total_area = connect.Rout_unit_con.select(
            fn.Sum(connect.Rout_unit_con.area)).scalar()

        # Basin-wide average-annual rows feeding the report sections.
        wb = waterbal.Basin_wb_aa.get_or_none()
        aqu = aquifer.Basin_aqu_aa.get_or_none()
        nb = nutbal.Basin_nb_aa.get_or_none()
        pw = plantwx.Basin_pw_aa.get_or_none()
        ls = losses.Basin_ls_aa.get_or_none()
        basin_cha = channel.Basin_sd_cha_aa.get_or_none()
        cha = channel.Channel_sd_aa.select()

        info = get_info(has_project_config)
        hydrology = get_hyd(wb, aqu)
        ncycle = get_ncycle(nb, pw, ls)
        pcycle = get_pcycle(nb, pw, ls)
        pg = get_pg(nb, pw)
        landscape = get_landscape(ls, ncycle, aqu)
        landuse = get_landuse()
        psrc = get_psrc(ls, total_area)
        res = get_res(has_res, has_yr_res)
        instream = get_instream(basin_cha, cha, wb, ls, total_area, psrc)
        sed = get_sed(instream, psrc, ls, wb)

        return {
            'setup': info.toJson(),
            'hydrology': hydrology.toJson(),
            'nitrogenCycle': ncycle.toJson(),
            'phosphorusCycle': pcycle.toJson(),
            'plantGrowth': pg.toJson(),
            'landscapeNutrientLosses': landscape.toJson(),
            'landUseSummary': landuse.toJson(),
            'pointSources': psrc.toJson(),
            'reservoirs': res.toJson(),
            'instreamProcesses': instream.toJson(),
            'sediment': sed.toJson()
        }
    except Exception as ex:
        # Surface the full traceback to the client for diagnostics.
        abort(500, message='Error loading SWAT+ Check. Exception: {ex} {tb}'.format(ex=str(ex), tb=traceback.format_exc()))
def get(self, project_db):
    """Return a project summary dashboard: status flags, object totals,
    land-use distribution, scenarios on disk, and GIS/editor versions.

    Aborts with 400 when the project has not been set up (no chandeg_con
    table) and 404 when the configuration row is missing.
    """
    SetupProjectDatabase.init(project_db)
    conn = lib.open_db(project_db)
    if not lib.exists_table(conn, 'chandeg_con'):
        abort(400, message='Project has not been set up.')

    try:
        m = Project_config.get()
        gis_type = 'QSWAT+ ' if m.gis_type == 'qgis' else 'GIS '
        gis_text = '' if m.gis_version is None else gis_type + m.gis_version

        # Land-use distribution chart data only exists for GIS-based projects.
        landuse_distrib = []
        if m.gis_version is not None:
            landuse_distrib = gis.Gis_hrus.select(
                fn.Lower(gis.Gis_hrus.landuse).alias('name'),
                fn.Sum(gis.Gis_hrus.arslp).alias('y')).group_by(gis.Gis_hrus.landuse)

        # Scan the Scenarios directory for saved scenario databases
        # (the active Default scenario is excluded).
        current_path = os.path.dirname(project_db)
        scenarios_path = os.path.join(current_path, 'Scenarios')
        scenarios = []
        if os.path.isdir(scenarios_path):
            for p in os.listdir(scenarios_path):
                if os.path.isdir(os.path.join(scenarios_path, p)) and p != 'Default' and p != 'default':
                    db_files = [f for f in os.listdir(os.path.join(scenarios_path, p)) if f.endswith('.sqlite')]
                    if len(db_files) > 0:
                        scenarios.append({'name': p, 'path': os.path.join(scenarios_path, p, db_files[0])})

        # BUG FIX: Object_cnt may have no row; previously oc.name raised an
        # uncaught AttributeError (HTTP 500) instead of returning no description.
        oc = Object_cnt.get_or_none()
        info = {
            'name': m.project_name,
            'description': oc.name if oc is not None else None,
            'file_path': current_path,
            'last_modified': utils.json_encode_datetime(datetime.fromtimestamp(os.path.getmtime(project_db))),
            'is_lte': m.is_lte,
            'status': {
                'imported_weather': climate.Weather_sta_cli.select().count() > 0 and climate.Weather_wgn_cli.select().count() > 0,
                'wrote_inputs': m.input_files_last_written is not None,
                'ran_swat': m.swat_last_run is not None,
                'imported_output': m.output_last_imported is not None,
                'using_gis': m.gis_version is not None
            },
            'simulation': model_to_dict(simulation.Time_sim.get_or_none()),
            'total_area': connect.Rout_unit_con.select(fn.Sum(connect.Rout_unit_con.area)).scalar(),  #gis.Gis_subbasins.select(fn.Sum(gis.Gis_subbasins.area)).scalar(),
            'totals': {
                'hru': connect.Hru_con.select().count(),
                'lhru': connect.Hru_lte_con.select().count(),
                'rtu': connect.Rout_unit_con.select().count(),
                'mfl': connect.Modflow_con.select().count(),
                'aqu': connect.Aquifer_con.select().count(),
                'cha': connect.Channel_con.select().count(),
                'res': connect.Reservoir_con.select().count(),
                'rec': connect.Recall_con.select().count(),
                'exco': connect.Exco_con.select().count(),
                'dlr': connect.Delratio_con.select().count(),
                'out': connect.Outlet_con.select().count(),
                'lcha': connect.Chandeg_con.select().count(),
                'aqu2d': connect.Aquifer2d_con.select().count(),
                'lsus': regions.Ls_unit_def.select().count(),
                'subs': gis.Gis_subbasins.select().count()
            },
            'editor_version': m.editor_version,
            'gis_version': gis_text,
            'charts': {
                'landuse': [{'name': o.name, 'y': o.y} for o in landuse_distrib]
            },
            'scenarios': scenarios
        }
        return info
    except Project_config.DoesNotExist:
        abort(404, message="Could not retrieve project configuration data.")
def insert(db_file_name, db_table, wgn_table, wgn_monthly_value_table, start_id=1, is_mdb=True, insert_db_obj=db):
    """Import weather generator stations and their 12 monthly statistic rows
    from a source SQLite database into the given peewee tables.

    Parameters:
        db_file_name: path to the source SQLite database.
        db_table: source station table name (must have an OBJECTID column).
        wgn_table: destination peewee model for stations.
        wgn_monthly_value_table: destination peewee model for monthly values.
        start_id: id assigned to the first imported station; incremented
            once per station.
        is_mdb: unused; retained for backward compatibility (the Access/.mdb
            pyodbc path is disabled — see note below).
        insert_db_obj: peewee database used for the bulk inserts.

    Raises:
        ValueError: when db_table does not exist in the source database.
    """
    #if is_mdb:
    #odbc_conn_str = 'DRIVER={Microsoft Access Driver (*.mdb)};DBQ=%s' % db_file_name
    #conn = pyodbc.connect(odbc_conn_str)
    #else:
    #conn = sqlite3.connect(db_file_name)

    conn = sqlite3.connect(db_file_name)
    try:
        if not db_lib.exists_table(conn, db_table):
            raise ValueError("Table {table} does not exist in {file}.".format(
                table=db_table, file=db_file_name))

        cursor = conn.cursor().execute(
            'select * from {table_name} order by OBJECTID'.format(table_name=db_table))

        # Map column name -> position once, instead of an O(ncols) list.index()
        # scan for every field of every row.
        col_idx = {desc[0]: pos for pos, desc in enumerate(cursor.description)}

        wgns = []
        monthly_values = []
        c = 0
        next_id = start_id  # renamed from 'id' to avoid shadowing the builtin
        for row in cursor.fetchall():
            wgn = {
                'id': next_id,
                'name': row[col_idx['STATION']],
                'desc': row[col_idx['LSTATION']],
                'state': None if row[col_idx['STATE']] == 'NA' else row[col_idx['STATE']],
                'lat': row[col_idx['WLATITUDE']],
                'lon': row[col_idx['WLONGITUDE']],
                'elev': row[col_idx['WELEV']],
                'rain_yrs': row[col_idx['RAIN_YRS']]
            }
            wgns.append(wgn)

            # Source stores monthly stats as 12 suffixed column groups.
            for i in range(1, 13):
                mval = {
                    'wgn': next_id,
                    'month': i,
                    'tmp_max_ave': row[col_idx['TMPMX%s' % i]],
                    'tmp_min_ave': row[col_idx['TMPMN%s' % i]],
                    'tmp_max_sd': row[col_idx['TMPSTDMX%s' % i]],
                    'tmp_min_sd': row[col_idx['TMPSTDMN%s' % i]],
                    'pcp_ave': row[col_idx['PCPMM%s' % i]],
                    'pcp_sd': row[col_idx['PCPSTD%s' % i]],
                    'pcp_skew': row[col_idx['PCPSKW%s' % i]],
                    'wet_dry': row[col_idx['PR_W1_%s' % i]],
                    'wet_wet': row[col_idx['PR_W2_%s' % i]],
                    'pcp_days': row[col_idx['PCPD%s' % i]],
                    'pcp_hhr': row[col_idx['RAINHHMX%s' % i]],
                    'slr_ave': row[col_idx['SOLARAV%s' % i]],
                    'dew_ave': row[col_idx['DEWPT%s' % i]],
                    'wnd_ave': row[col_idx['WNDAV%s' % i]]
                }
                monthly_values.append(mval)

            next_id += 1
            c += 1
            if c == 100:
                # Flush in batches of 100 stations to bound memory use.
                db_lib.bulk_insert(insert_db_obj, wgn_table, wgns)
                db_lib.bulk_insert(insert_db_obj, wgn_monthly_value_table, monthly_values)
                print('Saved {id} - {id2}'.format(id=wgns[0]['id'], id2=wgns[len(wgns) - 1]['id']))
                c = 0
                wgns.clear()
                monthly_values.clear()

        # Final partial batch.
        if len(wgns) > 0:
            db_lib.bulk_insert(insert_db_obj, wgn_table, wgns)
            db_lib.bulk_insert(insert_db_obj, wgn_monthly_value_table, monthly_values)
    finally:
        # The original leaked the connection; always release it.
        conn.close()