Beispiel #1
0
    def get(self, project_db):
        """Return the project configuration row as a dict, with the project
        description merged in from the object_cnt table when present.

        Aborts with 400 when the project's GIS version is unsupported by
        this editor, and with 404 when no project_config row exists.
        """
        check_config(project_db)

        SetupProjectDatabase.init(project_db)
        try:
            config = Project_config.get()
            result = get_model_to_dict_dates(config, project_db)

            gis_ver = config.gis_version
            if gis_ver is not None:
                supported = (import_gis.is_supported_version(gis_ver)
                             or import_gis_legacy.is_supported_version(gis_ver))
                if not supported:
                    abort(
                        400,
                        message=
                        "This version of SWAT+ Editor does not support QSWAT+ {uv}."
                        .format(uv=gis_ver))

            # Project description is stored on object_cnt (when that table
            # exists and has a row); default to None otherwise.
            description = None
            conn = lib.open_db(project_db)
            if lib.exists_table(conn, 'object_cnt'):
                counts = Object_cnt.get_or_none()
                if counts is not None:
                    description = counts.name
            result["project_description"] = description

            return result
        except Project_config.DoesNotExist:
            abort(404,
                  message="Could not retrieve project configuration data.")
    def __init__(self,
                 project_db,
                 editor_version,
                 project_name=None,
                 datasets_db=None,
                 constant_ps=True,
                 is_lte=False):
        """Upgrade an existing project database and re-import GIS defaults.

        project_db -- path to the project SQLite database file
        editor_version -- current SWAT+ Editor version string
        project_name -- optional project name; overwritten from
            project_config when datasets_db is resolved from the project
        datasets_db -- path to swatplus_datasets.sqlite; when None it is
            resolved from the project's project_config.reference_db
        constant_ps -- passed through to GisImport (point-source handling —
            semantics defined by GisImport, not visible here)
        is_lte -- flag saved on project_config marking an LTE project

        Exits the process via sys.exit() on any unrecoverable error.
        """
        self.__abort = False

        base_path = os.path.dirname(project_db)
        rel_project_db = os.path.relpath(project_db, base_path)

        if datasets_db is None:
            # No datasets db given: resolve it from the existing project's
            # configuration. Requires a valid project_config table.
            conn = lib.open_db(project_db)
            if not lib.exists_table(conn, 'project_config'):
                sys.exit(
                    'No datasets database provided and the project_config table in your project database does not exist. Please provide either a datasets database file or an existing project database.'
                )

            SetupProjectDatabase.init(project_db)
            try:
                config = Project_config.get()
                datasets_db = utils.full_path(project_db, config.reference_db)
                project_name = config.project_name
            except Project_config.DoesNotExist:
                sys.exit('Could not retrieve project configuration data.')

        # NOTE(review): rel_datasets_db is computed but not used below in
        # this block — presumably kept for parity with the project-creation
        # constructor; confirm before removing.
        rel_datasets_db = os.path.relpath(datasets_db, base_path)

        # Run updates if needed
        SetupProjectDatabase.init(project_db, datasets_db)
        config = Project_config.get()
        if config.editor_version in update_project.available_to_update:
            update_project.UpdateProject(project_db,
                                         editor_version,
                                         update_project_values=True)

        # Backup original db before beginning: timestamped copy placed in a
        # DatabaseBackups folder alongside the project database.
        try:
            self.emit_progress(2, 'Backing up project database...')
            filename, file_extension = os.path.splitext(rel_project_db)
            bak_filename = filename + '_bak_' + time.strftime(
                '%Y%m%d-%H%M%S') + file_extension
            bak_dir = os.path.join(base_path, 'DatabaseBackups')
            if not os.path.exists(bak_dir):
                os.makedirs(bak_dir)
            backup_db_file = os.path.join(bak_dir, bak_filename)
            copyfile(project_db, backup_db_file)
        except IOError as err:
            sys.exit(err)

        self.emit_progress(5, 'Updating project settings...')
        config = Project_config.get(
        )  # Re-read the row: the update step above may have modified it
        config.imported_gis = False
        config.is_lte = is_lte
        config.save()

        # Re-run GIS import defaults; backup_db_file lets it roll back.
        api = GisImport(project_db, True, constant_ps, backup_db_file)
        api.insert_default()
Beispiel #3
0
	def check_version(datasets_db, editor_version, compatibility_versions=('1.1.0', '1.1.1', '1.1.2', '1.2.0')):
		"""Validate that the datasets database version is usable by this editor.

		datasets_db -- path to swatplus_datasets.sqlite
		editor_version -- current SWAT+ Editor version string
		compatibility_versions -- versions other than editor_version that are
			still accepted. A tuple (not a list) so the default argument is
			immutable and cannot be accidentally shared/mutated across calls.

		Returns an error-message string when the database has no version table
		or its version is neither editor_version nor a compatible one;
		returns None when the version check passes.
		"""
		conn = db_lib.open_db(datasets_db)
		if db_lib.exists_table(conn, 'version'):
			SetupDatasetsDatabase.init(datasets_db)
			m = definitions.Version.get()
			if not (m.value in compatibility_versions or m.value == editor_version):
				return 'Please update your swatplus_datasets.sqlite to the most recent version: {new_version}. Your version is {current_version}.'.format(new_version=editor_version, current_version=m.value)
		else:
			return 'Please update your swatplus_datasets.sqlite to the most recent version, {new_version}, before creating your project.'.format(new_version=editor_version)

		return None
Beispiel #4
0
def check_config(project_db):
    """Bring an older project_config table up to the current schema.

    When the project_config table exists but lacks the newer
    output_last_imported column, add the three columns introduced later
    (output_last_imported, imported_gis, is_lte) and drop the stale
    plants_plt table so it can be rebuilt. No-op otherwise.
    """
    conn = lib.open_db(project_db)
    if not lib.exists_table(conn, 'project_config'):
        return

    column_names = [col['name'] for col in lib.get_column_names(conn, 'project_config')]
    if 'output_last_imported' in column_names:
        return

    migrator = SqliteMigrator(SqliteDatabase(project_db))
    migrate(
        migrator.add_column('project_config', 'output_last_imported',
                            DateTimeField(null=True)),
        migrator.add_column('project_config', 'imported_gis',
                            BooleanField(default=False)),
        migrator.add_column('project_config', 'is_lte',
                            BooleanField(default=False)),
    )

    # Old-schema projects carry an incompatible plants_plt; remove it so it
    # is recreated from the datasets database.
    if lib.exists_table(conn, 'plants_plt'):
        lib.delete_table(project_db, 'plants_plt')
Beispiel #5
0
    def __init__(self,
                 project_db,
                 editor_version,
                 project_name=None,
                 datasets_db=None,
                 constant_ps=True,
                 is_lte=False,
                 project_description=None):
        """Create (or re-create) a SWAT+ project database from the datasets db.

        project_db -- path to the project SQLite database to create/overwrite
        editor_version -- current SWAT+ Editor version string
        project_name -- optional name; read from an existing project_config
            when not given and datasets_db must be resolved from the project
        datasets_db -- path to swatplus_datasets.sqlite; resolved from the
            existing project's project_config.reference_db when None
        constant_ps -- passed through to GisImport (point-source handling)
        is_lte -- create the project as an LTE project
        project_description -- stored description; defaults to project_name

        Exits the process via sys.exit() on any unrecoverable error, rolling
        the database back to the pre-run backup when one was made.
        """
        self.__abort = False

        base_path = os.path.dirname(project_db)
        rel_project_db = os.path.relpath(project_db, base_path)

        if datasets_db is None:
            # No datasets db given: resolve it from the existing project's
            # configuration. Requires a valid project_config table.
            conn = lib.open_db(project_db)
            if not lib.exists_table(conn, 'project_config'):
                sys.exit(
                    'No datasets database provided and the project_config table in your project database does not exist. Please provide either a datasets database file or an existing project database.'
                )

            SetupProjectDatabase.init(project_db)
            try:
                config = Project_config.get()
                datasets_db = utils.full_path(project_db, config.reference_db)
                if project_name is None:
                    project_name = config.project_name
            except Project_config.DoesNotExist:
                sys.exit('Could not retrieve project configuration data.')

        rel_datasets_db = os.path.relpath(datasets_db, base_path)

        ver_check = SetupDatasetsDatabase.check_version(
            datasets_db, editor_version)
        if ver_check is not None:
            sys.exit(ver_check)

        # Backup original db before beginning.
        # BUGFIX: backup_db_file must be initialized here. It was previously
        # only assigned inside the os.path.exists branch, so the exception
        # handler below raised UnboundLocalError (masking the real error)
        # whenever creating a brand-new project failed.
        backup_db_file = None
        do_gis = False
        if os.path.exists(project_db):
            do_gis = True
            try:
                self.emit_progress(2, 'Backing up GIS database...')
                filename, file_extension = os.path.splitext(rel_project_db)
                bak_filename = filename + '_bak_' + time.strftime(
                    '%Y%m%d-%H%M%S') + file_extension
                bak_dir = os.path.join(base_path, 'DatabaseBackups')
                if not os.path.exists(bak_dir):
                    os.makedirs(bak_dir)
                backup_db_file = os.path.join(bak_dir, bak_filename)
                copyfile(project_db, backup_db_file)
            except IOError as err:
                sys.exit(err)

        try:
            SetupProjectDatabase.init(project_db, datasets_db)
            self.emit_progress(10, 'Creating database tables...')
            SetupProjectDatabase.create_tables()
            self.emit_progress(50,
                               'Copying data from SWAT+ datasets database...')
            description = project_description if project_description is not None else project_name
            SetupProjectDatabase.initialize_data(
                description, is_lte, overwrite_plants=OVERWRITE_PLANTS)

            config = Project_config.get_or_create_default(
                editor_version=editor_version,
                project_name=project_name,
                project_db=rel_project_db,
                reference_db=rel_datasets_db,
                project_directory='',
                is_lte=is_lte)

            # Older datasets used plnt_hu; rename to days_mat and backfill
            # values from the datasets database where available.
            conn = lib.open_db(project_db)
            plant_cols = lib.get_column_names(conn, 'plants_plt')
            plant_col_names = [v['name'] for v in plant_cols]
            if 'days_mat' not in plant_col_names:
                migrator = SqliteMigrator(SqliteDatabase(project_db))
                migrate(
                    migrator.rename_column('plants_plt', 'plnt_hu',
                                           'days_mat'))
                # NOTE(review): project_plants / dataset_plants appear to be
                # module-level model aliases — confirm against the imports.
                for p in project_plants:
                    dp = dataset_plants.get_or_none(
                        dataset_plants.name == p.name)
                    if dp is not None:
                        p.days_mat = dp.days_mat
                    else:
                        p.days_mat = 0
                    p.save()
        except Exception as ex:
            if backup_db_file is not None:
                self.emit_progress(50,
                                   "Error occurred. Rolling back database...")
                SetupProjectDatabase.rollback(project_db, backup_db_file)
                self.emit_progress(100, "Error occurred.")
            sys.exit(str(ex))

        if do_gis:
            api = GisImport(project_db, True, constant_ps, backup_db_file)
            api.insert_default()
    def post(self):
        """Build the SWAT+ Check report from a model run's output database.

        Expects a JSON body with 'project_db' and 'output_db' paths. Aborts
        with 500 when a required average-annual output table is missing or
        when report generation raises. Returns a dict of report sections,
        each serialized via its toJson() method.
        """
        parser = reqparse.RequestParser()
        parser.add_argument('project_db',
                            type=str,
                            required=True,
                            location='json')
        parser.add_argument('output_db',
                            type=str,
                            required=True,
                            location='json')
        args = parser.parse_args(strict=False)

        SetupOutputDatabase.init(args.output_db)
        SetupProjectDatabase.init(args.project_db)

        # Average-annual tables SWAT+ Check cannot run without; produced only
        # when the model was run with the right print/analyze options.
        required_tables = [
            'basin_wb_aa', 'basin_nb_aa', 'basin_pw_aa', 'basin_ls_aa',
            'basin_psc_aa', 'basin_aqu_aa', 'aquifer_aa', 'recall_aa',
            'basin_sd_cha_aa', 'channel_sd_aa', 'channel_sdmorph_aa',
            'hru_ls_aa', 'hru_wb_aa', 'hru_pw_aa', 'crop_yld_aa'
        ]

        conn = lib.open_db(args.output_db)
        for table in required_tables:
            if not lib.exists_table(conn, table):
                abort(
                    500,
                    message=
                    'Could not load SWAT+ Check because the table "{}" does not exist in your output database. Re-run your model and check all yearly and average annual files under the print options, and keep the analyze output box checked.'
                    .format(table))

        try:
            # Optional tables: their presence toggles report sections.
            has_res = lib.exists_table(conn, 'basin_res_aa')
            has_yr_res = lib.exists_table(conn, 'reservoir_yr')
            has_project_config = lib.exists_table(conn, 'project_config')

            # Total watershed area summed over routing-unit connections.
            total_area = connect.Rout_unit_con.select(
                fn.Sum(connect.Rout_unit_con.area)).scalar()

            # Basin-level average-annual rows (single row each, or None).
            wb = waterbal.Basin_wb_aa.get_or_none()
            aqu = aquifer.Basin_aqu_aa.get_or_none()
            nb = nutbal.Basin_nb_aa.get_or_none()
            pw = plantwx.Basin_pw_aa.get_or_none()
            ls = losses.Basin_ls_aa.get_or_none()
            basin_cha = channel.Basin_sd_cha_aa.get_or_none()
            cha = channel.Channel_sd_aa.select()

            # Assemble each report section; later sections reuse earlier
            # ones (e.g. instream needs psrc, sed needs instream).
            info = get_info(has_project_config)
            hydrology = get_hyd(wb, aqu)
            ncycle = get_ncycle(nb, pw, ls)
            pcycle = get_pcycle(nb, pw, ls)
            pg = get_pg(nb, pw)
            landscape = get_landscape(ls, ncycle, aqu)
            landuse = get_landuse()
            psrc = get_psrc(ls, total_area)
            res = get_res(has_res, has_yr_res)
            instream = get_instream(basin_cha, cha, wb, ls, total_area, psrc)
            sed = get_sed(instream, psrc, ls, wb)

            return {
                'setup': info.toJson(),
                'hydrology': hydrology.toJson(),
                'nitrogenCycle': ncycle.toJson(),
                'phosphorusCycle': pcycle.toJson(),
                'plantGrowth': pg.toJson(),
                'landscapeNutrientLosses': landscape.toJson(),
                'landUseSummary': landuse.toJson(),
                'pointSources': psrc.toJson(),
                'reservoirs': res.toJson(),
                'instreamProcesses': instream.toJson(),
                'sediment': sed.toJson()
            }
        except Exception as ex:
            # Broad catch is deliberate here: surface any report failure to
            # the client with a traceback rather than a bare 500.
            abort(500,
                  message='Error loading SWAT+ Check. Exception: {ex} {tb}'.
                  format(ex=str(ex), tb=traceback.format_exc()))
Beispiel #7
0
    def get(self, project_db):
        """Return a dashboard summary of the project: description, status
        flags, simulation period, object counts, land-use distribution, and
        sibling scenario databases.

        Aborts with 400 when the project has not been set up (no chandeg_con
        table) and 404 when the project configuration cannot be read.
        """
        SetupProjectDatabase.init(project_db)

        conn = lib.open_db(project_db)
        if not lib.exists_table(conn, 'chandeg_con'):
            abort(400, message='Project has not been set up.')

        try:
            m = Project_config.get()

            gis_type = 'QSWAT+ ' if m.gis_type == 'qgis' else 'GIS '
            gis_text = '' if m.gis_version is None else gis_type + m.gis_version

            # Land-use distribution only exists for GIS-based projects.
            landuse_distrib = []
            if m.gis_version is not None:
                landuse_distrib = gis.Gis_hrus.select(
                    fn.Lower(gis.Gis_hrus.landuse).alias('name'),
                    fn.Sum(gis.Gis_hrus.arslp).alias('y')).group_by(
                        gis.Gis_hrus.landuse)

            # Collect sibling scenarios: any non-default folder under
            # Scenarios that contains at least one .sqlite file.
            current_path = os.path.dirname(project_db)
            scenarios_path = os.path.join(current_path, 'Scenarios')
            scenarios = []
            if os.path.isdir(scenarios_path):
                for p in os.listdir(scenarios_path):
                    if os.path.isdir(os.path.join(
                            scenarios_path,
                            p)) and p != 'Default' and p != 'default':
                        db_files = [
                            f for f in os.listdir(
                                os.path.join(scenarios_path, p))
                            if f.endswith('.sqlite')
                        ]
                        if len(db_files) > 0:
                            scenarios.append({
                                'name':
                                p,
                                'path':
                                os.path.join(scenarios_path, p, db_files[0])
                            })

            # BUGFIX: get_or_none may return None (the config GET endpoint
            # guards this); dereferencing oc.name unconditionally raised
            # AttributeError for projects without an object_cnt row.
            oc = Object_cnt.get_or_none()
            description = oc.name if oc is not None else None

            info = {
                'name':
                m.project_name,
                'description':
                description,
                'file_path':
                current_path,
                'last_modified':
                utils.json_encode_datetime(
                    datetime.fromtimestamp(os.path.getmtime(project_db))),
                'is_lte':
                m.is_lte,
                'status': {
                    'imported_weather':
                    climate.Weather_sta_cli.select().count() > 0
                    and climate.Weather_wgn_cli.select().count() > 0,
                    'wrote_inputs':
                    m.input_files_last_written is not None,
                    'ran_swat':
                    m.swat_last_run is not None,
                    'imported_output':
                    m.output_last_imported is not None,
                    'using_gis':
                    m.gis_version is not None
                },
                'simulation':
                model_to_dict(simulation.Time_sim.get_or_none()),
                'total_area':
                connect.Rout_unit_con.
                select(fn.Sum(connect.Rout_unit_con.area)).scalar(
                ),  #gis.Gis_subbasins.select(fn.Sum(gis.Gis_subbasins.area)).scalar(),
                'totals': {
                    'hru': connect.Hru_con.select().count(),
                    'lhru': connect.Hru_lte_con.select().count(),
                    'rtu': connect.Rout_unit_con.select().count(),
                    'mfl': connect.Modflow_con.select().count(),
                    'aqu': connect.Aquifer_con.select().count(),
                    'cha': connect.Channel_con.select().count(),
                    'res': connect.Reservoir_con.select().count(),
                    'rec': connect.Recall_con.select().count(),
                    'exco': connect.Exco_con.select().count(),
                    'dlr': connect.Delratio_con.select().count(),
                    'out': connect.Outlet_con.select().count(),
                    'lcha': connect.Chandeg_con.select().count(),
                    'aqu2d': connect.Aquifer2d_con.select().count(),
                    'lsus': regions.Ls_unit_def.select().count(),
                    'subs': gis.Gis_subbasins.select().count()
                },
                'editor_version':
                m.editor_version,
                'gis_version':
                gis_text,
                'charts': {
                    'landuse': [{
                        'name': o.name,
                        'y': o.y
                    } for o in landuse_distrib]
                },
                'scenarios':
                scenarios
            }

            return info
        except Project_config.DoesNotExist:
            abort(404,
                  message="Could not retrieve project configuration data.")
Beispiel #8
0
	def updates_for_1_1_0(self, project_db, datasets_db, rollback_db):
		try:
			conn = lib.open_db(project_db)
			aquifer_cols = lib.get_column_names(conn, 'aquifer_aqu')
			aquifer_col_names = [v['name'] for v in aquifer_cols]
			if 'gw_dp' not in aquifer_col_names:
				sys.exit('It appears some of your tables may have already been migrated even though your project version is still listed at 1.0.0. Please check your tables, restart the upgrade using the backup database in the DatabaseBackups folder, or contact support.')

			self.emit_progress(10, 'Running migrations...')
			base.db.create_tables([aquifer.Initial_aqu]) 
			migrator = SqliteMigrator(SqliteDatabase(project_db))
			migrate(
				migrator.rename_column('aquifer_aqu', 'gw_dp', 'dep_bot'),
				migrator.rename_column('aquifer_aqu', 'gw_ht', 'dep_wt'),
				migrator.drop_column('aquifer_aqu', 'delay'),
				migrator.add_column('aquifer_aqu', 'bf_max', DoubleField(default=1)),
				migrator.add_column('aquifer_aqu', 'init_id', ForeignKeyField(aquifer.Initial_aqu, aquifer.Initial_aqu.id, on_delete='SET NULL', null=True)),
				
				migrator.drop_column('codes_bsn', 'atmo_dep'),
				migrator.add_column('codes_bsn', 'atmo_dep', CharField(default='a')),

				migrator.drop_column('cal_parms_cal', 'units'),
				migrator.add_column('cal_parms_cal', 'units', CharField(null=True)),
				migrator.rename_table('codes_cal', 'codes_sft'),
				migrator.rename_column('codes_sft', 'landscape', 'hyd_hru'),
				migrator.rename_column('codes_sft', 'hyd', 'hyd_hrulte'),
				migrator.rename_table('ls_parms_cal', 'wb_parms_sft'),
				migrator.rename_table('ch_parms_cal', 'ch_sed_parms_sft'),
				migrator.rename_table('pl_parms_cal', 'plant_parms_sft'),
				
				migrator.drop_column('channel_cha', 'pest_id'),
				migrator.drop_column('channel_cha', 'ls_link_id'),
				migrator.drop_column('channel_cha', 'aqu_link_id'),
				migrator.drop_column('initial_cha', 'vol'),
				migrator.drop_column('initial_cha', 'sed'),
				migrator.drop_column('initial_cha', 'ptl_n'),
				migrator.drop_column('initial_cha', 'no3_n'),
				migrator.drop_column('initial_cha', 'no2_n'),
				migrator.drop_column('initial_cha', 'nh4_n'),
				migrator.drop_column('initial_cha', 'ptl_p'),
				migrator.drop_column('initial_cha', 'sol_p'),
				migrator.drop_column('initial_cha', 'secchi'),
				migrator.drop_column('initial_cha', 'sand'),
				migrator.drop_column('initial_cha', 'silt'),
				migrator.drop_column('initial_cha', 'clay'),
				migrator.drop_column('initial_cha', 'sm_agg'),
				migrator.drop_column('initial_cha', 'lg_agg'),
				migrator.drop_column('initial_cha', 'gravel'),
				migrator.drop_column('initial_cha', 'chla'),
				migrator.drop_column('initial_cha', 'sol_pest'),
				migrator.drop_column('initial_cha', 'srb_pest'),
				migrator.drop_column('initial_cha', 'lp_bact'),
				migrator.drop_column('initial_cha', 'p_bact'),
				migrator.add_column('initial_cha', 'org_min_id', ForeignKeyField(init.Om_water_ini, init.Om_water_ini.id, on_delete='SET NULL', null=True)),
				migrator.add_column('initial_cha', 'pest_id', ForeignKeyField(init.Pest_water_ini, init.Pest_water_ini.id, on_delete='SET NULL', null=True)),
				migrator.add_column('initial_cha', 'path_id', ForeignKeyField(init.Path_water_ini, init.Path_water_ini.id, on_delete='SET NULL', null=True)),
				migrator.add_column('initial_cha', 'hmet_id', ForeignKeyField(init.Hmet_water_ini, init.Hmet_water_ini.id, on_delete='SET NULL', null=True)),
				migrator.add_column('initial_cha', 'salt_id', ForeignKeyField(init.Salt_water_ini, init.Salt_water_ini.id, on_delete='SET NULL', null=True)),

				migrator.add_column('d_table_dtl', 'file_name', CharField(null=True)),
				migrator.add_column('d_table_dtl_act', 'const2', DoubleField(default=0)),
				migrator.rename_column('d_table_dtl_act', 'application', 'fp'),
				migrator.rename_column('d_table_dtl_act', 'type', 'option'),

				migrator.drop_column('exco_om_exc', 'sol_pest'),
				migrator.drop_column('exco_om_exc', 'srb_pest'),
				migrator.drop_column('exco_om_exc', 'p_bact'),
				migrator.drop_column('exco_om_exc', 'lp_bact'),
				migrator.drop_column('exco_om_exc', 'metl1'),
				migrator.drop_column('exco_om_exc', 'metl2'),
				migrator.drop_column('exco_om_exc', 'metl3'),
				migrator.rename_column('exco_om_exc', 'ptl_n', 'orgn'),
				migrator.rename_column('exco_om_exc', 'ptl_p', 'sedp'),
				migrator.rename_column('exco_om_exc', 'no3_n', 'no3'),
				migrator.rename_column('exco_om_exc', 'sol_p', 'solp'),
				migrator.rename_column('exco_om_exc', 'nh3_n', 'nh3'),
				migrator.rename_column('exco_om_exc', 'no2_n', 'no2'),
				migrator.rename_column('exco_om_exc', 'bod', 'cbod'),
				migrator.rename_column('exco_om_exc', 'oxy', 'dox'),
				migrator.rename_column('exco_om_exc', 'sm_agg', 'sag'),
				migrator.rename_column('exco_om_exc', 'lg_agg', 'lag'),
				migrator.drop_column('exco_pest_exc', 'aatrex_sol'),
				migrator.drop_column('exco_pest_exc', 'aatrex_sor'),
				migrator.drop_column('exco_pest_exc', 'banvel_sol'),
				migrator.drop_column('exco_pest_exc', 'banvel_sor'),
				migrator.drop_column('exco_pest_exc', 'prowl_sol'),
				migrator.drop_column('exco_pest_exc', 'prowl_sor'),
				migrator.drop_column('exco_pest_exc', 'roundup_sol'),
				migrator.drop_column('exco_pest_exc', 'roundup_sor'),
				migrator.drop_column('exco_path_exc', 'fecals_sol'),
				migrator.drop_column('exco_path_exc', 'fecals_sor'),
				migrator.drop_column('exco_path_exc', 'e_coli_sol'),
				migrator.drop_column('exco_path_exc', 'e_coli_sor'),
				migrator.drop_column('exco_hmet_exc', 'mercury_sol'),
				migrator.drop_column('exco_hmet_exc', 'mercury_sor'),
				migrator.drop_column('exco_salt_exc', 'sodium_sol'),
				migrator.drop_column('exco_salt_exc', 'sodium_sor'),
				migrator.drop_column('exco_salt_exc', 'magnesium_sol'),
				migrator.drop_column('exco_salt_exc', 'magnesium_sor'),

				migrator.drop_column('fertilizer_frt', 'p_bact'),
				migrator.drop_column('fertilizer_frt', 'lp_bact'),
				migrator.drop_column('fertilizer_frt', 'sol_bact'),
				migrator.add_column('fertilizer_frt', 'pathogens', CharField(null=True)),

				migrator.drop_column('hru_data_hru', 'soil_nut_id'),
				migrator.add_column('hru_data_hru', 'soil_plant_init_id', ForeignKeyField(init.Soil_plant_ini, init.Soil_plant_ini.id, null=True, on_delete='SET NULL')),

				migrator.drop_column('hydrology_hyd', 'dp_imp'),

				migrator.rename_table('pest_soil_ini', 'pest_hru_ini'),
				migrator.rename_table('pest_soil_ini_item', 'pest_hru_ini_item'),
				migrator.rename_table('path_soil_ini', 'path_hru_ini'),
				migrator.rename_table('hmet_soil_ini', 'hmet_hru_ini'),
				migrator.rename_table('salt_soil_ini', 'salt_hru_ini'),

				migrator.add_column('plant_ini', 'rot_yr_ini', IntegerField(default=1)),
				migrator.rename_column('plants_plt', 'plnt_hu', 'days_mat'),

				migrator.drop_column('recall_dat', 'sol_pest'),
				migrator.drop_column('recall_dat', 'srb_pest'),
				migrator.drop_column('recall_dat', 'p_bact'),
				migrator.drop_column('recall_dat', 'lp_bact'),
				migrator.drop_column('recall_dat', 'metl1'),
				migrator.drop_column('recall_dat', 'metl2'),
				migrator.drop_column('recall_dat', 'metl3'),

				migrator.drop_column('reservoir_res', 'pest_id'),
				migrator.drop_column('wetland_wet', 'pest_id'),
				migrator.drop_column('initial_res', 'vol'),
				migrator.drop_column('initial_res', 'sed'),
				migrator.drop_column('initial_res', 'ptl_n'),
				migrator.drop_column('initial_res', 'no3_n'),
				migrator.drop_column('initial_res', 'no2_n'),
				migrator.drop_column('initial_res', 'nh3_n'),
				migrator.drop_column('initial_res', 'ptl_p'),
				migrator.drop_column('initial_res', 'sol_p'),
				migrator.drop_column('initial_res', 'secchi'),
				migrator.drop_column('initial_res', 'sand'),
				migrator.drop_column('initial_res', 'silt'),
				migrator.drop_column('initial_res', 'clay'),
				migrator.drop_column('initial_res', 'sm_agg'),
				migrator.drop_column('initial_res', 'lg_agg'),
				migrator.drop_column('initial_res', 'gravel'),
				migrator.drop_column('initial_res', 'chla'),
				migrator.drop_column('initial_res', 'sol_pest'),
				migrator.drop_column('initial_res', 'srb_pest'),
				migrator.drop_column('initial_res', 'lp_bact'),
				migrator.drop_column('initial_res', 'p_bact'),
				migrator.add_column('initial_res', 'org_min_id', ForeignKeyField(init.Om_water_ini, init.Om_water_ini.id, on_delete='SET NULL', null=True)),
				migrator.add_column('initial_res', 'pest_id', ForeignKeyField(init.Pest_water_ini, init.Pest_water_ini.id, on_delete='SET NULL', null=True)),
				migrator.add_column('initial_res', 'path_id', ForeignKeyField(init.Path_water_ini, init.Path_water_ini.id, on_delete='SET NULL', null=True)),
				migrator.add_column('initial_res', 'hmet_id', ForeignKeyField(init.Hmet_water_ini, init.Hmet_water_ini.id, on_delete='SET NULL', null=True)),
				migrator.add_column('initial_res', 'salt_id', ForeignKeyField(init.Salt_water_ini, init.Salt_water_ini.id, on_delete='SET NULL', null=True)),
				migrator.add_column('sediment_res', 'carbon', DoubleField(default=0)),
				migrator.add_column('sediment_res', 'bd', DoubleField(default=0)),

				migrator.drop_column('rout_unit_ele', 'hyd_typ'),
				migrator.rename_column('rout_unit_ele', 'rtu_id', 'old_rtu_id'),
				migrator.drop_index('rout_unit_ele', 'rout_unit_ele_rtu_id'),
				migrator.add_column('rout_unit_ele', 'rtu_id', ForeignKeyField(connect.Rout_unit_con, connect.Rout_unit_con.id, on_delete='SET NULL', null=True)),

				migrator.drop_not_null('soils_sol', 'texture'),
			)

			self.emit_progress(30, 'Updating rout_unit_ele foreign keys...')
			# Move foreign key from rout_unit_rtu to rout_unit_con
			lib.execute_non_query(base.db.database, 'UPDATE rout_unit_ele SET rtu_id = old_rtu_id')
			migrate(
				migrator.drop_column('rout_unit_ele', 'old_rtu_id')
			)

			self.emit_progress(35, 'Drop and re-creating recall, change, init, lte, constituents, dr, irr ops, and exco tables...')
			# Drop and re-create recall tables since they had no data and had significant structure changes
			base.db.drop_tables([recall.Recall_rec, recall.Recall_dat])
			base.db.create_tables([recall.Recall_rec, recall.Recall_dat])

			# Drop and re-create calibration tables
			base.db.drop_tables([change.Calibration_cal]) 
			base.db.create_tables([change.Calibration_cal, change.Calibration_cal_cond, change.Calibration_cal_elem, change.Water_balance_sft, change.Water_balance_sft_item, change.Plant_gro_sft, change.Plant_gro_sft_item, change.Ch_sed_budget_sft, change.Ch_sed_budget_sft_item])

			# Drop and re-create irrigation ops table
			base.db.drop_tables([ops.Irr_ops])
			base.db.create_tables([ops.Irr_ops])
			lib.copy_table('irr_ops', datasets_db, project_db)

			# Drop and re-create init tables since they had no data and had significant structure changes
			base.db.drop_tables([init.Pest_hru_ini, init.Pest_hru_ini_item, init.Pest_water_ini, init.Path_hru_ini, init.Path_water_ini, init.Hmet_hru_ini, init.Hmet_water_ini, init.Salt_hru_ini, init.Salt_water_ini])
			base.db.create_tables([init.Om_water_ini, init.Pest_hru_ini, init.Pest_hru_ini_item, init.Pest_water_ini, init.Path_hru_ini, init.Path_water_ini, init.Hmet_hru_ini, init.Hmet_water_ini, init.Salt_hru_ini, init.Salt_water_ini, init.Soil_plant_ini])
			
			lib.bulk_insert(base.db, init.Om_water_ini, init.Om_water_ini.get_default_data())
			channel.Initial_cha.update({channel.Initial_cha.org_min: 1}).execute()
			reservoir.Initial_res.update({reservoir.Initial_res.org_min: 1}).execute()

			self.emit_progress(40, 'Updating channels tables...')
			base.db.drop_tables([channel.Channel_lte_cha])
			base.db.create_tables([channel.Hyd_sed_lte_cha, channel.Channel_lte_cha])
			hydrology_chas = []
			for hc in channel.Hydrology_cha.select():
				hyd_cha = {
					'id': hc.id,
					'name': hc.name,
					'order': 'first',
					'wd': hc.wd,
					'dp': hc.dp,
					'slp': hc.slp,
					'len': hc.len,
					'mann': hc.mann,
					'k': hc.k,
					'erod_fact': 0.01,
					'cov_fact': 0.005,
					'hc_cov': 0,
					'eq_slp': 0.001,
					'd50': 12,
					'clay': 50,
					'carbon': 0.04,
					'dry_bd': 1,
					'side_slp': 0.5,
					# Remaining default LTE channel hydraulic/sediment parameters
					# (tail of the hyd_cha dict started above this window).
					'bed_load': 0.5,
					't_conc': 10,
					'shear_bnk': 0.75,
					'hc_erod': 0.1,
					'hc_ht': 0.3,
					'hc_len': 0.3
				}
				hydrology_chas.append(hyd_cha)
			lib.bulk_insert(base.db, channel.Hyd_sed_lte_cha, hydrology_chas)

			# Copy each legacy channel row into the LTE channel table, preserving
			# ids and the foreign keys to hydrology/initial/nutrient records.
			channel_chas = []
			for cha in channel.Channel_cha.select():
				chan_cha = {
					'id': cha.id,
					'name': cha.name,
					'hyd': cha.hyd_id,
					'init': cha.init_id,
					'nut': cha.nut_id
				}
				channel_chas.append(chan_cha)
			lib.bulk_insert(base.db, channel.Channel_lte_cha, channel_chas)

			# Rebuild the channel connection rows (and each connection's outflow
			# rows) in the chandeg connect tables, keeping ids and GIS attributes.
			channel_cons = []
			channel_con_outs = []
			for cc in connect.Channel_con.select():
				chan_con = {
					'lcha': cc.cha_id,
					'id': cc.id,
					'name': cc.name,
					'gis_id': cc.gis_id,
					'lat': cc.lat,
					'lon': cc.lon,
					'elev': cc.elev,
					'wst': cc.wst_id,
					'area': cc.area,
					'ovfl': cc.ovfl,
					'rule': cc.rule
				}
				channel_cons.append(chan_con)

				# Carry over every outflow record, re-parented to the new chandeg connection.
				for co in cc.con_outs:
					cha_out = {
						'id': co.id,
						'chandeg_con_id': co.channel_con.id,
						'order': co.order,
						'obj_typ': co.obj_typ,
						'obj_id': co.obj_id,
						'hyd_typ': co.hyd_typ,
						'frac': co.frac
					}
					channel_con_outs.append(cha_out)
			lib.bulk_insert(base.db, connect.Chandeg_con, channel_cons)
			lib.bulk_insert(base.db, connect.Chandeg_con_out, channel_con_outs)

			# Update from cha to sdc: re-point every connect outflow record that
			# referenced a legacy channel object type ('cha') to the swat-deg/LTE
			# channel object type ('sdc') across all connection tables.
			connect.Chandeg_con_out.update(obj_typ='sdc').where(connect.Chandeg_con_out.obj_typ=='cha').execute()
			connect.Hru_con_out.update(obj_typ='sdc').where(connect.Hru_con_out.obj_typ=='cha').execute()
			connect.Rout_unit_con_out.update(obj_typ='sdc').where(connect.Rout_unit_con_out.obj_typ=='cha').execute()
			connect.Aquifer_con_out.update(obj_typ='sdc').where(connect.Aquifer_con_out.obj_typ=='cha').execute()
			connect.Reservoir_con_out.update(obj_typ='sdc').where(connect.Reservoir_con_out.obj_typ=='cha').execute()
			connect.Recall_con_out.update(obj_typ='sdc').where(connect.Recall_con_out.obj_typ=='cha').execute()
			connect.Exco_con_out.update(obj_typ='sdc').where(connect.Exco_con_out.obj_typ=='cha').execute()
			connect.Delratio_con_out.update(obj_typ='sdc').where(connect.Delratio_con_out.obj_typ=='cha').execute()

			# Legacy channel data has been migrated above; clear the old tables.
			connect.Channel_con.delete().execute()
			connect.Channel_con_out.delete().execute()
			channel.Channel_cha.delete().execute()
			channel.Hydrology_cha.delete().execute()
			channel.Sediment_cha.delete().execute()
			
			# Drop and re-create all dr tables since not used previously
			base.db.drop_tables([dr.Dr_om_del, dr.Dr_pest_del, dr.Dr_path_del, dr.Dr_hmet_del, dr.Dr_salt_del, dr.Delratio_del])
			base.db.create_tables([dr.Dr_om_del, 
									dr.Dr_pest_del, dr.Dr_pest_col, dr.Dr_pest_val,
									dr.Dr_path_del, dr.Dr_path_col, dr.Dr_path_val,
									dr.Dr_hmet_del, dr.Dr_hmet_col, dr.Dr_hmet_val, 
									dr.Dr_salt_del, dr.Dr_salt_col, dr.Dr_salt_val, 
									dr.Delratio_del])

			# Create exco constituent tables since not used previously.
			# NOTE(review): unlike the dr tables above, these are only created,
			# not dropped first — presumably they cannot pre-exist here; confirm.
			base.db.create_tables([exco.Exco_pest_col, exco.Exco_pest_val,
									exco.Exco_path_col, exco.Exco_path_val,
									exco.Exco_hmet_col, exco.Exco_hmet_val, 
									exco.Exco_salt_col, exco.Exco_salt_val])
			
			# Drop and re-create constituents.cs
			base.db.drop_tables([simulation.Constituents_cs])
			base.db.create_tables([simulation.Constituents_cs])

			# LTE tables
			base.db.drop_tables([hru.Hru_lte_hru])
			base.db.create_tables([hru.Hru_lte_hru, soils.Soils_lte_sol])

			self.emit_progress(50, 'Update aquifer, calibration parameters, fertilizer, and pesticides data...')
			# Seed a default initial-aquifer record and apply baseline depth,
			# water-table, specific-yield, and init values to every aquifer row.
			aquifer.Initial_aqu.insert(name='initaqu1', org_min=1).execute()
			aquifer.Aquifer_aqu.update({aquifer.Aquifer_aqu.dep_bot: 10, aquifer.Aquifer_aqu.dep_wt: 5, aquifer.Aquifer_aqu.spec_yld: 0.05, aquifer.Aquifer_aqu.init: 1}).execute()

			# Refresh calibration parameter definitions from the datasets database.
			lib.copy_table('cal_parms_cal', datasets_db, project_db)

			# Drop and re-create pesticide_pst
			base.db.drop_tables([hru_parm_db.Pesticide_pst])
			base.db.create_tables([hru_parm_db.Pesticide_pst])
			lib.copy_table('pesticide_pst', datasets_db, project_db)

			# Create a default soil-plant initialization row and point every HRU at it.
			sp = init.Soil_plant_ini.create(
				name='soilplant1',
				sw_frac=0,
				nutrients=1
			)

			hru.Hru_data_hru.update({hru.Hru_data_hru.soil_plant_init: sp.id}).execute()

			# Replace project fertilizer data with the datasets defaults, keeping ids.
			hru_parm_db.Fertilizer_frt.delete().execute()
			lib.copy_table('fertilizer_frt', datasets_db, project_db, include_id=True)

			self.emit_progress(60, 'Update decision tables...')
			# Remember each reservoir's release decision table *by name* so the
			# link can be restored after the decision tables are wiped and
			# re-copied from the datasets database below.
			res_rels = {}
			for r in reservoir.Reservoir_res.select():
				res_rels[r.id] = r.rel.name

			decision_table.D_table_dtl.delete().execute()
			decision_table.D_table_dtl_cond.delete().execute()
			decision_table.D_table_dtl_cond_alt.delete().execute()
			decision_table.D_table_dtl_act.delete().execute()
			decision_table.D_table_dtl_act_out.delete().execute()
			lib.copy_table('d_table_dtl', datasets_db, project_db, include_id=True)
			lib.copy_table('d_table_dtl_cond', datasets_db, project_db, include_id=True)
			lib.copy_table('d_table_dtl_cond_alt', datasets_db, project_db, include_id=True)
			lib.copy_table('d_table_dtl_act', datasets_db, project_db, include_id=True)
			lib.copy_table('d_table_dtl_act_out', datasets_db, project_db, include_id=True)

			# Re-link reservoirs to the freshly copied decision tables by name;
			# a reservoir whose table no longer exists is silently left as-is.
			for r in reservoir.Reservoir_res.select():
				try:
					d_tbl_name = res_rels.get(r.id, None)
					if d_tbl_name is not None:
						r.rel_id = decision_table.D_table_dtl.get(decision_table.D_table_dtl.name == d_tbl_name).id
						r.save()
				except decision_table.D_table_dtl.DoesNotExist:
					pass

			self.emit_progress(70, 'Update management schedules...')
			# Wipe existing management schedules, then, for each land use, look up
			# the plant sharing its base name (landuse '<plant>_lum' -> plant) and
			# attach an auto-generated plant/harvest schedule for annual crops.
			lum.Management_sch.delete().execute()
			lum.Management_sch_auto.delete().execute()
			lum.Management_sch_op.delete().execute()
			lum.Landuse_lum.update(cal_group=None,mgt=None).execute()
			for lu in lum.Landuse_lum.select():
				plant_name = lu.name.replace('_lum', '')

				plant = hru_parm_db.Plants_plt.get_or_none(hru_parm_db.Plants_plt.name == plant_name)
				if plant is not None:
					new_d_table_id = None
					# Warm-season annuals get the corn plant/harvest template;
					# cold-season annuals get the winter wheat template.
					if plant.plnt_typ == 'warm_annual':
						new_d_table_id = GisImport.insert_decision_table(plant.name, 'pl_hv_corn')
					elif plant.plnt_typ == 'cold_annual':
						new_d_table_id = GisImport.insert_decision_table(plant.name, 'pl_hv_wwht') 

					if new_d_table_id is not None:
						mgt_name = '{plant}_rot'.format(plant=plant.name)
						
						# Create the schedule, wire it to the decision table,
						# and point the land use at the new schedule.
						mgt_id = lum.Management_sch.insert(
							name = mgt_name
						).execute()
						lum.Management_sch_auto.insert(
							management_sch=mgt_id,
							d_table=new_d_table_id
						).execute()

						lu.mgt = mgt_id
						lu.save()

			self.emit_progress(80, 'Update file_cio and print tables...')
			# Add a 'pest' print object with all output frequencies disabled.
			simulation.Print_prt_object.create(name='pest', daily=False, monthly=False, yearly=False, avann=False, print_prt_id=1)

			# Rebuild the project file.cio entries from the datasets defaults.
			File_cio.delete().execute()
			file_cios = []
			for f in dataset_file_cio.select():
				file_cio = {
					'classification': f.classification.id,
					'order_in_class': f.order_in_class,
					'file_name': f.default_file_name
				}
				file_cios.append(file_cio)

			lib.bulk_insert(base.db, File_cio, file_cios)

			self.emit_progress(90, 'Update plants table to use days_mat column...')
			# Copy days_mat from the datasets plants table; plants without a
			# dataset counterpart default to 0.
			for p in hru_parm_db.Plants_plt:
				dp = dataset_plants.get_or_none(dataset_plants.name == p.name)
				if dp is not None:
					p.days_mat = dp.days_mat
				else:
					p.days_mat = 0
				p.save()
		except Exception as ex:
			# On any failure, restore the project database from the rollback
			# copy (when one was provided) and abort with the error message.
			if rollback_db is not None:
				self.emit_progress(50, "Error occurred. Rolling back database...")
				SetupProjectDatabase.rollback(project_db, rollback_db)
				self.emit_progress(100, "Error occurred.")
			sys.exit(str(ex))