Example #1
def upgrade():
    logger.info("Create temporary departments table…")
    op.execute(f"""
        CREATE TABLE {schema}.{temp_table_name} (
            gid integer NOT NULL,
            id character varying(24),
            nom_dep character varying(30),
            nom_dep_m character varying(30),
            insee_dep character varying(3),
            insee_reg character varying(2),
            chf_dep character varying(5),
            geom public.geometry(MultiPolygon,2154),
            geojson character varying
        )
    """)
    op.execute(f"""
        ALTER TABLE ONLY {schema}.{temp_table_name}
            ADD CONSTRAINT {temp_table_name}_pkey PRIMARY KEY (gid)
    """)
    cursor = op.get_bind().connection.cursor()
    with open_remote_file(base_url, filename) as geofile:
        logger.info("Inserting departments data in temporary table…")
        cursor.copy_expert(f'COPY {schema}.{temp_table_name} FROM STDIN',
                           geofile)
    logger.info("Copy departments data in l_areas…")
    op.execute(f"""
        INSERT INTO {schema}.l_areas (id_type, area_code, area_name, geom, geojson_4326)
        SELECT {schema}.get_id_area_type('DEP') AS id_type, insee_dep, nom_dep, geom, geojson
        FROM {schema}.{temp_table_name}
    """)
    logger.info("Re-indexing…")
    op.execute(f'REINDEX INDEX {schema}.index_l_areas_geom')
    logger.info("Dropping temporary departments table…")
    op.execute(f'DROP TABLE {schema}.{temp_table_name}')
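All of these examples rely on an open_remote_file() helper defined elsewhere in the migration module. A minimal sketch of what such a helper could look like, assuming it simply downloads base_url + filename and yields a file-like object (the real helper may stream, cache, or handle text encodings differently):

from contextlib import contextmanager
from io import BytesIO
from urllib.request import urlopen

@contextmanager
def open_remote_file(base_url, filename, open_fct=None):
    # Download the whole file into memory and yield it as a file-like
    # object; open_fct (e.g. ZipFile or lzma.open) optionally wraps it.
    with urlopen(base_url + filename) as response:
        data = BytesIO(response.read())
    yield open_fct(data) if open_fct else data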
Example #2
def upgrade():
    try:
        local_srid = context.get_x_argument(as_dictionary=True)['local-srid']
    except KeyError:
        raise Exception("Missing local srid, please use -x local-srid=...")
    with TemporaryDirectory() as temp_dir:
        with open_remote_file(base_url, archive_name,
                              open_fct=ZipFile) as archive:
            archive.extract(file_name, path=temp_dir)
        path = os.path.join(temp_dir, file_name)
        cmd = f"raster2pgsql -s {local_srid} -c -C -I -M -d -t 5x5 {path} ref_geo.dem | psql"
        db_uri = urlsplit(config['SQLALCHEMY_DATABASE_URI'])
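        # psql picks up the connection settings from libpq's PG* environment variables.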
        env = {
            'PGHOST': db_uri.hostname,
            'PGPORT': str(db_uri.port),
            'PGUSER': db_uri.username,
            'PGPASSWORD': db_uri.password,
            'PGDATABASE': db_uri.path.lstrip('/'),
        }
        subprocess.run(cmd,
                       stdout=subprocess.DEVNULL,
                       shell=True,
                       check=True,
                       env=env)
    logger.info("Refresh DEM spatial index…")
    op.execute("REINDEX INDEX ref_geo.dem_st_convexhull_idx")
Example #3
def upgrade():
    create_temporary_grids_table(schema, temp_table_name)
    cursor = op.get_bind().connection.cursor()
    with open_remote_file(base_url, filename) as geofile:
        logger.info("Inserting grids data in temporary table…")
        cursor.copy_expert(f'COPY {schema}.{temp_table_name} FROM STDIN',
                           geofile)
    insert_grids_and_drop_temporary_table(schema, temp_table_name, area_type)
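Example #3 condenses the same pattern as Example #1 into two helpers defined elsewhere. A rough sketch of what they might look like, with a hypothetical column list (the real grid dump's columns may differ):

def create_temporary_grids_table(schema, temp_table_name):
    # Hypothetical scratch table matching the columns of the grids dump.
    op.execute(f"""
        CREATE TABLE {schema}.{temp_table_name} (
            gid integer NOT NULL PRIMARY KEY,
            code character varying(24),
            geom public.geometry(MultiPolygon,2154),
            geojson character varying
        )
    """)

def insert_grids_and_drop_temporary_table(schema, temp_table_name, area_type):
    # Copy the grid cells into l_areas, then drop the scratch table.
    op.execute(f"""
        INSERT INTO {schema}.l_areas (id_type, area_code, area_name, geom, geojson_4326)
        SELECT {schema}.get_id_area_type('{area_type}') AS id_type, code, code, geom, geojson
        FROM {schema}.{temp_table_name}
    """)
    op.execute(f'DROP TABLE {schema}.{temp_table_name}')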
Example #4
def upgrade():
    logger.info("Create temporary municipalities table…")
    op.execute(f"""
        CREATE TABLE {schema}.{temp_table_name} (
            gid integer NOT NULL,
            id character varying(24),
            nom_com character varying(50),
            nom_com_m character varying(50),
            insee_com character varying(5),
            statut character varying(24),
            insee_can character varying(2),
            insee_arr character varying(2),
            insee_dep character varying(3),
            insee_reg character varying(2),
            code_epci character varying(21),
            population bigint,
            type character varying(3),
            geom public.geometry(MultiPolygon,2154),
            geojson character varying
        )
    """)
    op.execute(f"""
        ALTER TABLE ONLY {schema}.{temp_table_name}
            ADD CONSTRAINT {temp_table_name}_pkey PRIMARY KEY (gid)
    """)
    cursor = op.get_bind().connection.cursor()
    with open_remote_file(base_url, filename) as geofile:
        logger.info("Inserting municipalities data in temporary table…")
        cursor.copy_expert(f'COPY {schema}.{temp_table_name} FROM STDIN', geofile)
    logger.info("Copy municipalities in l_areas…")
    op.execute(f"""
        INSERT INTO {schema}.l_areas (id_type, area_code, area_name, geom, geojson_4326)
        SELECT {schema}.get_id_area_type('COM') AS id_type, insee_com, nom_com, geom, geojson
        FROM {schema}.{temp_table_name}
    """)
    logger.info("Copy municipalities in li_municipalities…")
    op.execute(f"""
        INSERT INTO ref_geo.li_municipalities
        (id_municipality, id_area, status, insee_com, nom_com, insee_arr, insee_dep, insee_reg, code_epci)
        SELECT id, a.id_area, statut, insee_com, nom_com, insee_arr, insee_dep, insee_reg, code_epci
        FROM {schema}.{temp_table_name} t
        JOIN ref_geo.l_areas a ON a.area_code = t.insee_com
    """)
    logger.info("Re-indexing…")
    op.execute(f'REINDEX INDEX {schema}.index_l_areas_geom')
    logger.info("Dropping temporary municipalities table…")
    op.execute(f'DROP TABLE {schema}.{temp_table_name}')
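Note that the second INSERT resolves id_area by joining the temporary table to l_areas on area_code = insee_com, so each municipality row is matched to the l_areas row created by the first INSERT just above.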
Example #5
def upgrade():
    cursor = op.get_bind().connection.cursor()
    with open_remote_file(base_url, 'HABREF_50.zip',
                          open_fct=ZipFile) as archive:
        for table, filename in table_files.items():
            with archive.open(filename) as f:
                logger.info(f"Insert INPN data in {table}…")
                cursor.copy_expert(
                    f"""
                COPY ref_habitats.{table} FROM STDIN WITH CSV HEADER DELIMITER E';'
                """, f)

    logger.info("Populate table autocomplete_habitat…")
    op.execute("""
    INSERT INTO ref_habitats.autocomplete_habitat
    SELECT
    cd_hab,
    h.cd_typo,
    lb_code,
    lb_nom_typo,
    concat(lb_code, ' - ', lb_hab_fr, ' ', lb_hab_fr_complet)
    FROM ref_habitats.habref h
    JOIN ref_habitats.typoref t ON t.cd_typo = h.cd_typo
    """)
Example #6
def upgrade():
    active = context.get_x_argument(as_dictionary=True).get('active')
    if active is not None:
        active = bool(strtobool(active))
    else:
        active = True
    conn = op.get_bind()
    metadata = sa.MetaData(bind=conn)
    sensitivity_rule = sa.Table('t_sensitivity_rules',
                                metadata,
                                schema='gn_sensitivity',
                                autoload_with=conn)
    cor_sensitivity_criteria = sa.Table('cor_sensitivity_criteria',
                                        metadata,
                                        schema='gn_sensitivity',
                                        autoload_with=conn)
    nomenclature = sa.Table('t_nomenclatures',
                            metadata,
                            schema='ref_nomenclatures',
                            autoload_with=conn)
    nomenclature_type = sa.Table('bib_nomenclatures_types',
                                 metadata,
                                 schema='ref_nomenclatures',
                                 autoload_with=conn)
    statut_biologique_nomenclature_type_id = conn.execute(
        func.ref_nomenclatures.get_id_nomenclature_type('STATUT_BIO')
    ).scalar()
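    # Collect the cd_nomenclature codes attached to the STATUT_BIO type; each
    # result row is a 1-tuple, hence the chain.from_iterable() flattening.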
    statut_biologique_nomenclatures = list(chain.from_iterable(conn.execute(
        sa.select([nomenclature.c.cd_nomenclature])
        .select_from(
            nomenclature.join(nomenclature_type,
                              nomenclature.c.id_type == nomenclature_type.c.id_type)
        )
        .where(nomenclature_type.c.mnemonique == 'STATUT_BIO')
    )))
    rules = []
    criterias = []
    with open_remote_file(base_url, filename, open_fct=lzmaopen) as csvfile:
        reader = csv.DictReader(csvfile, delimiter=',')
        for row in reader:
            assert row['perimetre'] == 'France métropolitaine'
            id_nomenc = get_id_from_cd(row['codage'])
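            # An empty "duree" column falls back to a very large duration (10000).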
            if row['duree']:
                duration = int(row['duree'])
            else:
                duration = 10000
            rule = {
                'cd_nom': int(row['cd_nom']),
                'nom_cite': row['nom_cite'],
                'id_nomenclature_sensitivity': id_nomenc,
                'sensitivity_duration': duration,
                'sensitivity_territory': row['perimetre'],
                'id_territory': row['cd_sig'],
                'source': str(source),
                'comments': row['autre'],
                'date_min': row['date_min'] if row['date_min'] else None,
                'date_max': row['date_max'] if row['date_max'] else None,
                'active': active,
            }
            if row['cd_occ_statut_biologique']:
                if row['cd_occ_statut_biologique'] in statut_biologique_nomenclatures:
                    criterias.append(
                        (len(rules), row['cd_occ_statut_biologique']))
                    rules.append(rule)
                else:
                    # Skip this rule: its nomenclature is outdated
                    logger.warning(
                        "Ignoring rule {} with unknown nomenclature {}".format(
                            row['cd_sens'], row['cd_occ_statut_biologique']))
            else:
                rules.append(rule)
    results = conn.execute(
        sensitivity_rule.insert()
        .values(rules)
        .returning(sensitivity_rule.c.id_sensitivity)
    )
    rules_indexes = [rule_index for rule_index, in results]
    conn.execute(cor_sensitivity_criteria.insert().values([
        {
            'id_sensitivity': rules_indexes[rule_idx],
            'id_criteria': func.ref_nomenclatures.get_id_nomenclature('STATUT_BIO', occ),
            'id_type_nomenclature': statut_biologique_nomenclature_type_id,
        }
        for rule_idx, occ in criterias
    ]))

    op.execute(
        "REFRESH MATERIALIZED VIEW gn_sensitivity.t_sensitivity_rules_cd_ref")