def calc_landuse_prop(pc6_id, center_geom_txt, landuse_classes, buffer):
    """Return the fraction of a circular buffer covered by given land-use classes.

    pc6_id          -- postcode-area id (diagnostics only, currently unused)
    center_geom_txt -- WKT of the centre point (EPSG:28992)
    landuse_classes -- tuple of bg2015 class codes for the SQL `in %s` clause
    buffer          -- buffer radius in metres

    Returns 0 when the buffer overlaps none of the requested classes,
    otherwise sum(intersection areas) / buffer area.
    """
    cur_1, conn_1 = uf.connect()
    cur_1.execute("""select id, bg2015, st_area(ST_Intersection(geom, st_buffer(ST_GeomFromText(%s, 28992), %s))), st_area(st_buffer(ST_GeomFromText(%s, 28992), %s)) 
    from bbg2015 where bg2015 in %s and ST_Intersects(geom, st_buffer(ST_GeomFromText(%s, 28992), %s));""", ([center_geom_txt, buffer, center_geom_txt, buffer, landuse_classes, center_geom_txt, buffer]))

    result_set_1 = cur_1.fetchall()
    if cur_1.rowcount == 0:
        # fix: close the connection before the early return (was leaked)
        cur_1.close()
        conn_1.close()
        return 0

    landuse_area_sum = 0.0
    buffer_area = 0.0
    for record in result_set_1:
        # record = (id, bg2015, intersection_area, buffer_area)
        landuse_area_sum = landuse_area_sum + record[2]
        buffer_area = record[3]  # identical for every row: area of the buffer

    cur_1.close()
    conn_1.close()

    return landuse_area_sum / buffer_area
Beispiel #2
0
def calc_bldg_area(id, geom_txt, buffer):
    """Sum the footprint area (m^2, geography cast) of residential OSM
    buildings intersecting a buffer around the given point.

    id       -- caller's record id, echoed back in the return value
    geom_txt -- WKT of the centre point (EPSG:28992); the buffer is
                transformed to 4326 to match the OSM `ways` geometry
    buffer   -- buffer radius in metres

    Returns (id, area); (id, -2) when no building row matches;
    (id, 0) when rows matched but the SUM came back NULL.
    """
    cur1, conn1 = uf.connect()
    cur1.execute(
        """
    SELECT sum(st_area(ST_MakePolygon(ways.linestring)::geography)) AS res_bldg_area
    FROM ways
    WHERE ST_Intersects(ways.linestring, ST_Transform(ST_Buffer(ST_GeomFromText(%s, 28992), %s), 4326))
    and (ways.tags -> 'building'= 'apartments' or ways.tags -> 'building'= 'farm' or ways.tags -> 'building'= 'house' or ways.tags -> 'building'= 'detached' or ways.tags -> 'building'= 'residential' or ways.tags -> 'building'= 'dormitory' or ways.tags -> 'building'= 'terrace' or ways.tags->'building'='houseboat' or ways.tags->'building'='bungalow' or ways.tags->'building'='static_caravan' or ways.tags->'building'='cabin')
    and ST_IsEmpty(ways.linestring) IS false 
    AND ST_IsValid(ways.linestring) IS true
    and ST_IsClosed(ways.linestring);""", ([geom_txt, buffer]))

    # too slow with --WHERE ST_Intersects(ST_Transform(ways.linestring, 28992), ST_Buffer(ST_SetSRID(ST_Point(%s, %s), 28992), %s))
    result_set1 = cur1.fetchall()
    if cur1.rowcount == 0:
        # fix: the old `print(...), ' ...'` built a discarded tuple, so the
        # message tail was never printed; also close the connection here.
        print('id = ', id, ' is not overlapped with any buildings')
        cur1.close()
        conn1.close()
        return id, -2

    result = result_set1[0]
    bldg_area = result[0]

    if bldg_area is None:
        logger.info('id = ' + str(id) +
                    ' is not overlapped with any streets!' +
                    " query result = " + str(result))
        # fix: close the connection before the early return (was leaked)
        cur1.close()
        conn1.close()
        return id, 0

    cur1.close()
    conn1.close()

    return id, bldg_area
Beispiel #3
0
def calc_nox_idx(id, geom_txt, buffer):
    """Area-weighted average NOx over the buffer around a point.

    Intersects the buffer with nox_nl_2km_2015 polygons and weights each
    zone's nox_avg by intersected_area / buffer_area.  Relies on the
    module-level global `buffer_area` being set by the driver first.

    Returns (nox_avg, rate_sum); (0, 0) when no zone intersects.
    rate_sum < 1 means part of the buffer had no NOx coverage.
    """
    global buffer_area

    cur_nox, conn_nox = uf.connect()
    cur_nox.execute(
        """select nox_avg, ST_Area(ST_Intersection(geom,st_buffer(ST_GeomFromText(%s, 28992), %s)))
    from nox_nl_2km_2015 where ST_Intersects(geom, st_buffer(ST_GeomFromText(%s, 28992), %s));""",
        ([geom_txt, buffer, geom_txt, buffer]))

    result_set_nox = cur_nox.fetchall()
    if cur_nox.rowcount == 0:
        # fix: print the full message (old trailing `, '...'` after print()
        # was a discarded tuple) and close the connection before returning.
        print('postcode pc6 = ', id, ' is not overlapped with any nox zones')
        cur_nox.close()
        conn_nox.close()
        return 0, 0

    nox_avg = 0.0
    rate_sum = 0.0

    for record in result_set_nox:
        nox = record[0]
        intsec_area = record[1]
        rate = intsec_area / buffer_area  # fraction of buffer in this zone

        nox_avg = nox_avg + nox * rate
        rate_sum = rate_sum + rate

    cur_nox.close()
    conn_nox.close()

    return nox_avg, rate_sum
Beispiel #4
0
def calc_nox_avg(gid, geom_txt, buffer):
    """Pixel-count-weighted mean of the nox_2009 raster inside the buffer.

    gid      -- pc6 id, echoed back in the return value
    geom_txt -- WKT of the centre point (EPSG:28992)
    buffer   -- buffer radius in metres

    Returns (gid, avg); (gid, -99999) when no raster pixel falls inside
    the buffer.
    """
    cur_1, conn_1 = uf.connect()
    cur_1.execute("""with clip_query as (SELECT ST_ValueCount(ST_Clip(st_union(rast), clipper.geom, false)) as pvc
     FROM nox_2009 INNER JOIN 
     (select st_buffer(ST_GeomFromText(%s, 28992), %s) as geom) AS clipper 
     ON ST_Intersects(nox_2009.rast, clipper.geom) GROUP BY clipper.geom)
     SELECT (pvc).value, SUM((pvc).count) As total from clip_query GROUP BY (pvc).value;""", ([geom_txt, buffer]))

    result_set = cur_1.fetchall()

    total_num = 0
    total_value = 0
    default_avg = -99999  # sentinel: no raster coverage
    for record in result_set:
        value = record[0]
        count = record[1]
        total_num = total_num + count
        total_value = total_value + value * count

    if total_num == 0:
        logger.info("pc6 gid = " + str(gid) + " has no nox value!")
        # fix: close the connection on this path too (was leaked)
        cur_1.close()
        conn_1.close()
        return gid, default_avg

    avg_value = total_value / total_num

    cur_1.close()
    conn_1.close()

    return gid, avg_value
Beispiel #5
0
def calc(table, buffer):
    """Driver: compute the address-count index for every pc6 centroid.

    Iterates centroids of `source_table` (module-level name), calls
    calc_address_num per centroid, and flushes results to the target
    table every uf.batch_number rows via update_all_idx.

    NOTE(review): the `table` parameter is unused; module-level
    `source_table` / `variable` are read instead — confirm intent.
    """
    total_start_time = time.time()

    cur, conn = uf.connect()
    cur.execute("select gid, ST_AsText(st_centroid(geom)) from " +
                source_table + ";")
    record_result_set = cur.fetchall()

    result_all_list = []

    start_time = time.time()
    row_index = 0
    for row in record_result_set:
        row_index = row_index + 1
        id = row[0]
        center_geom_txt = row[1]

        result_one_tuple = ()
        addr_num = calc_address_num(id, center_geom_txt, buffer)

        # (value, id) ordering matches update_all_idx's UPDATE placeholders
        result_one_tuple = (addr_num, id)
        result_all_list.append(result_one_tuple)

        if row_index % uf.batch_number == 0:

            update_all_idx(result_all_list, buffer)
            end_time = time.time()
            time_diff = end_time - start_time  # in seconds
            time_diff = round(time_diff, 2)
            time_diff_min = round(time_diff / 60, 2)  # in mins

            print("calc " + variable + " index for pc6, id = ", int(id),
                  " row_index = ", row_index, " result= ", result_one_tuple,
                  "; running time = ", time_diff_min, " mins! ", time_diff,
                  " sec! ", " buffer = ", buffer)
            logger.info("calc " + variable + " index for pc6, id = " +
                        str(int(id)) + " row_index = " + str(row_index) +
                        " result= " + str(result_one_tuple) +
                        "; running time = " + str(time_diff_min) + " mins! " +
                        str(time_diff) + " sec! buffer = " + str(buffer))

            # reset the batch timer and buffer
            start_time = time.time()
            result_all_list = []

    # flush the final partial batch
    update_all_idx(result_all_list, buffer)
    cur.close()
    conn.close()

    total_end_time = time.time()
    total_time_diff = total_end_time - total_start_time  # in seconds
    total_time_diff = round(total_time_diff / (60 * 60), 2)  # in hours
    # fix: report the total elapsed hours; the old code printed `time_diff`,
    # which is the last batch's seconds and is unbound (NameError) when the
    # batch threshold was never reached.
    print(
        "finish calculating " + variable +
        " index...processed time in hours = ", total_time_diff,
        " buffer size = ", buffer)
    logger.info("finish calculating " + variable +
                " index...processed time in hours= " + str(total_time_diff) +
                " buffer size = " + str(buffer))
def calc(table, buffer):
    """Driver: compute noise-class distributions (dn_1..dn_6) for every
    pc6 centroid of uf.source_table and batch-write them to the target
    table via update_all_noise.

    NOTE(review): the `table` parameter is unused; uf.source_table is
    read instead — confirm intent.
    """
    # Shared with calc_noise_distributions through module-level globals.
    global buffer_area
    global pixel_area

    buffer_area = math.pi * buffer * buffer  # analytic area of the buffer circle
    pixel_area = 10 * 10  # noise_2016_10m raster pixels are 10 m x 10 m

    cur, conn = uf.connect()
    cur.execute("select gid, ST_AsText(st_centroid(geom)) from " +
                uf.source_table + ";")
    record_result_set = cur.fetchall()

    result_all_list = []

    start_time = time.time()
    row_index = 0
    for row in record_result_set:
        row_index = row_index + 1
        id = row[0]
        center_geom_txt = row[1]

        result_one_tuple = ()
        id, dn_1, dn_2, dn_3, dn_4, dn_5, dn_6 = calc_noise_distributions(
            id, center_geom_txt, buffer)
        # Sanity check: the six class proportions should not exceed 1
        # (the buffer can be only partially covered, so < 1 is fine).
        dn_sum = dn_1 + dn_2 + dn_3 + dn_4 + dn_5 + dn_6
        if dn_sum - 1 > 0.0001:
            print("something wrong with calc area for pc6 ", id, "; dn_sum = ",
                  dn_sum)

        # tuple order matches update_all_noise's UPDATE placeholders
        result_one_tuple = (dn_1, dn_2, dn_3, dn_4, dn_5, dn_6, id)
        result_all_list.append(result_one_tuple)

        if row_index % uf.batch_number == 0:
            # flush a full batch and report progress
            update_all_noise(result_all_list, buffer)
            end_time = time.time()
            time_diff = end_time - start_time  # in seconds
            time_diff = round(time_diff, 2)
            time_diff_min = round(time_diff / 60, 2)  # in mins

            print("calc noise for pc6 id ", int(id), "row_index = ", row_index,
                  " result= ", result_one_tuple, "; running time = ",
                  time_diff_min, " mins! ", time_diff, " sec! ", " buffer = ",
                  buffer)
            logger.info("calc noise for pc6 id " + str(int(id)) +
                        " row_index = " + str(row_index) + " result= " +
                        str(result_one_tuple) + "; running time = " +
                        str(time_diff_min) + " mins! " + str(time_diff) +
                        " sec! buffer = " + str(buffer))

            # empty list
            start_time = time.time()
            result_all_list = []

    # flush the final partial batch
    update_all_noise(result_all_list, buffer)
    cur.close()
    conn.close()
Beispiel #7
0
def calc(table, buffer):
    """Driver: compute air-pollution averages (no2, nox, pm2.5, pm10) for
    every pc6 centroid of `source_table` and batch-write them to the
    target table via update_all_air_idx.

    Rows whose no2_avg is already filled in the target table are skipped
    (check_skip), so an interrupted run can be resumed.

    NOTE(review): the `table` parameter is unused; the module-level
    `source_table` name is read instead — confirm intent.
    """
    cur, conn = uf.connect()    
    cur.execute("select gid, ST_AsText(st_centroid(geom)) from " + source_table +";")
    record_result_set = cur.fetchall()
  
    result_all_list = []
  
    start_time =time.time()
    row_index = 0
    for row in record_result_set:
        row_index = row_index + 1
        id = row[0]
        center_geom_txt = row[1]
        
        # resume support: skip rows already computed in a previous run
        if check_skip(id, buffer):
            continue
        
        
        result_one_tuple=()
        id, no2_avg = calc_no2_avg(id, center_geom_txt, buffer)
        id, nox_avg = calc_nox_avg(id, center_geom_txt, buffer)
        id, pm25_avg = calc_pm25_avg(id, center_geom_txt, buffer)
        id, pm10_avg = calc_pm10_avg(id, center_geom_txt, buffer)

    
        # tuple order matches update_all_air_idx's UPDATE placeholders
        result_one_tuple = (no2_avg, nox_avg, pm25_avg, pm10_avg, id)
        result_all_list.append(result_one_tuple)
        
        
        if row_index % uf.batch_number == 0:
            
            # flush a full batch and report progress
            update_all_air_idx(result_all_list, buffer)
            end_time = time.time()
            time_diff = end_time - start_time # in seconds
            time_diff = round(time_diff, 2)
            time_diff_min = round(time_diff/60, 2) # in mins
            
            print("calc air pollution index for pc6, id = ", int(id), " row_index = " , row_index, " result= " , result_one_tuple, "; running time = ", time_diff_min, " mins! ", time_diff, " sec! ", " buffer = ", buffer)
            logger.info("calc air pollution index for pc6, id = "+ str(int(id))+  " row_index = " + str(row_index) + " result= " +  str(result_one_tuple) + "; running time = " +  str(time_diff_min) + " mins! " + str(time_diff) + " sec! buffer = " + str(buffer))
        
            # empty list
            start_time = time.time()
            result_all_list = []
    
    # flush the final partial batch
    update_all_air_idx(result_all_list, buffer)
    cur.close()
    conn.close()        
    
    

    
    
    
def calc(table, buffer):
    """Driver: compute street/motorway length and density for every pc6
    centroid of `source_table` and batch-write them via
    update_multiple_street_idx.

    Densities are length divided by the analytic buffer area
    (pi * buffer^2), computed locally (not via the module globals other
    drivers in this file use).

    NOTE(review): uses `batch_num` where sibling drivers use
    uf.batch_number, and the `table` parameter is unused — confirm both
    are intentional (presumably module-level names).
    """
    buffer_area = math.pi * buffer * buffer

    cur, conn = uf.connect()
    cur.execute("select gid, ST_AsText(st_centroid(geom)) from " +
                source_table + ";")
    record_result_set = cur.fetchall()

    result_all_list = []

    start_time = time.time()
    row_index = 0
    for row in record_result_set:
        row_index = row_index + 1
        id = row[0]
        center_geom_txt = row[1]

        result_one_tuple = ()
        id, street_length = calc_street_length(id, center_geom_txt, buffer)
        id, motorway_length = calc_motorway_length(id, center_geom_txt, buffer)

        street_density = street_length / buffer_area
        motorway_density = motorway_length / buffer_area

        # tuple order matches update_multiple_street_idx's placeholders
        result_one_tuple = (street_length, street_density, motorway_length,
                            motorway_density, id)
        result_all_list.append(result_one_tuple)

        if row_index % batch_num == 0:

            # flush a full batch and report progress
            update_multiple_street_idx(result_all_list, buffer)
            end_time = time.time()
            time_diff = end_time - start_time  # in seconds
            time_diff = round(time_diff, 2)
            time_diff_min = round(time_diff / 60, 2)
            print("street index for pc6 id = ", int(id), " row_index",
                  row_index, " result= ", result_one_tuple,
                  "; running time = ", time_diff_min, " mins! ", time_diff,
                  " sec! ", " buffer = ", buffer)
            logger.info("street index for pc6 id = " + str(int(id)) +
                        " row_index" + str(row_index) + " result= " +
                        str(result_one_tuple) + "; running time = " +
                        str(time_diff_min) + " mins! " + str(time_diff) +
                        " sec! buffer = " + str(buffer))
            # empty list
            start_time = time.time()
            result_all_list = []

    # flush the final partial batch
    update_multiple_street_idx(result_all_list, buffer)
    cur.close()
    conn.close()
def calc_landuse_index(pc6_id, center_geom_txt, buffer):
    """Normalised Shannon land-use diversity index inside the buffer.

    Counts bbg2015_10m raster pixels, aggregates them into the class
    groups of the module-level `landuse_class_list`, converts counts to
    area proportions of the buffer, and folds them into a Shannon index
    normalised by log(number of class groups).

    Relies on module globals `buffer_area` and `pixel_area` being set by
    the driver.  Returns (index, {group: proportion}, prop_sum), or
    (0, 0, 0) when the buffer overlaps no land-use pixels.
    """
    global buffer_area
    global pixel_area

    global landuse_class_list
    landuse_class_num = len(landuse_class_list)

    cur_1, conn_1 = uf.connect()
    cur_1.execute(
        """with clip_query as (SELECT ST_ValueCount(ST_Clip(st_union(rast), clipper.geom, false)) as pvc
     FROM bbg2015_10m INNER JOIN 
     (select st_buffer(ST_GeomFromText(%s, 28992), %s) as geom) AS clipper 
     ON ST_Intersects(bbg2015_10m.rast, clipper.geom) GROUP BY clipper.geom)
     SELECT (pvc).value, SUM((pvc).count) As total from clip_query  GROUP BY (pvc).value;""",
        ([center_geom_txt, buffer]))

    result_set_1 = cur_1.fetchall()
    if cur_1.rowcount == 0:
        print('postcode = ', pc6_id,
              ' is not overlapped with any land use classes!')
        # fix: close the connection before the early return (was leaked)
        cur_1.close()
        conn_1.close()
        return 0, 0, 0

    landuse_class_idx = 0.0
    prop_sum = 0.0
    land_prop_dict = {}

    # Aggregate raw pixel counts into the configured class groups.
    for record in result_set_1:
        landuse_type = record[0]
        landuse_count = record[1]

        for i in range(landuse_class_num):
            if landuse_type in landuse_class_list[i]:
                land_prop_dict[i] = land_prop_dict.get(i, 0) + landuse_count
                break

    # fix: the connection was never closed on the normal path either
    cur_1.close()
    conn_1.close()

    # Convert counts to buffer-area proportions and accumulate the
    # Shannon terms (log base landuse_class_num normalises to [0, 1]).
    for k in land_prop_dict:
        prop = land_prop_dict[k] * pixel_area / buffer_area
        land_prop_dict[k] = prop
        prop_sum = prop_sum + prop
        if prop > 0:
            landuse_class_idx = landuse_class_idx + prop * math.log(
                prop) / math.log(landuse_class_num)

    landuse_class_idx = -landuse_class_idx
    return landuse_class_idx, land_prop_dict, prop_sum
def update_all_idx(records, buffer):
    """Batch-update building ratios in the buffer-specific target table.

    records -- iterable of (comm_bldg_ratio, resi_bldg_ratio, id) tuples,
               matching the UPDATE placeholders in order.
    buffer  -- buffer radius; selects the `<target_table>_<buffer>` table.
    """
    conn_update = None  # fix: `finally` raised NameError when connect() failed
    try:
        cur_update, conn_update = uf.connect()
        cur_update.executemany(
            "UPDATE " + target_table + "_" + str(buffer) +
            " SET comm_bldg_ratio = %s, resi_bldg_ratio = %s where id = %s;",
            records)
        conn_update.commit()
    except (Exception, psycopg2.Error) as error:
        print("Failed inserting record into table {}".format(error))
    finally:
        # closing database connection.
        if conn_update:
            cur_update.close()
            conn_update.close()
def calc(table, buffer):
    """Driver: compute the NDVI average for every pc6 centroid of
    `source_table` and batch-write results via update_all_ndvi.

    NOTE(review): declares the `buffer_area` / `pixel_area` globals but
    never assigns them here (presumably calc_ndvi_avg or another driver
    sets them); the `table` parameter is unused — confirm intent.
    """
    global buffer_area
    global pixel_area

    cur, conn = uf.connect()
    cur.execute("select gid, ST_AsText(st_centroid(geom)) from " +
                source_table + ";")
    record_result_set = cur.fetchall()

    result_all_list = []

    start_time = time.time()
    row_index = 0
    for row in record_result_set:
        row_index = row_index + 1
        id = row[0]
        center_geom_txt = row[1]

        result_one_tuple = ()
        id, ndvi_avg = calc_ndvi_avg(id, center_geom_txt, buffer)
        # tuple order matches update_all_ndvi's UPDATE placeholders
        result_one_tuple = (ndvi_avg, id)
        result_all_list.append(result_one_tuple)

        if row_index % uf.batch_number == 0:
            # flush a full batch and report progress
            update_all_ndvi(result_all_list, buffer)
            end_time = time.time()
            time_diff = end_time - start_time  # in seconds
            time_diff = round(time_diff, 2)
            time_diff_min = round(time_diff / 60, 2)  # in mins

            print("calc ndvi for pc6 id ", int(id), "row_index = ", row_index,
                  " result= ", result_one_tuple, "; running time = ",
                  time_diff_min, " mins! ", time_diff, " sec! ", " buffer = ",
                  buffer)
            logger.info("calc ndvi for pc6 id " + str(int(id)) +
                        " row_index = " + str(row_index) + " result= " +
                        str(result_one_tuple) + "; running time = " +
                        str(time_diff_min) + " mins! " + str(time_diff) +
                        " sec! buffer = " + str(buffer))

            # empty list
            start_time = time.time()
            result_all_list = []

    # flush the final partial batch
    update_all_ndvi(result_all_list, buffer)
    cur.close()
    conn.close()
def calc_motorway_length(id, geom_txt, buffer):
    """Total length (m, geography cast) of >=50 km/h OSM roads clipped to
    the buffer around a point.

    id       -- caller's record id, echoed back in the return value
    geom_txt -- WKT of the centre point (EPSG:28992)
    buffer   -- buffer radius in metres

    Returns (id, length); (id, -2) when no road matches; (id, 0) when
    rows matched but the SUM is NULL; (id, -1) when the query failed
    (previously this path crashed with NameError on the final return).
    """
    motorway_length = -1  # sentinel in case the query below fails
    conn1 = None  # fix: `finally` raised NameError when connect() failed
    try:
        cur1, conn1 = uf.connect()
        cur1.execute(
            """    
        SELECT sum(ST_Length(ST_Intersection(ways.linestring, ST_Transform(ST_Buffer(ST_GeomFromText(%s, 28992), %s), 4326))::geography)) AS motorway_length
        FROM ways
        WHERE ST_Intersects(ways.linestring, ST_Transform(ST_Buffer(ST_GeomFromText(%s, 28992), %s), 4326))
        and (tags -> 'highway' IS NOT NULL) 
        and (tags -> 'highway'= 'primary' or tags -> 'highway'= 'primary_link' or tags -> 'highway'= 'trunk' or tags -> 'highway' ='trunk_link' or tags -> 'highway' ='secondary' or tags -> 'highway'= 'secondary_link' or tags -> 'highway' ='tertiary' or tags -> 'highway'= 'tertiary_link' or tags -> 'highway' ='motorway'
        or tags -> 'highway' ='motorway_link') 
        and ST_IsEmpty(linestring) IS false 
        AND ST_IsValid(linestring) IS true
        and ((convert_to_integer(tags->'maxspeed') >= 50) 
        or (convert_to_integer(tags->'maxspeed:forward') >= 50) 
        or (convert_to_integer(tags->'maxspeed:backward') >= 50));""",
            ([geom_txt, buffer, geom_txt, buffer]))

        # too slow with --WHERE ST_Intersects(ST_Transform(ways.linestring, 28992), ST_Buffer(ST_SetSRID(ST_Point(%s, %s), 28992), %s))

        result_set1 = cur1.fetchall()
        if cur1.rowcount == 0:
            # fix: the old `print(...), ' ...'` built a discarded tuple, so
            # the message tail was never printed
            print('id = ', id, ' is not overlapped with any motorways')
            return id, -2

        result = result_set1[0]
        motorway_length = result[0]

        if motorway_length is None:
            print('id = ', id, " query result = ", result,
                  ' is not overlapped with any motorways')
            return id, 0

    except (Exception, psycopg2.Error) as error:
        # fix: the old format string had no placeholder, so the error was
        # never shown (and "calculating" was misspelt)
        print("error in calculating motorway length {}".format(error))
        logger.info("error in calculating motorway length, for pc6 " +
                    str(id))
    finally:
        # closing database connection.
        if conn1:
            cur1.close()
            conn1.close()

    return id, motorway_length
def update_all_noise(records, buffer):
    """Batch-update noise distributions in the buffer-specific target table.

    records -- iterable of (dn_1..dn_6, id) tuples, matching the UPDATE
               placeholders in order.
    buffer  -- buffer radius; selects the `<uf.target_table>_<buffer>` table.
    """
    conn_update = None  # fix: `finally` raised NameError when connect() failed
    try:
        cur_update, conn_update = uf.connect()
        cur_update.executemany(
            "UPDATE " + uf.target_table + "_" + str(buffer) +
            " SET dn_1 = %s, dn_2 = %s, dn_3 = %s, dn_4 = %s, dn_5 = %s, dn_6 = %s where id = %s",
            records)
        conn_update.commit()
    except (Exception, psycopg2.Error) as error:
        print("Failed inserting record into table {}".format(error))
    finally:
        # closing database connection.
        if conn_update:
            cur_update.close()
            conn_update.close()
def update_multiple_street_idx(records, buffer):
    """Batch-update street/motorway metrics in the buffer-specific table.

    records -- iterable of (street_length, street_density, motorway_length,
               motorway_density, id) tuples, matching the UPDATE
               placeholders in order.
    buffer  -- buffer radius; selects the `<target_table>_<buffer>` table.
    """
    conn_update = None  # fix: `finally` raised NameError when connect() failed
    try:
        cur_update, conn_update = uf.connect()
        cur_update.executemany(
            "UPDATE " + target_table + "_" + str(buffer) +
            " set street_length = %s, street_density = %s, motorway_length = %s, motorway_density = %s where id = %s;",
            records)
        conn_update.commit()
    except (Exception, psycopg2.Error) as error:
        print("Failed inserting record into table {}".format(error))
    finally:
        # closing database connection.
        if conn_update:
            cur_update.close()
            conn_update.close()
def calc_noise_distributions(id, geom_txt, buffer):
    """Proportions of the buffer falling in each noise class (1..6).

    Counts noise_2016_10m raster pixels per class value and converts
    counts to buffer-area proportions; class values other than 1-5 are
    accumulated into dn_6.  Relies on module globals `buffer_area` and
    `pixel_area` set by the driver.

    Returns (id, dn_1..dn_6); (id, -2, ..., -2) when the buffer overlaps
    no noise raster pixel.
    """
    global buffer_area
    global pixel_area

    cur_noise, conn_noise = uf.connect()
    cur_noise.execute(
        """with clip_query as (SELECT ST_ValueCount(ST_Clip(st_union(rast), clipper.geom, false)) as pvc
     FROM noise_2016_10m INNER JOIN 
     (select st_buffer(ST_GeomFromText(%s, 28992), %s) as geom) AS clipper 
     ON ST_Intersects(noise_2016_10m.rast, clipper.geom) GROUP BY clipper.geom)
     SELECT (pvc).value, SUM((pvc).count) As total from clip_query GROUP BY (pvc).value;""",
        ([geom_txt, buffer]))

    result_set_noise = cur_noise.fetchall()
    if cur_noise.rowcount == 0:
        # fix: print the full message (old trailing `, '...'` after print()
        # was a discarded tuple) and close the connection before returning.
        print('id = ', id, ' is not overlapped with any noise areas')
        cur_noise.close()
        conn_noise.close()
        return id, -2, -2, -2, -2, -2, -2

    dn_1 = 0.0
    dn_2 = 0.0
    dn_3 = 0.0
    dn_4 = 0.0
    dn_5 = 0.0
    dn_6 = 0.0

    for record in result_set_noise:
        noise_dn = record[0]
        noise_count = record[1]
        if noise_dn == 1:
            dn_1 = noise_count * pixel_area / buffer_area
        elif noise_dn == 2:
            dn_2 = noise_count * pixel_area / buffer_area
        elif noise_dn == 3:
            dn_3 = noise_count * pixel_area / buffer_area
        elif noise_dn == 4:
            dn_4 = noise_count * pixel_area / buffer_area
        else:
            # everything else (class 5+ and unexpected values) pools here,
            # hence accumulation instead of assignment
            dn_6 = dn_6 + noise_count * pixel_area / buffer_area
        if noise_dn == 5:
            dn_5 = noise_count * pixel_area / buffer_area

    cur_noise.close()
    conn_noise.close()

    return id, dn_1, dn_2, dn_3, dn_4, dn_5, dn_6
Beispiel #16
0
def check_skip(gid, buffer):
    """Return True when the target row for gid already has a no2_avg value.

    Used by the air-pollution driver to resume an interrupted run without
    recomputing finished rows.  Returns False when the value is NULL or
    the row does not exist (previously a missing row raised IndexError).
    """
    cur_5, conn_5 = uf.connect()
    cur_5.execute("""select no2_avg notnull from """ +target_table+"_"+ str(buffer) + """ where id = %s """, ([gid]))

    result_set5 = cur_5.fetchall()

    skipped = False
    if result_set5:
        # single boolean column: True when no2_avg is already filled
        skipped = bool(result_set5[0][0])

    cur_5.close()
    conn_5.close()

    return skipped
def calc_bldg_area(gid, geom_txt, buffer):
    """Total commercial and residential building area inside the buffer.

    Joins pand_dec_2016 footprints intersecting the buffer with usage
    records in vobject_dec_2016, then classifies each building by its
    aggregated function set (is_desired against the module-level
    commcercial_function / residential_function collections).

    Returns (total_commercial_area, total_residential_area);
    (0, 0) when no building intersects the buffer.
    """
    cur_1, conn_1 = uf.connect()
    # skip connectivity=3; the 1-ways are typical for suburban areas,
    cur_1.execute("""with bldg_query as (
      select * from pand_dec_2016 where ST_Intersects(geom, st_buffer(ST_GeomFromText(%s, 28992), %s))
    )
    select a.gid, max(st_area(a.geom)), string_agg(b.gebruiksdo::text, ';') from bldg_query a, vobject_dec_2016 b where ST_Intersects(a.geom, b.geom) group by a.gid""", ([geom_txt, buffer]))

    total_commercial_area = 0
    total_residential_area = 0
    result_set_1 = cur_1.fetchall()

    if len(result_set_1) == 0:
        # fix: log the gid argument (the old code formatted the `id`
        # builtin) and close the connection before the early return.
        logger.info('id = ' + str(gid) + ' is not overlapped with any ' +
                    variable + ' features!')
        cur_1.close()
        conn_1.close()
        return 0, 0

    for record in result_set_1:
        bldg_area = record[1]
        bldg_functions = record[2]
        if bldg_functions is None:
            # building has no registered functions; cannot classify it
            continue
        bldg_function_set = set(bldg_functions.split(";"))

        # a building can count towards both totals when it has mixed use
        if is_desired(bldg_function_set, commcercial_function):
            total_commercial_area = total_commercial_area + bldg_area

        if is_desired(bldg_function_set, residential_function):
            total_residential_area = total_residential_area + bldg_area

    cur_1.close()
    conn_1.close()

    return total_commercial_area, total_residential_area
def calc(table, buffer):
    """Driver: compute the land-use diversity index for every pc6
    centroid of `source_table` and batch-write results via
    update_multiple_landuse_idx.

    NOTE(review): the `table` parameter is unused; the module-level
    `source_table` name is read instead — confirm intent.
    """
    cur, conn = uf.connect()    
    cur.execute("select gid, ST_AsText(st_centroid(geom)) from " + source_table +";")
    record_result_set = cur.fetchall()
  
    result_all_list = []
  
    start_time =time.time()
    row_index = 0
    for row in record_result_set:
        row_index = row_index + 1
        id = row[0]
        center_geom_txt = row[1]
        
        result_one_tuple=()
        
        landuse_idx, land_prop_list, prop_sum = calc_landuse_index(id, center_geom_txt, buffer)
        #if  landuse_idx < 0.1 or abs(prop_sum - 1) > 0.001: 
        #    print "not normal landuse index for pc6 = ", int(id),  " landuse index = ", landuse_idx, " land_prop_list = ", land_prop_list, " prop_sum = ", prop_sum
        #    logger.info('"not noraml landuse index for pc6 = ' + str(id) + " ; lanuse index number = " + str(landuse_idx) + "; prop_sum number = " + str(prop_sum) + " land_prop_list = "  +  str(land_prop_list).strip('[]'))
                         
                         
        # tuple order matches update_multiple_landuse_idx's placeholders
        result_one_tuple = (landuse_idx, id)
        result_all_list.append(result_one_tuple)
        
        if row_index % uf.batch_number == 0:
            # flush a full batch and report progress
            update_multiple_landuse_idx(result_all_list, buffer)
            end_time = time.time()
            time_diff = end_time - start_time # in seconds
            time_diff = round(time_diff, 2)
            time_diff_min = round(time_diff/60, 2) # in mins
            
            print("calc landuse for pc6 id ", int(id), "row_index = " , row_index, " result= " , result_one_tuple, "; running time = ", time_diff_min, " mins! ", time_diff, " sec! ", " buffer = ", buffer)
            logger.info("calc landuse for pc6 id "+ str(int(id))+  " row_index = " + str(row_index) + " result= " +  str(result_one_tuple) + "; running time = " +  str(time_diff_min) + " mins! " + str(time_diff) + " sec! buffer = " + str(buffer))
        
            # empty list
            start_time =time.time()
            result_all_list = []
    
    # flush the final partial batch
    update_multiple_landuse_idx(result_all_list, buffer)
    cur.close()
    conn.close()
def calc_street_length(id, geom_txt, buffer):
    """Total length (m, geography cast) of low-speed (< 50 km/h or
    unposted) OSM streets clipped to the buffer around a point.

    id       -- caller's record id, echoed back in the return value
    geom_txt -- WKT of the centre point (EPSG:28992)
    buffer   -- buffer radius in metres

    Returns (id, length); (id, -2) when no street matches;
    (id, 0) when rows matched but the SUM came back NULL.
    """
    cur1, conn1 = uf.connect()
    cur1.execute(
        """   
        SELECT sum(ST_Length(ST_Intersection(ways.linestring, ST_Transform(ST_Buffer(ST_GeomFromText(%s, 28992), %s), 4326))::geography)) AS street_length
        FROM ways
        WHERE ST_Intersects(ways.linestring, ST_Transform(ST_Buffer(ST_GeomFromText(%s, 28992), %s), 4326))
        and  (tags -> 'highway' IS NOT NULL) 
        and (tags -> 'highway'= 'primary' or tags -> 'highway'= 'primary_link' or tags -> 'highway'= 'trunk' or tags -> 'highway' ='trunk_link' or tags -> 'highway' ='secondary' or tags -> 'highway'= 'secondary_link' or tags -> 'highway' ='tertiary' or tags -> 'highway'= 'tertiary_link' or tags -> 'highway' ='footway' or tags -> 'highway' ='path' or tags -> 'highway' ='steps' or tags -> 'highway' ='pedestrian' or tags -> 'highway' ='living_street' or tags -> 'highway' ='track' or tags -> 'highway'= 'residential' or tags -> 'highway' ='service' or tags -> 'highway' ='unclassified' or tags -> 'highway'= 'road' or tags -> 'highway' ='cycleway' ) 
        and ST_IsEmpty(linestring) IS false 
        AND ST_IsValid(linestring) IS true
        and (not (ways.tags::hstore ? 'maxspeed') or convert_to_integer(tags->'maxspeed') < 50) 
        and (not (ways.tags::hstore ? 'maxspeed:forward') or convert_to_integer(tags->'maxspeed:forward') < 50) 
        and (not (ways.tags::hstore ? 'maxspeed:backward') or convert_to_integer(tags->'maxspeed:backward') < 50);""",
        ([geom_txt, buffer, geom_txt, buffer]))

    # too slow with --WHERE ST_Intersects(ST_Transform(ways.linestring, 28992), ST_Buffer(ST_SetSRID(ST_Point(%s, %s), 28992), %s))

    result_set1 = cur1.fetchall()
    if cur1.rowcount == 0:
        # fix: the old `print(...), ' ...'` built a discarded tuple, so the
        # message tail was never printed; also close the connection here.
        print('id = ', id, ' is not overlapped with any streets')
        cur1.close()
        conn1.close()
        return id, -2

    result = result_set1[0]
    street_length = result[0]

    if street_length is None:
        print('id = ', id, " query result = ", result,
              ' is not overlapped with any streets')
        # fix: close the connection before the early return (was leaked)
        cur1.close()
        conn1.close()
        return id, 0

    cur1.close()
    conn1.close()

    return id, street_length
def calc_no2_avg(id, geom_txt, buffer):
    """Pixel-count-weighted mean of the no2_2011_100m raster inside the
    buffer, ignoring non-positive (nodata) pixel values.

    id       -- pc6 id, echoed back in the return value
    geom_txt -- WKT of the centre point (EPSG:28992)
    buffer   -- buffer radius in metres

    Returns (id, avg); (id, -99999) when the buffer overlaps no raster
    or contains only non-positive values.
    """
    cur_noise, conn_noise = uf.connect()
    cur_noise.execute("""with clip_query as (SELECT ST_ValueCount(ST_Clip(st_union(rast), clipper.geom, false)) as pvc
     FROM no2_2011_100m INNER JOIN 
     (select st_buffer(ST_GeomFromText(%s, 28992), %s) as geom) AS clipper 
     ON ST_Intersects(no2_2011_100m.rast, clipper.geom) GROUP BY clipper.geom)
     SELECT (pvc).value, SUM((pvc).count) As total from clip_query GROUP BY (pvc).value;""", ([geom_txt, buffer]))

    result_set = cur_noise.fetchall()
    if cur_noise.rowcount == 0:
        # fix: print the full message (old trailing `, '...'` after print()
        # was a discarded tuple) and close the connection before returning.
        print('id = ', id, ' is not overlapped with any no2 polluted areas')
        cur_noise.close()
        conn_noise.close()
        return id, -99999

    total_num = 0
    total_value = 0
    for record in result_set:
        no2_value = record[0]
        no2_count = record[1]

        # non-positive values are treated as nodata and skipped
        if no2_value > 0:
            total_num = total_num + no2_count
            total_value = total_value + no2_value * no2_count

    # fix: this check used to sit INSIDE the loop, so a first record with a
    # non-positive value returned -99999 even when later records held data
    # (and leaked the connection on that path).
    if total_num == 0:
        logger.info("pc6 gid = "+ str(id) + " has no no2 value !")
        cur_noise.close()
        conn_noise.close()
        return id, -99999

    no2_avg = total_value / total_num

    cur_noise.close()
    conn_noise.close()

    return id, no2_avg
def get_target_table(buffer):
    """Return the name of the buffer-specific target table, (re)creating it
    when uf.create_tables is set.

    The fresh table gets columns (id, pc6, geom) seeded from
    pc6_2017_jan.  WARNING: when uf.create_tables is true any existing
    `<uf.target_table>_<buffer>` table is dropped, losing prior results.
    """
    # Fast path: reuse the existing table without touching the database.
    if not uf.create_tables:
        return uf.target_table+'_'+str(buffer)
    
    
    start_time =time.time()
    
    cur1, conn1 = uf.connect()
    # drop column if exist and then create route column 
    
    cur1.execute("""drop TABLE if exists """+ uf.target_table+'_'+str(buffer)+""";""")
    
    
    cur1.execute("""CREATE TABLE """ +uf.target_table+'_'+str(buffer)+""" (
      id integer primary key,
      pc6 character(6)
     );""")
    
    ## geometry of the source ojbect
    cur1.execute("""
    ALTER TABLE """ +uf.target_table+'_'+str(buffer)+""" DROP COLUMN IF EXISTS geom;
    ALTER TABLE """ +uf.target_table+'_'+str(buffer)+""" ADD COLUMN geom geometry(MultiPolygon,28992);""")

    # Seed one row per pc6 polygon; the metric columns are added/updated
    # later by the various update_* helpers.
    cur1.execute(""" INSERT INTO """ +uf.target_table+'_'+str(buffer)+"""(id, pc6, geom) 
    (SELECT gid, pc6, geom FROM pc6_2017_jan)""");


    ## copy geometry and id
    conn1.commit()
    cur1.close()
    conn1.close()
    
    
    end_time = time.time()
    time_diff = end_time - start_time # in seconds
    print("create table for ", uf.target_table+'_'+str(buffer), "; running time = ", (time_diff/60), " mins! ", time_diff, " sec! ", time_diff*1000, " million seconds!")
    logger.info("create table for "+ uf.target_table+'_'+str(buffer) + "; running time = " + str((time_diff/60)) + " mins! " + str(time_diff) + " sec! " + str(time_diff*1000) + " million seconds!")  
    
    return uf.target_table+'_'+str(buffer)
Beispiel #22
0
def calc_air_idx(id, geom_txt, buffer):
    """Compute area-weighted NO2, PM2.5 and PM10 averages within a buffer.

    Intersects a circular buffer around the point `geom_txt` (WKT,
    EPSG:28992) with the air_pollution_nl_2015 polygons and weights each
    polygon's averages by its share of the full buffer area.

    Parameters:
        id:       postcode (pc6) identifier, used only for messages.
        geom_txt: WKT of the buffer centre point.
        buffer:   buffer radius in metres.

    Returns:
        (no2_avg, pm25_avg, pm10_avg, rate_sum); rate_sum should be ~1.0
        when the buffer is fully covered.  All zeros when nothing overlaps.
    """
    # buffer_area is the full circle area, set once in calc() as pi*buffer^2.
    global buffer_area

    cur_air, conn_air = uf.connect()
    cur_air.execute(
        """select no2_avg, pm25_avg, pm10_avg, ST_Area(ST_Intersection(geom,st_buffer(ST_GeomFromText(%s, 28992), %s))) 
    from air_pollution_nl_2015 where ST_Intersects(geom, st_buffer(ST_GeomFromText(%s, 28992), %s));""",
        ([geom_txt, buffer, geom_txt, buffer]))

    result_set_air = cur_air.fetchall()
    if cur_air.rowcount == 0:
        # BUG FIX: the original `print(...), '...'` built a throwaway tuple,
        # so the second half of the message was never printed (Py2 artifact).
        print('postcode pc6 = ', id, ' is not overlapped with any air pollution zones')
        # BUG FIX: release DB resources on the early-return path as well.
        cur_air.close()
        conn_air.close()
        return 0, 0, 0, 0

    no2_avg = 0.0
    pm25_avg = 0.0
    pm10_avg = 0.0
    rate_sum = 0.0

    for record in result_set_air:
        no2 = record[0]
        pm25 = record[1]
        pm10 = record[2]
        intsec_area = record[3]
        # Weight of this pollution polygon = overlap area / full buffer area.
        rate = intsec_area / buffer_area

        no2_avg = no2_avg + no2 * rate
        pm25_avg = pm25_avg + pm25 * rate
        pm10_avg = pm10_avg + pm10 * rate
        rate_sum = rate_sum + rate

    cur_air.close()
    conn_air.close()

    return no2_avg, pm25_avg, pm10_avg, rate_sum
def calc_crossings(gid, geom_txt, buffer):
    """Count street crossings within a buffer around one postcode centroid.

    Queries crossings_2016 for features intersecting the buffered geometry.
    Nodes with edgecount == 3 are skipped: those 1-ways are typical for
    suburban areas and are not considered real crossings.

    Parameters:
        gid:      postcode record id, used only for logging.
        geom_txt: WKT of the buffer centre point (EPSG:28992).
        buffer:   buffer radius in metres.

    Returns:
        Number of qualifying crossings (0 when the count is NULL).
    """
    cur_1, conn_1 = uf.connect()
    cur_1.execute(
        """select count(*) from crossings_2016 where ST_Intersects(geom, st_buffer(ST_GeomFromText(%s, 28992), %s)) and edgecount !=3.0""",
        ([geom_txt, buffer]))

    result_set_1 = cur_1.fetchall()

    result = result_set_1[0]
    crossing_count = result[0]

    if crossing_count is None:
        # BUG FIX: the original logged the builtin function `id` instead of
        # the `gid` parameter.
        logger.info('id = ' + str(gid) + ' is not overlapped with any connectivity points!' + " query result = " + str(result))
        # BUG FIX: release DB resources on the early-return path as well.
        cur_1.close()
        conn_1.close()
        return 0

    cur_1.close()
    conn_1.close()

    return crossing_count
# Beispiel #24 (example-site separator artifact; commented out to keep the file valid Python)
def calc_address_num(gid, geom_txt, buffer):
    """Count address points (addr_dec_2016) within a buffer around a point.

    Parameters:
        gid:      postcode record id, used only for logging.
        geom_txt: WKT of the buffer centre point (EPSG:28992).
        buffer:   buffer radius in metres.

    Returns:
        Number of address points (0 when the count is NULL).
    """
    cur_1, conn_1 = uf.connect()
    cur_1.execute(
        """select count(*) from addr_dec_2016 where ST_Intersects(geom, st_buffer(ST_GeomFromText(%s, 28992), %s));""",
        ([geom_txt, buffer]))

    result_set_1 = cur_1.fetchall()

    result = result_set_1[0]
    addr_count = result[0]

    if addr_count is None:
        # BUG FIX: the original logged the builtin `id`, referenced an
        # undefined name `variable` (NameError) and concatenated a str with
        # the numeric `buffer` (TypeError) -- this branch could never log.
        logger.info('id = ' + str(gid) + ' is not overlapped with any ' +
                    'addr_dec_2016 geometry!' + " query result = " +
                    str(result) + " within buffer = " + str(buffer))
        # Release DB resources on the early-return path as well.
        cur_1.close()
        conn_1.close()
        return 0

    cur_1.close()
    conn_1.close()

    return addr_count
def add_columns(buffer):
    """Add the per-indicator result columns to the target table.

    Each uf.add_* flag controls one group of columns.  Every selected
    column is dropped first (if present) and re-added as double precision,
    so reruns always start from a clean column.

    Parameters:
        buffer: buffer radius; used only as the target table name suffix.

    Returns:
        The target table name as a string.
    """
    table = uf.target_table + '_' + str(buffer)
    cur1, conn1 = uf.connect()

    def _recreate_column(name, default=None):
        # Drop-then-add one double precision column on the target table.
        default_clause = '' if default is None else ' default ' + str(default)
        cur1.execute(
            "ALTER TABLE " + table + " DROP COLUMN IF EXISTS " + name + ";"
            "ALTER TABLE " + table + " ADD COLUMN " + name +
            " double precision" + default_clause + ";")

    ## residential building area and density (these two default to 0)
    if uf.add_bldg_column:
        _recreate_column('res_bldg_area', default=0)
        _recreate_column('res_bldg_density', default=0)

    if uf.add_street_column:
        ## motorway length and density
        _recreate_column('motorway_length')
        _recreate_column('motorway_density')
        ## street length and density
        _recreate_column('street_length')
        _recreate_column('street_density')

    ## noise columns dn_1 .. dn_6
    if uf.add_noise_column:
        for band in range(1, 7):
            _recreate_column('dn_' + str(band))

    ## air pollution columns
    if uf.add_airpollution_column:
        for col in ('no2_avg', 'nox_avg', 'pm25_avg', 'pm10_avg'):
            _recreate_column(col)

    # landuse index
    if uf.add_landuse_column:
        _recreate_column('landuse_idx')

    # ndvi index
    if uf.add_ndvi_column:
        _recreate_column('ndvi_avg')

    # crossing index
    if uf.add_crossing_column:
        _recreate_column('crossing_num')

    # commercial / residential building ratios
    if uf.add_commercial_column:
        _recreate_column('comm_bldg_ratio')
        _recreate_column('resi_bldg_ratio')

    # degree of urbanization
    if uf.add_addr_column:
        _recreate_column('addr_num')

    # no2 in 2011
    if uf.add_no2_column:
        _recreate_column('no2_2011')

    # rdvi index
    if uf.add_rdvi_column:
        _recreate_column('rdvi_avg')

    conn1.commit()
    cur1.close()
    conn1.close()

    return table
# Beispiel #26 (example-site separator artifact; commented out to keep the file valid Python)
def calc(table, buffer):
    """Drive the air-pollution aggregation for every postcode centroid.

    Iterates all rows of the module-level `source_table`, computes
    area-weighted air indices per centroid via calc_air_idx()/calc_nox_idx(),
    and flushes accumulated results to the database in batches of
    uf.batch_number rows via update_all_air_idx().

    NOTE(review): the `table` parameter is unused; the query reads the
    module-level `source_table` instead -- confirm this is intentional.
    """

    # Shared with calc_air_idx()/calc_nox_idx(): full circular buffer area.
    global buffer_area
    buffer_area = math.pi * buffer * buffer
    cur, conn = uf.connect()
    cur.execute("select gid, ST_AsText(st_centroid(geom)) from " +
                source_table + ";")
    record_result_set = cur.fetchall()

    result_all_list = []

    start_time = time.time()
    row_index = 0
    for row in record_result_set:
        row_index = row_index + 1
        id = row[0]
        center_geom_txt = row[1]

        result_one_tuple = ()
        no2_avg, pm25_avg, pm10_avg, rate_sum1 = calc_air_idx(
            id, center_geom_txt, buffer)
        nox_avg, rate_sum2 = calc_nox_idx(id, center_geom_txt, buffer)

        # Weight sums should be ~1.0 when the buffer is fully covered by
        # pollution polygons; clearly above 1 means overlapping coverage.
        if rate_sum1 - 1 > 0.0001 or rate_sum2 - 1 > 0.0001:
            print("something wrong with calc air index for pc6 = ", id,
                  rate_sum1, rate_sum2)
            print("pc6 = ", id, "; area = ", buffer_area, "; no2_avg = ",
                  no2_avg, "; nox_avg = ", nox_avg, "; pm25_avg = ", pm25_avg,
                  "; pm10_avg = ", pm10_avg)
            logger.info('"something wrong with calc air index for pc6 = ' +
                        str(id) + " ; sum1 = " + str(rate_sum1) +
                        " ; sum2 = " + str(rate_sum2))
            logger.info("pc6 id = " + str(id) + "; area = " +
                        str(buffer_area) + "; no2_avg = " + str(no2_avg) +
                        "; nox_avg = " + str(nox_avg) + "; pm25_avg = " +
                        str(pm25_avg) + "; pm10_avg = " + str(pm10_avg))

        result_one_tuple = (no2_avg, nox_avg, pm25_avg, pm10_avg, id)
        result_all_list.append(result_one_tuple)

        # Flush a full batch to the DB and report progress/timing.
        if row_index % uf.batch_number == 0:

            update_all_air_idx(result_all_list, buffer)
            end_time = time.time()
            time_diff = end_time - start_time  # in seconds
            time_diff = round(time_diff, 2)
            time_diff_min = round(time_diff / 60, 2)  # in mins
            print("calc air pollution index for pc6, id = ", int(id),
                  " row_index = ", row_index, " result= ", result_one_tuple,
                  "; running time = ", time_diff_min, " mins! ", time_diff,
                  " sec! ", " buffer = ", buffer)
            logger.info("calc air pollution index for pc6, id = " +
                        str(int(id)) + " row_index = " + str(row_index) +
                        " result= " + str(result_one_tuple) +
                        "; running time = " + str(time_diff_min) + " mins! " +
                        str(time_diff) + " sec! buffer = " + str(buffer))

            # empty list and restart the batch timer
            start_time = time.time()
            result_all_list = []

    # Flush the final partial batch.
    update_all_air_idx(result_all_list, buffer)
    cur.close()
    conn.close()