Example #1
def create_tables(database: SqliteUtil):
    database.drop_table('air_temperatures', 'temp_links', 'temp_parcels',
                        'temp_links_merged', 'temp_parcels_merged')
    query = '''
        CREATE TABLE air_temperatures(
            temperature_id MEDIUMINT UNSIGNED,
            temperature_idx SMALLINT UNSIGNED,
            time MEDIUMINT UNSIGNED,
            temperature FLOAT
        );
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE temp_links(
            link_id VARCHAR(255),
            air_temperature MEDIUMINT UNSIGNED
        );
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE temp_parcels(
            apn VARCHAR(255),
            air_temperature MEDIUMINT UNSIGNED
        );
    '''
    database.cursor.execute(query)
    database.connection.commit()
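
All of these examples call into a small SqliteUtil wrapper around the standard sqlite3 module. The real class lives elsewhere in the project; the sketch below is only an assumed reconstruction of the interface the snippets use (drop_table, cursor, connection, insert_values, count_rows), so the examples can be read in isolation.

# Illustrative stand-in for the project's SqliteUtil; names and signatures are
# inferred from how the examples call it, not taken from the real class.
import sqlite3
from typing import Iterable, Tuple


class SqliteUtil:
    def __init__(self, path: str):
        self.connection = sqlite3.connect(path)
        self.cursor = self.connection.cursor()

    def drop_table(self, *tables: str):
        # drop each listed table if it exists
        for table in tables:
            self.cursor.execute(f'DROP TABLE IF EXISTS {table};')

    def insert_values(self, table: str, rows: Iterable[Tuple], columns: int):
        # bulk insert rows with the given number of columns
        placeholders = ', '.join('?' * columns)
        self.cursor.executemany(
            f'INSERT INTO {table} VALUES ({placeholders});', rows)

    def count_rows(self, table: str) -> int:
        self.cursor.execute(f'SELECT COUNT(*) FROM {table};')
        return self.cursor.fetchone()[0]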
Example #2
def create_tables(database: SqliteUtil):
    database.drop_table('nodes', 'links')
    query = '''
        CREATE TABLE nodes(
            node_id VARCHAR(255),
            maz SMALLINT UNSIGNED,
            point VARCHAR(255)
        );
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE links(
            link_id VARCHAR(255),
            source_node VARCHAR(255),
            terminal_node VARCHAR(255),
            length FLOAT,
            freespeed FLOAT,
            capacity FLOAT,
            permlanes FLOAT,
            oneway TINYINT UNSIGNED,
            modes VARCHAR(255),
            air_temperature INT UNSIGNED,
            mrt_temperature INT UNSIGNED
        );
    '''
    database.cursor.execute(query)
    database.connection.commit()
Example #3
def create_tables(database: SqliteUtil):
    database.drop_table('regions')
    query = '''
        CREATE TABLE regions(
            maz SMALLINT UNSIGNED,
            taz SMALLINT UNSIGNED,
            area FLOAT,
            center VARCHAR(255),
            region TEXT
        );
    '''
    database.cursor.execute(query)
    database.connection.commit()
Example #4
def create_tables(database: SqliteUtil):
    database.drop_table('parcels')
    query = '''
        CREATE TABLE parcels(
            apn VARCHAR(255),
            maz SMALLINT UNSIGNED,
            type VARCHAR(255),
            cooling TINYINT UNSIGNED,
            air_temperature INT UNSIGNED,
            mrt_temperature INT UNSIGNED,
            center VARCHAR(255),
            region TEXT
        );
    '''
    database.cursor.execute(query)
    database.connection.commit()
Example #5
def parse_temperatures(database: SqliteUtil, tmin_files: List[str],
                       tmax_files: List[str], steps: int, day: int,
                       src_epsg: int, prj_epsg: int):

    log.info('Allocating tables for air temperatures.')
    create_tables(database)

    files = zip(tmax_files, tmin_files)
    profile_count = 0
    point_count = 0
    temperatures = []
    points = []
    profiles = {}
    n = 1

    transformer = Transformer.from_crs(f'epsg:{src_epsg}',
                                       f'epsg:{prj_epsg}',
                                       always_xy=True,
                                       skip_equivalent=True)
    project = transformer.transform

    def apply(id: int, temp: Callable):
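        # spread one day's profile across the schedule: step i starts at second
        # 86400 * i / steps and samples the temperature curve at hour 24 * i / steps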
        for step in range(steps):
            prop = step / steps
            row = (id, step, int(86400 * prop), temp(24 * prop))
            yield row

    log.info('Loading temperatures from netCDF4 files.')
    for tmax_file, tmin_file in files:
        tmaxnc = Dataset(tmax_file, 'r')
        tminnc = Dataset(tmin_file, 'r')

        lons = tmaxnc.variables['lon']
        lats = tmaxnc.variables['lat']
        shape = tmaxnc.variables['tmax'].shape

        tmaxs = tmaxnc.variables['tmax'][day]
        tmins = tminnc.variables['tmin'][day]

        for i in range(shape[1]):
            for j in range(shape[2]):
                tmax = tmaxs[i][j]
                tmin = tmins[i][j]

                if tmax != -9999.0:
                    x, y = project(lons[i][j], lats[i][j])
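                    # key profiles by (tmax, tmin) so each unique diurnal
                    # curve is generated and stored only once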
                    idx = f'{tmax}-{tmin}'

                    if idx not in profiles:
                        temp = iterpolation(tmin, tmax, 5, 15)
                        temperatures.extend(apply(profile_count, temp))
                        profiles[idx] = profile_count
                        profile_count += 1

                    profile = profiles[idx]
                    point = Point(point_count, x, y, profile)
                    points.append(point)
                    point_count += 1

                    if point_count == n:
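                        # log progress only at powers of two to keep output sparse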
                        log.info(
                            f'Loading air temperature reading {point_count}.')
                        n <<= 1

        tmaxnc.close()
        tminnc.close()

    if point_count != n >> 1:
        log.info(f'Loading air temperature reading {point_count}.')

    def load():
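        # spatial index entries are (id, bounding box, stored object); each
        # point gets a degenerate (x, y, x, y) box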
        for point in points:
            x, y = point.x, point.y
            yield (point.id, (x, y, x, y), point.profile)

    log.info('Starting network update for air temperatures.')
    log.info('Building spatial index from temperature profile locations.')
    index = Index(load())
    used = set()

    log.info('Loading network links.')
    links = load_links(database)

    log.info('Applying temperature profiles to links.')
    iter_links = counter(links, 'Applying profile to link %s.')
    for link in iter_links:
        result = index.nearest((link.x, link.y, link.x, link.y), objects=True)
        profile = next(result).object
        link.air_temperature = profile
        used.add(profile)

    def dump_links():
        for link in links:
            yield (link.id, link.air_temperature)

    log.info('Writing updated links to database.')
    database.insert_values('temp_links', dump_links(), 2)
    database.connection.commit()
    del links

    log.info('Loading network parcels.')
    parcels = load_parcels(database)

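    # parcels with cooling get flat indoor profiles at 26.6667 °C (80 °F)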
    residential = profile_count
    temperatures.extend(apply(profile_count, lambda x: 26.6667))
    profile_count += 1
    commercial = profile_count
    temperatures.extend(apply(profile_count, lambda x: 26.6667))
    profile_count += 1
    other = profile_count
    temperatures.extend(apply(profile_count, lambda x: 26.6667))
    profile_count += 1
    used.add(residential)
    used.add(commercial)
    used.add(other)

    log.info('Applying temperature profiles to parcels.')
    iter_parcels = counter(parcels, 'Applying profile to parcel %s.')
    for parcel in iter_parcels:
        if not parcel.cooling:
            x, y = xy(parcel.center)
            result = index.nearest((x, y, x, y), objects=True)
            profile = next(result).object
            parcel.air_temperature = profile
            used.add(profile)
        elif parcel.kind == 'residential':
            parcel.air_temperature = residential
        elif parcel.kind == 'commercial':
            parcel.air_temperature = commercial
        else:
            parcel.air_temperature = other

    def dump_parcels():
        for parcel in parcels:
            yield (parcel.apn, parcel.air_temperature)

    log.info('Writing updated parcels to database.')
    database.insert_values('temp_parcels', dump_parcels(), 2)
    database.connection.commit()
    del parcels

    def dump_temperatures():
        for temp in temperatures:
            if temp[0] in used:
                yield temp

    log.info('Writing parsed air temperatures to database.')
    database.insert_values('air_temperatures', dump_temperatures(), 4)
    database.connection.commit()
    del temperatures

    log.info('Merging, dropping and renaming old tables.')

    query = '''
        CREATE INDEX temp_links_link
        ON temp_links(link_id);
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE temp_links_merged
        AS SELECT
            links.link_id,
            links.source_node,
            links.terminal_node,
            links.length,
            links.freespeed,
            links.capacity,
            links.permlanes,
            links.oneway,
            links.modes,
            temp_links.air_temperature,
            links.mrt_temperature
        FROM links
        INNER JOIN temp_links
        USING(link_id);
    '''
    database.cursor.execute(query)
    query = '''
        CREATE INDEX temp_parcels_parcel
        ON temp_parcels(apn);
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE temp_parcels_merged
        AS SELECT
            parcels.apn,
            parcels.maz,
            parcels.type,
            parcels.cooling,
            temp_parcels.air_temperature,
            parcels.mrt_temperature,
            parcels.center,
            parcels.region
        FROM parcels
        INNER JOIN temp_parcels
        USING(apn);
    '''
    database.cursor.execute(query)

    original = database.count_rows('links')
    merged = database.count_rows('temp_links_merged')
    if original != merged:
        log.error('Original links and updated links tables '
                  'do not align; quitting to prevent data loss.')
        raise RuntimeError
    else:
        database.drop_table('links', 'temp_links')
        query = '''
            ALTER TABLE temp_links_merged
            RENAME TO links;
        '''
        database.cursor.execute(query)

    original = database.count_rows('parcels')
    merged = database.count_rows('temp_parcels_merged')
    if original != merged:
        log.error('Original parcels and updated parcels tables '
                  'do not align; quitting to prevent data loss.')
        raise RuntimeError
    else:
        database.drop_table('parcels', 'temp_parcels')
        query = '''
            ALTER TABLE temp_parcels_merged
            RENAME TO parcels;
        '''
        database.cursor.execute(query)

    database.connection.commit()

    log.info('Creating indexes on new tables.')
    create_indexes(database)

    log.info('Writing process metadata.')
Example #6
def create_tables(database: SqliteUtil):
    database.drop_table('trips', 'households', 'persons')
    query = '''
        CREATE TABLE trips(
            hhid_uniqueid INT UNSIGNED,
            uniqueid SMALLINT UNSIGNED,
            hhid MEDIUMINT UNSIGNED,
            pnum TINYINT UNSIGNED,
            personTripNum TINYINT UNSIGNED,
            jointTripRole TINYINT UNSIGNED,
            party VARCHAR(255),
            origTaz SMALLINT UNSIGNED,
            origMaz SMALLINT UNSIGNED,
            destTaz SMALLINT UNSIGNED,
            destMaz SMALLINT UNSIGNED,
            origPurp SMALLINT UNSIGNED,
            destPurp SMALLINT UNSIGNED,
            mode TINYINT UNSIGNED,
            vehId TINYINT,
            isamAdjDepMin FLOAT,
            isamAdjArrMin FLOAT,
            isamAdjDurMin FLOAT
        );
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE households(
            hhid MEDIUMINT UNSIGNED,
            hhidAcrossSample MEDIUMINT UNSIGNED,
            pumsSerialNo FLOAT,
            homeTaz SMALLINT UNSIGNED,
            homeMaz SMALLINT UNSIGNED,
            hhsize TINYINT UNSIGNED,
            numFtWorkers TINYINT UNSIGNED,
            numPtWorkers TINYINT UNSIGNED,
            numUnivStuds TINYINT UNSIGNED,
            numNonWorkers TINYINT UNSIGNED,
            numRetired TINYINT UNSIGNED,
            numDrivAgeStuds TINYINT UNSIGNED,
            numPreDrivStuds TINYINT UNSIGNED,
            numPreschool TINYINT UNSIGNED,
            hhIncomeDollar MEDIUMINT UNSIGNED,
            hhNumAutos TINYINT UNSIGNED,
            dwellingType TINYINT UNSIGNED,
            ifAvHousehold TINYINT UNSIGNED
        );
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE persons(
            hhid MEDIUMINT UNSIGNED,
            pnum MEDIUMINT UNSIGNED,
            pumsSerialNo FLOAT,
            persType TINYINT UNSIGNED,
            personTypeDetailed TINYINT UNSIGNED,
            age TINYINT UNSIGNED,
            gender TINYINT UNSIGNED,
            industry TINYINT UNSIGNED,
            schlGrade TINYINT UNSIGNED,
            educLevel TINYINT UNSIGNED,
            workPlaceType TINYINT UNSIGNED,
            workPlaceTaz SMALLINT UNSIGNED,
            workPlaceMaz SMALLINT UNSIGNED,
            schoolType TINYINT UNSIGNED,
            schoolTaz SMALLINT UNSIGNED,
            schoolMaz SMALLINT UNSIGNED,
            campusBusinessTaz SMALLINT UNSIGNED,
            campusBusinessMaz SMALLINT UNSIGNED,
            usualCarID TINYINT UNSIGNED,
            dailyActivityPattern TINYINT UNSIGNED,
            specEvtParticipant TINYINT UNSIGNED,
            jointTour1Role TINYINT UNSIGNED,
            jointTour2Role TINYINT UNSIGNED,
            obPeChauffBund1 TINYINT UNSIGNED,
            obPeChauffBund2 TINYINT UNSIGNED,
            obPeChauffBund3 TINYINT UNSIGNED,
            obRsChauffBund TINYINT UNSIGNED,
            obPePassBund TINYINT UNSIGNED,
            obRsPassBund TINYINT UNSIGNED,
            ibPeChauffBund1 TINYINT UNSIGNED,
            ibPeChauffBund2 TINYINT UNSIGNED,
            ibPeChauffBund3 TINYINT UNSIGNED,
            ibRsChauffBund TINYINT UNSIGNED,
            ibPePassBund TINYINT UNSIGNED,
            ibRsPassBund TINYINT UNSIGNED,
            studentDorm TINYINT UNSIGNED,
            studentRent TINYINT UNSIGNED,
            activityString VARCHAR(255),
            transitPass TINYINT UNSIGNED
        );
    '''
    database.cursor.execute(query)
    database.connection.commit()
Example #7
def parse_mrt(database: SqliteUtil, path: str, src_epsg: int, prj_epsg: int,
              bounds: int = 30, steps: int = 96):
    log.info('Allocating tables for MRT temperature profiles.')
    create_tables(database)

    log.info('Loading network nodes from database.')
    nodes: Dict[str,Node]
    nodes = load_nodes(database)

    log.info('Loading network links from database.')
    links: Dict[str,Link]
    links = load_links(database, nodes)

    log.info(f'Searching for MRT files in {path}.')
    csvfiles = iter(glob(f'{path}/**/*.csv', recursive=True))

    log.info('Handling initial dataset for profile construction.')
    points: List[Point]
    time: int
    points, time = parse_points(next(csvfiles), src_epsg, prj_epsg)
    
    log.info('Building spatial index on MRT points.')
    index = Index((point.entry() for point in points))

    log.info('Scanning link bounds and building profiles.')
    mapping: Dict[FrozenSet[int],int] = {}
    count = 0
    empty = 0
    iter_links = counter(links.values(), 'Scanning link %s.')
    for link in iter_links:
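        # coefficients of the line through the link's end nodes; used below to
        # compute the perpendicular distance |dy*x - dx*y + d| / l from each
        # nearby MRT point to the link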
        d = link.terminal_node.x * link.source_node.y - \
            link.source_node.x * link.terminal_node.y
        dx = link.terminal_node.x - link.source_node.x
        dy = link.terminal_node.y - link.source_node.y
        l = sqrt(dy * dy + dx * dx)

        nearby = index.intersection(link.bounds(bounds))
        contained = []
        for uuid in nearby:
            point = points[uuid]
            x = point.x
            y = point.y
            if l > 0:
                dist = abs(dy * x - dx * y + d) / l
            else:
                px = point.x - link.source_node.x
                py = point.y - link.source_node.y
                dist = sqrt(px * px + py * py)
            if dist <= bounds:
                contained.append(point.id)
        
        if contained:
            profile = frozenset(contained)
            if profile in mapping:
                link.profile = mapping[profile]
            else:
                mapping[profile] = count
                link.profile = count
                count += 1
        else:
            empty += 1

    profiles: List[Tuple[int]]
    profiles = [tuple(key) for key in mapping.keys()]

    if empty:
        log.warning(f'Found {empty} links without any MRT temperature profile.')

    def dump_points():
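        # a profile's reading at this time step is the mean MRT, PET and UTCI
        # over all points assigned to that profile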
        idx = time // (86400 // steps)
        for uuid, profile in enumerate(profiles):
            mrt, pet, utci = 0, 0, 0
            count = len(profile)
            for ptid in profile:
                point = points[ptid]
                mrt += point.mrt
                pet += point.pet
                utci += point.utci
            yield (uuid, idx, time, mrt / count, pet / count, utci / count)

    def dump_links():
        for link in links.values():
            yield (link.id, link.profile)

    log.info('Writing link updates and temperatures to database.')

    database.insert_values('mrt_temperatures', dump_points(), 6)
    database.insert_values('temp_links', dump_links(), 2)

    log.info('Merging, dropping and renaming old tables.')

    query = '''
        CREATE INDEX temp_links_link
        ON temp_links(link_id);
    '''
    database.cursor.execute(query)
    query = '''
        CREATE TABLE temp_links_merged
        AS SELECT
            links.link_id,
            links.source_node,
            links.terminal_node,
            links.length,
            links.freespeed,
            links.capacity,
            links.permlanes,
            links.oneway,
            links.modes,
            links.air_temperature,
            temp_links.mrt_temperature
        FROM links
        INNER JOIN temp_links
        USING(link_id);
    '''
    database.cursor.execute(query)

    original = database.count_rows('links')
    merged = database.count_rows('temp_links_merged')
    if original != merged:
        log.error('Original links and updated links tables '
            'do not align; quitting to prevent data loss.')
        raise RuntimeError
    
    database.drop_table('links', 'temp_links')
    query = '''
        ALTER TABLE temp_links_merged
        RENAME TO links;
    '''
    database.cursor.execute(query)

    database.connection.commit()

    del links
    del nodes
    del index
    del mapping
    del points

    log.info('Handling remaining temperatures with the defined profiles.')

    def dump_temperatures(time: int, temperatures: List[Tuple[float,float,float]]):
        idx = time // (86400 // steps)
        for uuid, profile in enumerate(profiles):
            mrt, pet, utci = 0, 0, 0
            count = len(profile)
            for tempid in profile:
                temp = temperatures[tempid]
                mrt += temp[0]
                pet += temp[1]
                utci += temp[2]
            yield (uuid, idx, time, mrt / count, pet / count, utci / count)

    for csvfile in csvfiles:
        time: int
        temperatures: List[Tuple[float,float,float]]
        temperatures, time = parse_temperatures(csvfile)

        log.info('Writing temperature data to database.')
        database.insert_values('mrt_temperatures',
            dump_temperatures(time, temperatures), 6)
        database.connection.commit()

    log.info('Creating indexes on new/updated tables.')
    create_indexes(database)
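
For reference, a minimal usage sketch of Example #7, assuming the SqliteUtil stand-in sketched after Example #1 and hypothetical input paths and EPSG codes:

# hypothetical database path, MRT directory and coordinate systems;
# substitute the real project values
database = SqliteUtil('database.db')
parse_mrt(database, 'data/mrt', src_epsg=4326, prj_epsg=2223,
          bounds=30, steps=96)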