Esempio n. 1
0
    def parse_mazs(self, filepath, bin_size=10000):
        """Parse network MAZs from a shapefile into the database.

        Reads every record/shape pair from the shapefile and stores
        (MAZ id, TAZ id, square miles, encoded polygon) tuples, pushing
        them to the database in batches of ``bin_size`` to bound memory.

        Args:
            filepath: path to the MAZ shapefile.
            bin_size: number of MAZ records accumulated per database push.
        """
        pr.print(f'Beginning network MAZ parsing from {filepath}.', time=True)
        pr.print('MAZ Parsing Progress',
                 persist=True,
                 replace=True,
                 frmt='bold',
                 progress=0)

        parser = shapefile.Reader(filepath)
        target = len(parser)
        mazs = []
        count = 0

        for item in parser:
            mazs.append(
                (item.record.MAZ_ID_10, item.record.TAZ_2015,
                 item.record.Sq_miles, self.encode_poly(item.shape.points)))
            count += 1
            if count % bin_size == 0:
                pr.print(f'Pushing {bin_size} MAZs to database.', time=True)
                self.database.push_mazs(mazs)
                mazs = []
                pr.print('Resuming MAZ parsing.', time=True)
                pr.print('MAZ Parsing Progress',
                         persist=True,
                         replace=True,
                         frmt='bold',
                         progress=count / target)

        # Release the shapefile's underlying file handles (the original
        # left the reader open for the process lifetime).
        parser.close()

        # Flush the final partial batch; skip it entirely when the record
        # count was an exact multiple of bin_size, which previously printed
        # "Pushing 0 MAZs" and issued an empty database call.
        if mazs:
            pr.print(f'Pushing {len(mazs)} MAZs to database.', time=True)
            self.database.push_mazs(mazs)

        pr.print('MAZ Parsing Progress',
                 persist=True,
                 replace=True,
                 frmt='bold',
                 progress=1)
        pr.push()
        pr.print('Network MAZ parsing complete.', time=True)
Esempio n. 2
0
    def run(self, config):
        """Parse the ABM trips CSV file and load it into the database.

        Streams the trips CSV row by row, assigns sequential trip ids,
        resolves joint-trip party membership per household, and writes the
        rows to the database in batches of ``bin_size``. Afterwards the
        temporary table is merged into the final ``trips`` table and,
        optionally, all indexes are created.

        Args:
            config: run configuration; reads ``config['run']`` keys
                ``force``, ``trips_file``, ``bin_size`` and ``create_idxs``.
        """
        pr.print('Preallocating process files and tables.', time=True)
        force = config['run']['force']
        self.create_tables('trips', 'temp_trips', force=force)

        pr.print('Creating temporary tables.', time=True)
        self.database.create_temp()

        pr.print('Loading process metadata and resources.', time=True)
        trips_path = config['run']['trips_file']
        bin_size = config['run']['bin_size']

        # Count data rows (excluding the header) for progress reporting;
        # the context manager closes the handle instead of leaking it.
        with open(trips_path, 'r') as countfile:
            target = sum(1 for _ in countfile) - 1

        trips = []
        parties = {}
        trip_id = 0
        party_id = 1

        pr.print('Starting trips CSV file iteration.', time=True)
        pr.print('Trips Parsing Progress', persist=True, replace=True,
            frmt='bold', progress=0)

        household = None

        with open(trips_path, 'r', newline='') as tripsfile:
            parser = csv.reader(tripsfile, delimiter=',', quotechar='"')
            top = next(parser)
            cols = {key: idx for idx, key in enumerate(top)}

            for trip in parser:
                prev_household = household

                vehicle = int(trip[cols['vehId']])
                household = int(trip[cols['hhid']])
                role = int(trip[cols['jointTripRole']])

                party_hash = self.hash_party(trip[cols['party']],
                    trip[cols['isamAdjDepMin']])

                # Party hashes are only meaningful within one household, so
                # the cache is reset whenever a new household begins.
                if prev_household != household:
                    parties = {}

                if party_hash is None:
                    # Solo trip: no joint-trip party.
                    party = 0
                    party_idx = 0
                elif party_hash in parties:
                    # Subsequent member of an already-registered party.
                    party = parties[party_hash][0]
                    party_idx = parties[party_hash][1]
                    parties[party_hash][1] += 1
                else:
                    # First member of a new party; next member gets index 2.
                    parties[party_hash] = [party_id, 2]
                    party = party_id
                    party_idx = 1
                    party_id += 1

                trips.append((
                    trip_id,
                    household,
                    int(trip[cols['uniqueid']]),
                    int(trip[cols['pnum']]),
                    int(trip[cols['personTripNum']]) - 1,
                    party,
                    party_idx,
                    role,
                    int(trip[cols['origTaz']]),
                    int(trip[cols['origMaz']]),
                    int(trip[cols['destTaz']]),
                    int(trip[cols['destMaz']]),
                    int(trip[cols['origPurp']]),
                    int(trip[cols['destPurp']]),
                    int(trip[cols['mode']]),
                    vehicle if vehicle > 0 else 0,
                    # Departure/arrival are shifted by 16200 s (4.5 h);
                    # presumably an ABM-to-simulation clock offset — TODO confirm.
                    self.adj_time(trip[cols['isamAdjDepMin']]) + 16200,
                    self.adj_time(trip[cols['isamAdjArrMin']]) + 16200,
                    self.adj_time(trip[cols['isamAdjDurMin']])))
                trip_id += 1

                if trip_id % bin_size == 0:
                    pr.print(f'Pushing {bin_size} trips to the database.', time=True)
                    self.database.write_trips(trips)
                    trips = []

                    pr.print('Resuming CSV file parsing.', time=True)
                    pr.print('Trips Parsing Progress', persist=True, replace=True,
                        frmt='bold', progress=trip_id/target)

        # Flush the final partial batch; skipped when the row count was an
        # exact multiple of bin_size (previously printed "Pushing 0 trips"
        # and issued an empty database call).
        if trips:
            pr.print(f'Pushing {len(trips)} trips to the database.', time=True)
            self.database.write_trips(trips)

        pr.print('Trips Parsing Progress', persist=True, replace=True,
            frmt='bold', progress=1)
        pr.push()
        pr.print('ABM trip data parsing complete.', time=True)

        # Replace the plain trips table with the joined temp data, then
        # drop the temporaries and forget their schema entry.
        pr.print('Merging tables and dropping temporaries.', time=True)
        pr.silence()
        self.database.drop_table('trips')
        self.database.create_all_idxs('temp_trips')
        self.database.join_trips()
        self.database.drop_table('temp_trips')
        del self.database.tables['temp_trips']
        pr.unsilence()

        if config['run']['create_idxs']:
            pr.print(f'Creating all indexes in database '
                f'{self.database.db}.', time=True)
            self.create_idxs()
            pr.print('Index creating complete.', time=True)
Esempio n. 3
0
    def parse(self, config):
        """Parse the ABM agents CSV file and load it into the database.

        Streams the agents CSV, assigns sequential agent ids, and writes
        the rows to the database in batches of ``bin_size``; optionally
        creates all table indexes afterwards.

        Args:
            config: run configuration; reads ``config['run']`` keys
                ``force``, ``agents_file``, ``bin_size`` and ``create_idxs``.
        """
        pr.print('Preallocating process files and tables.', time=True)
        force = config['run']['force']
        self.create_tables('agents', force=force)

        pr.print('Loading process metadata and resources.', time=True)
        agents_path = config['run']['agents_file']
        bin_size = config['run']['bin_size']

        # Count data rows (excluding the header) for progress reporting;
        # the context manager closes the handle instead of leaking it.
        with open(agents_path, 'r') as countfile:
            target = sum(1 for _ in countfile) - 1

        agents = []
        agent_id = 0

        pr.print('Starting agents CSV file iteration.', time=True)
        pr.print('Agents Parsing Progress',
                 persist=True,
                 replace=True,
                 frmt='bold',
                 progress=0)

        with open(agents_path, 'r', newline='') as agentsfile:
            parser = csv.reader(agentsfile, delimiter=',', quotechar='"')
            top = next(parser)
            # Map each header name to its column index.
            cols = {key: idx for idx, key in enumerate(top)}

            for agent in parser:
                agents.append(
                    (agent_id, int(agent[cols['hhid']]), int(agent[cols['pnum']]),
                     float(agent[cols['pumsSerialNo']]),
                     int(agent[cols['persType']]),
                     int(agent[cols['persTypeDetailed']]), int(agent[cols['age']]),
                     int(agent[cols['gender']]), int(agent[cols['industry']]),
                     int(agent[cols['schlGrade']]), int(agent[cols['educLevel']]),
                     int(agent[cols['workPlaceType']]),
                     int(agent[cols['workPlaceTaz']]),
                     int(agent[cols['workPlaceMaz']]),
                     int(agent[cols['schoolType']]), int(agent[cols['schoolTaz']]),
                     int(agent[cols['schoolMaz']]),
                     int(agent[cols['campusBusinessTaz']]),
                     int(agent[cols['campusBusinessMaz']]),
                     int(agent[cols['dailyActivityPattern']])))
                agent_id += 1

                if agent_id % bin_size == 0:
                    pr.print(f'Pushing {bin_size} agents to database.', time=True)
                    self.database.write_agents(agents)
                    agents = []

                    pr.print('Resuming agent CSV file parsing.', time=True)
                    pr.print('Agent Parsing Progress',
                             persist=True,
                             replace=True,
                             frmt='bold',
                             progress=agent_id / target)

        # Flush the final partial batch; skipped when the row count was an
        # exact multiple of bin_size (previously printed "Pushing 0 agents"
        # and issued an empty database call).
        if agents:
            pr.print(f'Pushing {len(agents)} agents to database.', time=True)
            self.database.write_agents(agents)

        pr.print('Agent Parsing Progress',
                 persist=True,
                 replace=True,
                 frmt='bold',
                 progress=1)
        pr.push()
        pr.print('ABM agent data parsing complete.', time=True)

        if config['run']['create_idxs']:
            pr.print('Creating all indexes in database '
                     f'{self.database.db}.', time=True)
            self.create_idxs()
            pr.print('Index creating complete.', time=True)
Esempio n. 4
0
    def run(self, config):
        """Parse the ABM households CSV and load households plus vehicles.

        Streams the households CSV, builds one household row per record and
        one vehicle row per auto owned, and writes both to the database in
        batches of ``bin_size``; optionally creates all indexes afterwards.

        Args:
            config: run configuration; reads ``config['run']`` keys
                ``force``, ``households_file``, ``bin_size`` and
                ``create_idxs``.
        """
        pr.print('Preallocating process files and tables.', time=True)
        force = config['run']['force']
        self.create_tables('households', force=force)

        pr.print('Loading process metadata and resources.', time=True)
        households_path = config['run']['households_file']
        bin_size = config['run']['bin_size']

        # Count data rows (excluding the header) for progress reporting;
        # the context manager closes the handle instead of leaking it.
        with open(households_path, 'r') as countfile:
            target = sum(1 for _ in countfile) - 1

        households = []
        vehicles = []
        vehicle_id = 0
        hhid = 0  # rows parsed so far; household ids themselves come from the CSV

        pr.print('Starting households CSV file iteration.', time=True)
        pr.print('Households Parsing Progress',
                 persist=True,
                 replace=True,
                 frmt='bold',
                 progress=0)

        with open(households_path, 'r', newline='') as householdsfile:
            parser = csv.reader(householdsfile, delimiter=',', quotechar='"')
            top = next(parser)
            # Map each header name to its column index (note: the source
            # headers really are spelled 'nunmRetired' and 'numPreshcool').
            cols = {key: idx for idx, key in enumerate(top)}

            for household in parser:
                household_id = int(household[cols['hhid']])
                households.append(
                    (household_id, float(household[cols['pumsSerialNo']]),
                     int(household[cols['homeTaz']]),
                     int(household[cols['homeMaz']]),
                     int(household[cols['hhsize']]),
                     int(household[cols['numFtWorkers']]),
                     int(household[cols['numPtWorkers']]),
                     int(household[cols['numUnivStuds']]),
                     int(household[cols['numNonWorkers']]),
                     int(household[cols['nunmRetired']]),
                     int(household[cols['numDrivAgeStuds']]),
                     int(household[cols['numPreDrivStuds']]),
                     int(household[cols['numPreshcool']]),
                     int(household[cols['hhIncomeDollars']]),
                     int(household[cols['hhNumAutos']]),
                     int(household[cols['dwellingType']]),
                     int(household[cols['ifAvHousehold']])))
                hhid += 1

                # One vehicle row per auto owned, numbered from 1 within
                # the household.
                for vehicle in range(int(household[cols['hhNumAutos']])):
                    vehicles.append((vehicle_id, household_id, vehicle + 1))
                    vehicle_id += 1

                if hhid % bin_size == 0:
                    pr.print(f'Pushing {bin_size} households to database.',
                             time=True)
                    self.database.write_households(households)
                    self.database.write_vehicles(vehicles)
                    households = []
                    vehicles = []

                    pr.print('Resuming household CSV file parsing.', time=True)
                    pr.print('Household Parsing Progress',
                             persist=True,
                             replace=True,
                             frmt='bold',
                             progress=hhid / target)

        # Flush the final partial batch. The original code wrote only the
        # trailing households and silently dropped their vehicles — write
        # both here. (households and vehicles fill and reset together, so
        # one is empty iff the other is.)
        if households:
            pr.print(f'Pushing {len(households)} households to database.',
                     time=True)
            self.database.write_households(households)
            self.database.write_vehicles(vehicles)

        pr.print('ABM household data parsing complete.', time=True)
        pr.print('Household Parsing Progress',
                 persist=True,
                 replace=True,
                 frmt='bold',
                 progress=1)
        pr.push()

        if config['run']['create_idxs']:
            pr.print('Creating all indexes in database '
                     f'{self.database.db}.', time=True)
            self.create_idxs()
            pr.print('Index creating complete.', time=True)