def get_planet_info(self):
    """Read the replication timestamp from the planet file header and
    resolve the closest replication state for it.

    Returns:
        tuple: ``(timestamp_py, state_planet)`` where ``timestamp_py`` is the
        header timestamp as a datetime and ``state_planet`` is the state dict
        (carries at least 'sequenceNumber' and 'timestamp').

    Raises:
        Exception: if the planet file cannot be parsed or its header has no
        'osmosis_replication_timestamp'.
    """
    reader = None
    try:
        reader = oreader(self.planet_path)
        header = reader.header()
        timestamp = header.get("osmosis_replication_timestamp")
    except Exception as e:
        # Typo fixed in message ("Pheraps" -> "Perhaps").
        logging.critical(
            'There was a problem while parsing planet file. Perhaps the folder provided is wrong',
            exc_info=True)
        raise e
    finally:
        # Close the reader on every path: the original leaked it when
        # header() raised, because close() only ran on success.
        if reader is not None:
            reader.close()
    if timestamp is None:
        # Same message for log and exception; callers catching Exception
        # are unaffected.
        msg = 'Timestamp from planet header is None. It is impossible to continue.'
        logging.critical(msg)
        raise Exception(msg)
    # Lazy %-style args instead of placeholder-less f-strings.
    logging.info(
        'The headers of planet are valid and we found the following timestamp: %s',
        timestamp)
    timestamp_py = OSM.timestamp_to_datetime(timestamp)
    state_planet = OSM.get_closest_state(self.base_url, self.mode, timestamp_py)
    logging.info(
        'We were able to find the planet state: %s - Header timestamp: %s, state timestamp: %s',
        state_planet["sequenceNumber"], timestamp_py, state_planet["timestamp"])
    return timestamp_py, state_planet
class OSM_strategy(Policy):
    """RLlib policy that plays a precomputed optimal selfish-mining strategy.

    The strategy is obtained once at construction time by solving the OSM
    MDP with policy iteration; acting is then a pure table lookup.
    """

    def __init__(self, observation_space, action_space, config):
        Policy.__init__(self, observation_space, action_space, config)
        self.osm = OSM(config['alpha'], config['gamma'], config['blocks'])
        self.osm.MDP_matrix_init()
        transitions, rewards = self.osm.get_MDP_matrix()
        planner = mdptoolbox.mdp.PolicyIteration(transitions, rewards, 0.99)
        planner.run()
        self.blocks = config['blocks']
        self.optimal_policy = planner.policy

    def OSM_act(self, s):
        """Return the MDP action for raw state *s* = [a, h, o, fork_flag]."""
        state = list(s)
        # Translate the numeric fork flag into the label used by the MDP
        # state dictionary.
        if s[3] == constants.NORMAL:
            state[3] = 'normal'
        elif s[3] == constants.FORKING:
            state[3] = 'forking'
        else:
            state[3] = 'catch up'
        key = tuple(state[:2] + [state[3]])
        # Default action when the state is outside the solved MDP: publish
        # (1) if ahead, otherwise wait (0).
        fallback = 1 if state[0] > state[1] else 0
        if state[0] >= self.blocks or state[1] >= self.blocks:
            return fallback
        if key in self.osm._state_dict:
            return self.optimal_policy[self.osm._name_to_index(key)]
        return fallback

    def compute_actions(self,
                        obs_batch,
                        state_batches,
                        prev_action_batch=None,
                        prev_reward_batch=None,
                        info_batch=None,
                        episodes=None,
                        **kwargs):
        """Vectorized action computation: round observations to ints and
        dispatch each one through OSM_act."""
        actions = [
            self.OSM_act([int(round(obs[0])),
                          int(round(obs[1])),
                          int(round(obs[2])),
                          int(round(obs[3]))])
            for obs in obs_batch
        ]
        return actions, [], {}

    def learn_on_batch(self, samples):
        # Fixed policy: nothing to learn.
        pass

    def get_weights(self):
        pass

    def set_weights(self, weights):
        pass
def __init__(self, observation_space, action_space, config):
    """Solve the OSM MDP once up front and cache the optimal policy table."""
    Policy.__init__(self, observation_space, action_space, config)
    self.osm = OSM(config['alpha'], config['gamma'], config['blocks'])
    self.osm.MDP_matrix_init()
    transitions, rewards = self.osm.get_MDP_matrix()
    planner = mdptoolbox.mdp.PolicyIteration(transitions, rewards, 0.99)
    planner.run()
    self.blocks = config['blocks']
    self.optimal_policy = planner.policy
def __init__(self):
    """Read OSM_2_OA.cfg, configure logging, and build the OA/OSM endpoints.

    The configuration file is expected in the current working directory.
    """
    config = ConfigParser()
    config.read("OSM_2_OA.cfg")
    self.__createLogger()
    # One format expression instead of four incremental += concatenations;
    # the resulting connection string is byte-identical to the original's.
    pgString = "postgres://%s:%s@%s:%s/%s" % (
        config.get("OA", "user"),
        config.get("OA", "pwd"),
        config.get("OA", "dbServer"),
        config.get("OA", "port"),
        config.get("OA", "database"),
    )
    sqlFile = os.path.join(os.getcwd(), "OSM_2_OA.sql")
    self.oa = OA(pgString, sqlFile)
    self.osm = OSM(config.get("OSM", "XAPI"), config.get("OSM", "bbox"),
                   config.get("OSM", "maxSize"))
def __init__(self):
    """Read configuration and wire up the OA database and the OSM source.

    Reads OSM_2_OA.cfg from the current working directory, builds a
    postgres connection string from the [OA] section, and constructs the
    OA and OSM collaborators from the [OSM] section.
    """
    config = ConfigParser()
    config.read("OSM_2_OA.cfg")
    self.__createLogger()
    # Assemble "postgres://user:pwd@host:port/database" piecewise.
    pgString = "postgres://"
    pgString += "%s:%s" % (config.get("OA", "user"), config.get("OA", "pwd"))
    pgString += "@%s:%s" % (config.get("OA", "dbServer"), config.get("OA", "port"))
    pgString += "/%s" % (config.get("OA", "database"))
    # SQL bootstrap script expected next to the working directory.
    sqlFile = os.path.join(os.getcwd(), "OSM_2_OA.sql")
    self.oa = OA(pgString, sqlFile)
    self.osm = OSM(config.get("OSM", "XAPI"), config.get("OSM", "bbox"), config.get("OSM", "maxSize"))
class OSM_2_OA:
    """Main class to synchronize data from OSM to OA; does the orchestration."""

    def __init__(self):
        """Read OSM_2_OA.cfg, configure logging, and build collaborators."""
        config = ConfigParser()
        config.read("OSM_2_OA.cfg")
        self.__createLogger()
        # Assemble "postgres://user:pwd@host:port/database".
        pgString = "postgres://"
        pgString += "%s:%s" % (config.get("OA", "user"), config.get("OA", "pwd"))
        pgString += "@%s:%s" % (config.get("OA", "dbServer"), config.get("OA", "port"))
        pgString += "/%s" % (config.get("OA", "database"))
        sqlFile = os.path.join(os.getcwd(), "OSM_2_OA.sql")
        self.oa = OA(pgString, sqlFile)
        self.osm = OSM(config.get("OSM", "XAPI"), config.get("OSM", "bbox"),
                       config.get("OSM", "maxSize"))

    def __createLogger(self):
        """Configure DEBUG file logging plus an INFO console handler on root."""
        logging.basicConfig(
            level=logging.DEBUG,
            format='%(asctime)s | %(name)-13s | %(levelname)-6s | %(message)s',
            filename=os.path.join(os.getcwd(), self.__class__.__name__) + ".log")
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        # Console uses the same layout as the file handler.
        formatter = logging.Formatter(
            '%(asctime)s | %(name)-13s | %(levelname)-6s | %(message)s')
        console.setFormatter(formatter)
        logging.getLogger('').addHandler(console)
        self.logger = logging.getLogger(self.__class__.__name__)
        self.logger.info("Started")

    def execute(self):
        """Fetch OSM deltas since the last OA update and insert each batch.

        Stale commented-out debug code and the dead trailing `pass` were
        removed; behavior is unchanged.
        """
        dateOfLastSynchro = self.oa.getDateOfLastUpdate()
        deltas = self.osm.getDelta(dateOfLastSynchro)
        for delta in deltas:
            converter = OSMDelta_2_AddressRowConverter(delta)
            rows = converter.convert()
            self.oa.insert(rows)
class OSM_2_OA:
    """main class to synchronize data from OSM to OA does the orchestration"""

    def __init__(self):
        """Read OSM_2_OA.cfg, set up logging, and build OA/OSM collaborators."""
        config = ConfigParser()
        config.read("OSM_2_OA.cfg")
        self.__createLogger()
        # Assemble "postgres://user:pwd@host:port/database" piecewise.
        pgString = "postgres://"
        pgString += "%s:%s" % (config.get("OA", "user"), config.get("OA", "pwd"))
        pgString += "@%s:%s" % (config.get("OA", "dbServer"), config.get("OA", "port"))
        pgString += "/%s" % (config.get("OA", "database"))
        sqlFile = os.path.join(os.getcwd(), "OSM_2_OA.sql")
        self.oa = OA(pgString, sqlFile)
        self.osm = OSM(config.get("OSM", "XAPI"), config.get("OSM", "bbox"), config.get("OSM", "maxSize"))

    def __createLogger(self):
        """Configure DEBUG file logging and add an INFO console handler."""
        logging.basicConfig(
            level=logging.DEBUG,
            format="%(asctime)s | %(name)-13s | %(levelname)-6s | %(message)s",
            filename=os.path.join(os.getcwd(), self.__class__.__name__) + ".log",
        )
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        # set a format which is simpler for console use
        formatter = logging.Formatter("%(asctime)s | %(name)-13s | %(levelname)-6s | %(message)s")
        # tell the handler to use this format
        console.setFormatter(formatter)
        # add the handler to the root logger
        logging.getLogger("").addHandler(console)
        self.logger = logging.getLogger(self.__class__.__name__)
        self.logger.info("Started")

    def execute(self):
        """Fetch deltas since the last OA update and insert converted rows."""
        dateOfLastSynchro = self.oa.getDateOfLastUpdate()
        deltas = self.osm.getDelta(dateOfLastSynchro)
        ##debug
        # deltas=[]
        # for f in os.listdir(tempfile.gettempdir()):
        #    if f.find(".osm")>-1:
        #        deltas.append(os.path.join(tempfile.gettempdir(),f))
        for delta in deltas:
            converter = OSMDelta_2_AddressRowConverter(delta)
            rows = converter.convert()
            self.oa.insert(rows)
        pass
def main():
    """Interactive entry point: pick a mood, build a route shaped like it on a
    Portland map, optionally optimize it, and open it in Google Maps."""
    # os.path.join instead of hard-coded Windows '\\' separators (portable).
    moods_dir = os.path.join(ROOT_DIR, 'ShapesStructure', 'Moods')
    moods = [
        f for f in os.listdir(moods_dir)
        if os.path.isdir(os.path.join(moods_dir, f))
    ]
    for mood_dir in moods:
        print(mood_dir)
    answer = input(
        '\nWould you like SAM to use route optimization? (Y/N): ').lower()
    is_using_route_opt = answer in ('y', 'yes')
    mood = ''
    while mood not in MOODS:
        mood = input(
            '\nEnter a mood from above for SAM to create an image on a map of Portland: '
        ).lower()
        if mood not in MOODS:
            print('Sorry, \'' + mood +
                  '\' is not a supported mood. Please enter a valid mood.')
        else:
            image = ip.load_shape_from_mood(mood)
            # ranges for middle chunk of Portland roughly...
            geo_image = ip.points_to_lat_long(image, 45.53353, 45.54680,
                                              -122.65153, -122.635)
            # NOTE(review): the two obtain_map results were never used by the
            # original; the calls are kept in case they have side effects
            # (e.g. caching) -- confirm and drop if pure.
            SLC = OSM.obtain_map('Salt Lake City')
            Portland = OSM.obtain_map('Portland')
            portland_square = OSM.obtain_square_portion(45.4, 45.9, -122.7, -122.2)
            print(
                '\nSAM is creating the route for you! This might take a minute or two...'
            )
            start_time = time.time()
            # TODO: implement route optimization techniques (in route_optimizer.py)
            # through rotation, scaling, and comparing to road coords from OSM
            # using Hausdorff
            if is_using_route_opt:
                geo_image = ro.optimize_route(geo_image, portland_square)
            end_time = time.time()
            print('Time taken to create route: ' +
                  str(round(end_time - start_time, 2)) + ' seconds')
            # construct coords to be placed into URL
            # TODO: still need to optimize which points are cut from each image
            # (in ip.trim_points)
            if len(geo_image) > 25:
                print('Trimming image down to 25 points...')
                # NOTE(review): return value ignored -- presumably trims
                # geo_image in place; confirm against ip.trim_points.
                ip.trim_points(geo_image)
            url_end = ''
            for point in geo_image:
                url_end += str(point[0]) + ',' + str(point[1]) + '/'
            print('\nOpening SAM\'s sketch in Google Maps...')
            # showing route that will look like an apple in Google maps...
            webbrowser.open('https://www.google.com/maps/dir/' + url_end)
def _job_payload(self, n):
    """Build the psql job payload dict for one OSM node.

    Extracted helper: the original repeated this dict literal three times
    (modified / added / deleted).
    """
    return {
        "id": n['id'],
        "timestamp": dt.datetime.strptime(n['timestamp'], '%Y-%m-%dT%H:%M:%SZ'),
        "lat": n['lat'],
        "lon": n['lon'],
        "uid": n['uid'],
        "user": n['user'],
        "version": n['version'],
        "tags": n['tags'],
    }

def update(self):
    """Replay replication states from current_seq_num+1 up to the latest,
    queueing matching fountain nodes as psql jobs and persisting the
    per-state timestamp."""
    latest_info_seq_num, latest_info_timestamp = self.get_latest_state_info()
    print(
        f'Latest state sequence number "{latest_info_seq_num}" on date {latest_info_timestamp.strftime("%Y/%m/%dT%H:%M:%SZ")}'
    )
    print(
        f'We need to download {latest_info_seq_num - self.current_seq_num} states'
    )
    if latest_info_seq_num - self.current_seq_num == 0:
        return print('Up to date!')
    elif latest_info_seq_num - self.current_seq_num < 0:
        # Typo fixed in user message ("that" -> "than").
        return print('Your version is newer than the one on the server')
    for state_num in range(self.current_seq_num + 1, latest_info_seq_num + 1):
        xml = self.download_state(state_num)

        def matching_nodes(tag):
            # Flatten nodes of every <tag> element, keep only those whose
            # tags pass the fountain filter.
            nodes = [n for elem in xml.findall(tag) for n in OSM.get_nodes(elem)]
            return [n for n in nodes if self.filter.apply(n['tags'])]

        fountains_modified = matching_nodes('modify')
        fountains_added = matching_nodes('add')
        fountains_deleted = matching_nodes('delete')
        # All three change kinds were enqueued as 'update' jobs by the
        # original; preserved as-is.  NOTE(review): deletions being queued
        # as 'update' looks suspicious -- confirm intended.
        for n in fountains_modified + fountains_added + fountains_deleted:
            self.psql.add_job('update', self._job_payload(n))
        # Only the timestamp is persisted; the original also bound the
        # sequence number but never used it.
        _, state_timestamp = OSM.get_state_info(self.base_url, self.mode,
                                                state_num)
        self.psql.save_update(state_num, state_timestamp)
        print(
            f'Updated database to version {state_num}. Added {len(fountains_added)}, updated {len(fountains_modified)} and deleted {len(fountains_deleted)} fountains.'
        )
def download_state(self, state_number):
    """Download replication state *state_number*.

    Thin wrapper around OSM.download_state using this instance's
    base_url and mode.
    """
    return OSM.download_state(self.base_url, self.mode, state_number)
def get_state_info(self, state_number):
    """Return state info for replication state *state_number*.

    Bug fix: the original accepted *state_number* but never forwarded it,
    calling OSM.get_state_info with only (base_url, mode). The sibling
    update() method calls the same helper with the state number as a third
    argument, so it is passed through here as well.
    """
    return OSM.get_state_info(self.base_url, self.mode, state_number)
def get_latest_state_info(self):
    """Return info about the newest replication state on the server.

    Thin wrapper around OSM.get_latest_state_info using this instance's
    base_url and mode.
    """
    return OSM.get_latest_state_info(self.base_url, self.mode)
EXPR_PARAM_PICKLE_FILE, EXPR_PROGRESS_FILE, EXPR_RESULT_FILE)
# NOTE(review): the line above is the tail of an import statement whose
# opening "from ... import (" lies before this chunk.
from functools import reduce
from itertools import (chain, takewhile)
from ray.rllib.agents.ppo import PPOTrainer
#from OSM import OSM
import os
import csv
import math
import time
import constants
import mdptoolbox
from OSM import OSM
from BitcoinEnv import BitcoinEnv
from bitcoin_game import OSM_strategy
# Disabled experiment kept for reference (string literal acts as a no-op).
'''
blocks = 5
osm_space = spaces.Box(low=np.zeros(4), high=np.array([blocks + 4, blocks + 4, blocks + 4, 3.]))
osm = OSM_strategy(osm_space, spaces.Discrete(4), {'alpha':.15, 'gamma':0,'blocks':5})
print(osm.OSM_act([1, 1, 1, 0]))
'''
# Script body: build the OSM MDP (alpha=.15, gamma=.5, blocks=5), solve it
# with policy iteration, and print the resulting value function.
osm = OSM(.15, .5, 5)
osm.MDP_matrix_init()
P, R = osm.get_MDP_matrix()
solver = mdptoolbox.mdp.PolicyIteration(P, R, 0.99)
solver.run()
print(solver.V)