def g_end(text, timer_name=None):
    """Ends a global timer measurement and prints it"""
    if timer_name is None:
        timer_name = 'default'
    if timer_name not in timers:
        prinw('%s timer not started yet.', timer_name)
        return -1
    sec = time.time() - timers[timer_name]
    ms = round(sec * 1000, 2)
    prinf('%s ms %s', ms, text)
    return ms
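
# Usage sketch for the timer pair above: a minimal, self-contained illustration of
# how g_start()/g_end() are meant to be used, assuming g_start() simply records
# time.time() in the module-level `timers` dict (g_start is not part of this excerpt,
# so the stand-in below is only an assumption about its behaviour).
import time

timers = {}

def g_start(timer_name='default'):
    timers[timer_name] = time.time()

g_start('demo')                                   # hypothetical timer name
time.sleep(0.05)                                  # the work being measured
demo_ms = round((time.time() - timers['demo']) * 1000, 2)
print(demo_ms, 'ms demo work')                    # same arithmetic g_end() performs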
def get_utc_now():
    """Returns the current UTC time

    If the global DEBUG variable contains 'simulate_time', the time is shifted
    12 days and 4 hours into the future to trigger the offline database update.
    """
    utc_now = arrow.utcnow()
    prinf('%s utc_now', utc_now)
    if 'simulate_time' in DEBUG:
        # simulating a different time
        utc_now = utc_now.shift(days=+12, hours=+4)
        prinf('%s simulated utc_now', utc_now)
    return utc_now
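
# Minimal standalone sketch of the time-shift behaviour described in the docstring
# above; DEBUG is shown here as a hypothetical list purely for illustration, and the
# `arrow` package is assumed to be installed.
import arrow

DEBUG = ['simulate_time']                         # hypothetical flag container
utc_now = arrow.utcnow()
if 'simulate_time' in DEBUG:
    utc_now = utc_now.shift(days=+12, hours=+4)   # jump ahead to force a database update
print(utc_now)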
def init_currency_symbols(self):
    """Parses the currency symbol string (ccode_str) and generates the conversion
    dictionary [currency_symbols]

    - if redis is used, [currency_symbols] is stored in it,
      otherwise a plain dictionary is used
    """
    if self.r is not None and self.r.csc.initialized == True:
        self.currency_symbols = self.r.csc.currency_symbols
        prinf('currency_symbols loaded from redis')
    else:
        twin_del = ','
        dict_del = ':'
        self.currency_symbols = {
            key: value
            for key, value in [key_value.split(dict_del)
                               for key_value in self.ccode_str.split(twin_del)]
        }
        prinf('currency_symbols = %s', self.currency_symbols)
        if self.r:
            self.r.csc.currency_symbols = self.currency_symbols
            prinf('currency_symbols saved in redis')
            prinf(self.r.csc.currency_symbols)
            self.r.csc.initialized = True
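
# Self-contained sketch of the ccode_str parsing above, assuming the string uses
# ',' between symbol:code pairs and ':' inside a pair (the symbols and codes below
# are examples only, not the project's actual ccode_str).
ccode_str = '$:USD,€:EUR,£:GBP'
currency_symbols = dict(pair.split(':') for pair in ccode_str.split(','))
print(currency_symbols)                           # {'$': 'USD', '€': 'EUR', '£': 'GBP'}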
def destroy_entities(self, cat_list=None, skip_cat_list=None):
    for ent_cat, ent_list in self.entities.items():
        delete = False
        if cat_list is None and skip_cat_list is None:
            delete = True
        else:
            if cat_list is None:
                if ent_cat not in skip_cat_list:
                    delete = True
            else:
                if ent_cat in cat_list:
                    delete = True
        if delete:
            prinf("Clearing entities of %s", ent_cat)
            for ent in ent_list:
                self.destroy_created_entity(ent, 0)
            self.entities[ent_cat].clear()
    for r in self.robots:
        r.reset_ultrasounds()
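
# The nested branching above reduces to a single predicate; this standalone sketch
# (names are illustrative only) shows when an entity category is selected for
# deletion: always when no filter is given, when it is not in skip_cat_list, or
# when it is listed in cat_list.
def should_delete(ent_cat, cat_list=None, skip_cat_list=None):
    if cat_list is None and skip_cat_list is None:
        return True
    if cat_list is None:
        return ent_cat not in skip_cat_list
    return ent_cat in cat_list

assert should_delete('box') is True
assert should_delete('box', skip_cat_list=['box']) is False
assert should_delete('box', cat_list=['wall']) is False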
def acquire_rates_data(self):
    """Requests rates data from the site and processes it

    Returns True on successful retrieval and processing of the data.
    """
    prinf('%s params: %s', self.base_url, self.my_params)
    g_start()
    try:
        self.response_data = requests.get(self.base_url,
                                          params=self.my_params,
                                          timeout=self.timeout)
    except OSError:
        prinw('%s host not available', self.name)
        return False
    g_end('request responded')

    if not self.response_data:
        return False

    status_code = self.response_data.status_code
    prinf(status_code)
    if status_code > 400:
        prinw('%s currency converter site response not found. %s',
              self.name, status_code)
        return False
    elif status_code == 200:
        prinf('%s response ok', self.name)
        self.update_rates_valid_data()
        self.in_ccode = self.response_data.json()[self.strs[jpn.key_in_ccode]]
        self.rates = self.response_data.json()[self.strs[jpn.key_output]]
        return True
def update_rates_data_if_needed(self):
    """Main decision tree for updating the rates database from the internet"""
    if self.valid_database_exists:
        # we only need data fresher than the database
        only_sites_fresher_than_db = True
    else:
        # no database - just try to get data
        only_sites_fresher_than_db = False

    # get sites sorted retrospectively, starting from the freshest last_updated utc time
    sorted_sites = self.get_retrospectively_sorted_sites(only_sites_fresher_than_db)

    if len(sorted_sites) == 0 and self.valid_database_exists:
        prinf('No site fresher than the database - using sql database rates.')
        return

    # try to update the offline database, starting with the freshest site
    any_success = self.try_update_database_from_sites(sorted_sites)
    if not any_success:
        prinw('Conversion rates data were not available on any conversion site!')
def __init__(
    self,
    robot,
    us_id,
    name,
    open_angle=None,
    distance_range=None,
    category="ultrasound",
    mass=None,
    us_pos=None,
):
    self.name = name
    self.r = robot
    self.us_id = us_id
    self.open_angle = open_angle
    self.distance_range = distance_range
    self.detected = False
    self.us_pos = us_pos
    prinf("UltrasoundSensor created and entangled: %s %s", us_id, name)

    opa = 242  # opacity (alpha) of the indicator colors
    # red when an obstacle is detected, green otherwise
    self.colors = {True: (255, 0, 0, opa), False: (0, 255, 0, opa)}
    self.v_inds = {"left": 8, "middle": 11, "right": 14}
def _load_sites_from_pickle_(self):
    """Loads the exchange rates pandas DataFrame from a local pickle

    If the pickle does not exist, the data is requested from the internet
    and saved for the next run.
    """
    if Path(self._sites_file_).is_file():
        with open(self._sites_file_, 'rb') as infile:
            self.sites = pickle.load(infile)
        prinf('%s loaded', self._sites_file_)
        prinf(self.sites)
    else:
        self.request_site_from_internet()
        prinf(self.sites)
        with open(self._sites_file_, 'wb') as outfile:
            pickle.dump(self.sites, outfile, pickle.HIGHEST_PROTOCOL)
        prinf('%s saved', self._sites_file_)
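
# Generic, standalone sketch of the cache-or-fetch pattern implemented above, written
# as a plain function; the cache file name and the fetch callable are illustrative
# stand-ins for self._sites_file_ and request_site_from_internet().
import pickle
from pathlib import Path

def load_or_fetch(cache_file, fetch):
    path = Path(cache_file)
    if path.is_file():
        with open(path, 'rb') as infile:          # cached copy exists - reuse it
            return pickle.load(infile)
    data = fetch()                                # no cache yet - fetch fresh data
    with open(path, 'wb') as outfile:             # and store it for the next run
        pickle.dump(data, outfile, pickle.HIGHEST_PROTOCOL)
    return data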
def _init_redis_(self, use_redis):
    """Initializes redis variables and connections

    - even if use_redis is True, the first_contact_max_sec test is still applied
    ::WINDOWS NOTE:: the first access to redis carries about a 1 second time
    penalty on the windows platform
    """
    self.use_redis = use_redis
    self.first_contact_max_sec = 2.5  # should be at most 300 ms to beat the hdd sql database
    self.redis_host = 'localhost'
    self.redis_port = 6379
    self.r = None
    if not self.use_redis:
        return

    g_start()
    redis_db = redis.StrictRedis(host=self.redis_host, port=self.redis_port, db=0)
    g_end('redis strictRedis initialized')

    g_start()
    redis_db.set('redis', 'running')  # takes ~ 1000 ms on windows
    first_contact = g_end('first redis access')

    g_start()
    redis_running = redis_db.get('redis') == b'running'  # takes ~ 0.1 ms on windows
    g_end('second redis access')

    redis_running = redis_db.info()['loading'] == 0
    if first_contact > self.first_contact_max_sec:
        # we don't want redis to actually slow things down on Windows
        redis_running = False

    if redis_running:
        g_start()
        self.r = Root(host=self.redis_host, port=self.redis_port, db=0)
        g_end('redisworks root initialized')
        prinf('Redis works root server running.')
def update_rates_valid_data(self):
    """Updates the validation utc times from the site response cet time"""
    date_cet = str(self.response_data.json()[self.strs[jpn.key_date]])
    fmt = self.strs[jpn.key_date_format]
    date_time = arrow.get(date_cet, fmt)
    self.valid_from_utc = self.__class__.stamp_time(date_time)
    self.valid_to_utc = self.__class__.stamp_valid_to(self.valid_from_utc)
    prinf('%s valid_from_utc', self.valid_from_utc)
    prinf('%s valid_to_utc', self.valid_to_utc)
    prinf('%s now', arrow.utcnow())
def convert(self, amount, input_cur, output_cur=None):
    """Converts an amount of money in <input_cur> currency to <output_cur> currency

    amount <float> - amount we want to convert
    input_cur <str> - input currency - 3-letter code or currency symbol
    [output_cur <str>] - output currency - 3-letter code or currency symbol
        - if omitted, the amount is converted to all available currencies

    Exchange rates are acquired from the sql database (get_rates_from_sql).
    """
    self.in_amount = amount
    self.in_currency = input_cur
    self.out_currency = output_cur
    if not all([self.in_amount, self.in_currency]):
        prine('Arguments [amount] and [input_cur] are both required to convert!')
        return None

    prinf('converting in=%s, out=%s', self.in_currency, self.out_currency)
    self.convert_to_ccode()
    prinf('converted in=%s, out=%s', self.in_code, self.out_code)

    if self.in_code == self.out_code:
        self.out_amount = self.in_amount
    else:
        rates = self.get_rates_from_sql(self.in_code, self.out_code)
        prinf('rate = %s', rates)
        if not isinstance(rates, dict):
            self.out_amount = round(self.in_amount * rates, self.out_digits)
            self.print_json()
        else:
            self.out_amount = None
            rates = {
                key: round(self.in_amount * value, self.out_digits)
                for key, value in rates.items()
            }
            self.print_json(rates_dict=rates)
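
# Minimal, self-contained sketch of the conversion arithmetic used above; the rates
# below are made-up values for illustration, whereas the real ones come from the sql
# database via get_rates_from_sql().
in_amount = 100.0
out_digits = 2
rates = {'EUR': 0.92, 'CZK': 23.5}                # hypothetical rates per unit of the input currency

converted = {code: round(in_amount * rate, out_digits) for code, rate in rates.items()}
print(converted)                                  # {'EUR': 92.0, 'CZK': 2350.0}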
def clear_maps(self):
    prinf("Deleting maps in %s", self.__svg_map_dir__)
    # shutil.rmtree(self.__svg_map_dir__)
    files = glob.glob(self.__svg_map_dir__ + "*")
    for f in files:
        os.remove(f)