def send_notifications(self):
    self.last_notification_dtm = dt.datetime.now()
    if not self.chat_ids:
        return
    new_sales = self.db.fetch_new_sales(self.last_sale_id)
    if not new_sales:
        return
    new_last_sale_id = max(row['sale_id'] for row in new_sales)
    chat_ids = self.chat_ids.copy()
    # pop() removes an arbitrary element from the set copy; that chat gets
    # the photo upload first so Telegram returns a reusable file_id
    random_chat_id = chat_ids.pop()
    for row in new_sales:
        current_price = row['current_price']
        previous_price = row['previous_price']
        currency = row['currency']
        # guard against division by zero for items with no recorded old price
        discount = ((current_price - previous_price) / previous_price
                    if previous_price != 0 else 0.0)
        caption = self.TMPL_NOTIF_CAPTION.format(
            brand_name=row['brand_name'],
            gender=row['gender'],
            name=row['name'],
            url=row['url'],
            current_price=round(current_price),
            previous_price=strike(round(previous_price)),
            currency=currency,
            currency_strike=strike(currency),
            discount=discount)
        img_url = row['img_url']
        if img_url:
            try:
                img_req = self.session.get('http://' + img_url)
                img_content = img_req.content
                msg_obj = self.send_photo(random_chat_id, img_content,
                                          caption, parse_mode='Markdown')
                if msg_obj['ok']:
                    # reuse the uploaded photo's file_id for the other chats
                    img_url = msg_obj['result']['photo'][0]['file_id']
            except Exception:
                log_message('Failed to download image {}'.format(
                    row['img_url']))
                log_exception()
                img_url = None
        else:
            self.send_message(random_chat_id, caption, parse_mode='Markdown',
                              disable_web_page_preview=True)
        for chat_id in chat_ids:
            if img_url:
                self.send_photo(chat_id, img_url, caption,
                                parse_mode='Markdown')
            else:
                self.send_message(chat_id, caption, parse_mode='Markdown',
                                  disable_web_page_preview=True)
    self.last_sale_id = new_last_sale_id

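# Worked example of the discount calculation in send_notifications() above:
# a price drop from 100 to 75 gives (75 - 100) / 100 = -0.25, i.e. a 25%
# reduction; the notification template is assumed to render that fraction as
# a percentage. The zero guard keeps items with no recorded previous price
# from raising ZeroDivisionError:
#
#   assert (75 - 100) / 100 == -0.25
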
def get_monster_id(self, n):
    if n not in self.monsteridx:
        log_message(
            "Error: missing monster %s when building fantasy grounds module." % n)
        return 0
    return self.monsteridx[n]

def evaluate_expression(this_val: gdb.Value, c_type_name: str, c_type: str, expr: str):
    try:
        content = _get_content(c_type_name, c_type, expr)
        tu = _prepare_clang(content)
        test_method = find_test_method(tu.cursor)
        if test_method is None:
            return None
        statement = get_first_statement(test_method)
        if statement is None:
            return None
        return ClangExpressionEvaluator(this_val, content).get_value(
            next(statement.get_children()))
    except gdb.MemoryError as e:
        return str(e)
    except ParserError as e:
        logger.log_message("Failed to evaluate '{}': {}".format(
            expr, "".join(traceback.format_exception_only(type(e), e))))
        raise
    except Exception as e:
        exc_type, exc_value, exc_tb = sys.exc_info()
        logger.log_message("Failed to evaluate '{}': {}".format(
            expr, "".join(traceback.format_exception(type(e), e, exc_tb))))
        return None

def lambda_handler(event, context):
    print(event)
    fragment = event['fragment']
    logger.log_message(logging.INFO, fragment)
    resources = fragment['Resources'].copy()
    for name, resource in filter(is_tagged, resources.items()):
        params = resource.pop('Replicates')
        replicates = params['Elements']
        defaults = params.get('Defaults', {})
        # check if we need to get data from the Mappings section
        if isinstance(replicates, str):
            replicates = fragment['Mappings'][replicates]
        # use a distinct name so the loop's source dict isn't rebound
        new_resources = Substitutor(name, resource, defaults).process(replicates)
        # add replicated resources
        fragment['Resources'].update(new_resources)
        # remove the replicating resource
        del fragment['Resources'][name]
    logger.log_message(logging.INFO, fragment)
    processed = {
        'requestId': event['requestId'],
        'status': 'success',
        'fragment': fragment
    }
    print(processed)
    return processed

def extract_table(soup, table_class):
    dataset = []
    crs_range = ''
    candidate_count = ''
    try:
        # Extract the table
        table = soup.find("table", attrs={"class": table_class})
        # The first tr contains the field names.
        headings = [th.get_text() for th in table.find("tr").find_all("th")]
        dataset.append(headings)
        # Iterate through the rows of the table, skipping the heading row
        for row in table.find_all("tr")[1:]:
            for th in row.find_all("th"):
                crs_range = th.get_text()
            for td in row.find_all("td"):
                candidate_count = td.get_text()
            data = [crs_range, candidate_count]
            dataset.append(data)
        # Log result count
        logger.log_message(inspect.currentframe().f_code.co_name +
                           ' returns datasets with count ' + str(len(dataset)))
        return dataset
    except:
        # logging.exception(error)
        raise

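# Minimal usage sketch for extract_table() above (assumptions: `soup` comes
# from BeautifulSoup, each data row holds one <th> and one <td> as the loop
# implies, and the class name below is hypothetical):
#
#   from bs4 import BeautifulSoup
#   html = ('<table class="views-table">'
#           '<tr><th>CRS range</th><th>Candidates</th></tr>'
#           '<tr><th>601-1200</th><td>123</td></tr>'
#           '</table>')
#   soup = BeautifulSoup(html, 'html.parser')
#   extract_table(soup, 'views-table')
#   # -> [['CRS range', 'Candidates'], ['601-1200', '123']]
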
def get_spell(key):
    global spell_db
    global spells
    if key not in spells:
        if key not in spell_db:
            log_message("Warning: Spell %s not found." % key)
            spells[key] = deepcopy(spell_db['Nosuch'])
            spells[key]['name'] = 'Missing ' + key
        else:
            spells[key] = spell_db[key]
        spells[key]['label'] = format_label(spells[key]['name'])
        if 'mechanism' not in spells[key]:
            spells[key]['mechanism'] = None
        lvl = spells[key]['level']
        if lvl == 0:
            spells[key]['lvlline'] = "%s cantrip" % spells[key]['school']
        else:
            # pick the ordinal suffix for the spell level
            if lvl == 1:
                fx = 'st'
            elif lvl == 2:
                fx = 'nd'
            elif lvl == 3:
                fx = 'rd'
            else:
                fx = 'th'
            spells[key]['lvlline'] = "%d%s level %s" % (
                lvl, fx, spells[key]['school'])
    return spells[key]

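# Examples of the level line get_spell() builds (hypothetical entries,
# following the format strings above):
#   level 0, school 'Evocation'  -> "Evocation cantrip"
#   level 3, school 'Necromancy' -> "3rd level Necromancy"
#   level 4, school 'Abjuration' -> "4th level Abjuration"
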
def add_new_chat(self, chat_obj):
    chat_id = chat_obj['id']
    first_name = chat_obj.get('first_name')
    username = chat_obj.get('username')
    self.send_message(chat_id, self.TMPL_HELLO)
    self.new_chats.add((chat_id, first_name, username))
    self.chat_ids.add(chat_id)
    log_message('New user: {} ({}) with id {}'.format(
        username, first_name, chat_id))

def get_sidebar(key):
    global sidebars
    if key in sidebars:
        return sidebars[key]
    else:
        log_message("Warning: Sidebar %s not found." % key)
        tmp = deepcopy(sidebars['Missing Sidebar'])
        tmp['title'] = key
        return tmp

def add_magicitem_databases():
    global items
    for f in glob.glob("./items.*.json"):
        try:
            with open(f, 'r') as fin:
                lst = json.load(fin)
            items.update(lst)
        except Exception:
            log_message("Warning: Failure to load author item database %s." % f)
            log_message("  Try using https://jsonlint.com to check the format of your file.")

def __import_lift_types(filename, sheet_name=None):
    # TODO: refactor all accessory code into this one call (e.g. calc_)
    try:
        if not sheet_name:
            sheet_name = 0
        # pandas >= 0.21 spells this keyword sheet_name (older releases
        # accepted sheetname)
        lift_types = pd.read_excel(filename, sheet_name=sheet_name, header=0)
        # log the number of rows loaded, not len(sheet_name)
        log_message('Loaded {} lifts from lift types'.format(len(lift_types)))
        return lift_types
    except Exception as e:
        log_err(e, "Could not import {} from database file {}".format(
            sheet_name, filename))

def __import_weight_log(filename, sheet_name=None):
    # TODO: refactor all accessory code into this one call (e.g. calc_)
    try:
        if not sheet_name:
            sheet_name = 0
        weight_log = pd.read_excel(filename, sheet_name=sheet_name, header=0,
                                   index_col="Date", parse_dates=[0])
        log_message('Loaded {} records from weight log'.format(len(weight_log)))
        return weight_log
    except Exception as e:
        log_err(e, "Could not import {} from database file {}".format(
            sheet_name, filename))

def log(self, level, msg, suffix=None):
    if self.map_log_to_debug:
        level = 'debug'
    if suffix is not None:
        if self.name == '':
            name = suffix
        else:
            name = self.name + '.' + suffix
    else:
        name = self.name
    log_message(level, msg, name=name)

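# Usage sketch for log() above (hedged: `Component` is a hypothetical
# stand-in for whatever class owns this method). A suffix extends the dotted
# logger name, keeping sub-component messages attributable to their parent:
#
#   comp = Component(name='engine')
#   comp.log('info', 'starting')                  # logged under 'engine'
#   comp.log('debug', 'cache miss', suffix='io')  # logged under 'engine.io'
#
# With map_log_to_debug set, every call is demoted to the 'debug' level.
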
def get_monster(mname):
    global monster_manual
    global monster_db
    if mname in monster_manual:
        return monster_manual[mname]
    elif mname in monster_db:
        denormalize_monster(mname)
        return monster_manual[mname]
    log_message("Warning: Missing Monster %s." % mname)
    alias_monster("Missing Monster", mname, False)
    return monster_manual[mname]

def __init__(self, config):
    super().__init__(config.get_bot_token())
    self.db = SqliteStorage(config)
    self.last_sale_id = self.db.get_bot_last_sale_id()
    self.upd_offset = self.db.get_bot_upd_offset()
    self.chat_ids = self.db.get_chat_ids()
    self.new_chats = set()
    self.last_notification_dtm = None
    self.templates_path = config.get_templates_path()
    self.TMPL_NOTIF_CAPTION = self.get_template('notification_caption')
    self.TMPL_HELLO = self.get_template('hello_message')
    log_message('Bot has started')

def __init__(self, filename):
    if os.path.isfile(filename):
        self.__backup_file(filename)
    else:
        log_message('Database file not found: {}\n'.format(filename) +
                    'Building a new one from scratch')
        self.__build_dataframe()
    try:
        self.__sql_engine = create_engine('sqlite:///' + filename)
    except Exception as e:
        log_err(e, 'Could not create database engine with: {}'.format(filename))
    self.__load_data()

def __init__(self, filename):
    self.filename = filename
    if os.path.isfile(filename):
        self.__backup_file(filename)
    else:
        log_message('Database file not found: {}\n'.format(filename) +
                    'Building a new one from scratch')
        self.__build_file()
    self.workout_log, self.weight_log, self.lift_types = self.__load_data()
    # TODO: Refactor these calls
    self.calc_weight_data()
    self.calc_workout_data()

def load_html(html):
    try:
        # Load the html
        soup = BeautifulSoup(html, 'html.parser')
        # Log the result
        logger.log_message(inspect.currentframe().f_code.co_name +
                           ' Loaded html: ' + soup.prettify())
        return soup
    except:
        # logging.exception(error)
        raise

def __load_data(self):
    self.data = None
    try:
        self.data = pd.read_sql_table('log', self.__sql_engine,
                                      index_col='Date', parse_dates=['Date'])
    except Exception as e:
        log_err(e, "Could not read database file")
    # See if the workout log contains data; if not, prepare it to take some.
    # len(None) raises TypeError when the read above failed.
    try:
        log_message('Loaded {} records from db'.format(len(self.data)))
    except TypeError:
        self.__build_dataframe()

def get_date_modified(soup):
    try:
        # Get the last modified date
        modified_date = soup.find('time').contents
        # Log result
        logger.log_message(inspect.currentframe().f_code.co_name +
                           ' returns modified date: ' + str(modified_date))
        return modified_date
    except:
        # logging.exception(error)
        raise

def add_npc_databases():
    global npcs_db
    for f in glob.glob("./npcs.*.json"):
        try:
            with open(f, 'r') as fin:
                lst = json.load(fin)
            npcs_db.update(lst)
        except Exception:
            log_message("Warning: Failed to load author NPC database %s." % f)
            log_message("  Try using https://jsonlint.com to check the format of your file.")

def extract_div(soup, div_class, div_count):
    try:
        # Extract the div at the requested index
        div = soup.find_all("div", attrs={"class": div_class})[div_count]
        # Log result
        logger.log_message(inspect.currentframe().f_code.co_name +
                           ' returns result from div ' + div.prettify())
        return div
    except:
        # logging.exception(error)
        raise

def add_sidebar_databases():
    global sidebars
    for f in glob.glob("./sidebars.*.json"):
        try:
            with open(f, 'r') as fin:
                lst = json.load(fin)
            sidebars.update(lst)
        except Exception:
            log_message("Warning: Unable to load author sidebar file %s" % f)
            log_message("  Try using https://jsonlint.com to check the format of your file.")

def get(url, resource, id):
    try:
        request_url = url + id + resource
        # Invoke the REST API call; verify=False skips TLS certificate checks
        response = requests.get(request_url, verify=False)
        # Return the status code and the JSON-encoded content of the
        # response, if any
        response_list = [response.status_code, response.text]
        # Log response
        logger.log_message('get return code: ' + str(response.status_code))
        logger.log_message('get response text: ' + response.text)
        return response_list
    except:
        # logging.exception(error)
        raise

def add_monster_databases():
    global monster_db
    files = glob.glob("./monsterdb.*.json")
    for f in files:
        try:
            with open(f, 'r') as fin:
                mlst = json.load(fin)
            monster_db.update(mlst)
        except Exception:
            log_message("Warning: Failed to load author monster database %s." % f)
            log_message("  Try using https://jsonlint.com to check the format of your file.")

def add_bib_databases():
    global bibliography
    for f in glob.glob("./bibliography.*.json"):
        try:
            with open(f, 'r') as fin:
                lst = json.load(fin)
            bibliography.update(lst)
        except Exception:
            log_message("Warning: Failed to load author bibliographic database %s." % f)
            log_message("  Try using https://jsonlint.com to check the format of your file.")

def get_citation(key):
    global bibliography
    if key in bibliography:
        bib = bibliography[key]
        # Append the affiliate id unless the URL already carries a query string
        if bib['site'] == 'DMsGuild' and '?' not in bib['url']:
            bib['link'] = bib['url'] + '?affiliate_id=33042'
        elif bib['site'] == 'Amazon' and '?' not in bib['url']:
            bib['link'] = bib['url'] + '?tag=dndadventure-20'
        else:
            bib['link'] = bib['url']
        return bib
    else:
        log_message('No such key as %s in bibliography.' % key)
        return bibliography['nosuch']

def process_updates(self):
    updates = self.get_updates(offset=self.upd_offset)
    if not updates['ok']:
        print('Failed to fetch updates')
        return
    updates = updates['result']
    for update in updates:
        self.upd_offset = update['update_id'] + 1
        try:
            message = update['message']
            text = message['text']
            if text == '/start':
                self.add_new_chat(message['chat'])
        except Exception:
            log_message('Failed to process update')
            log_exception()

def send_sms(message, taccount_sid, tauth_token):
    try:
        # Your Account Sid and Auth Token from twilio.com/console
        client = Client(taccount_sid, tauth_token)
        # use a distinct name so the `message` argument isn't rebound to the
        # Twilio response object
        sms = client.messages.create(
            body=message,
            from_='+12264076158',
            to='+14372294588')
        # Log the result
        logger.log_message(inspect.currentframe().f_code.co_name +
                           ' sms sent ' + str(sms.sid))
    except:
        # logging.exception(error)
        raise

async def on_message(self, message):
    if isinstance(message.channel, discord.DMChannel):
        logger.log_message(message)
        return
    if 'bambi' in message.content.lower() and admin_file.get('reaction_bambi', False):
        emojis = list(message.guild.emojis)
        for emoji in emojis:
            if emoji.name.lower() == 'bambi':
                await message.add_reaction(emoji)
                return
    if 'temp' in message.content.lower() and admin_file.get('reaction_tempia', False):
        emojis = list(message.guild.emojis)
        for emoji in emojis:
            if emoji.name.lower() == 'ayaya':
                await message.add_reaction(emoji)
                return

def get_magicitem(key):
    global items
    if key in items:
        it = deepcopy(items[key])
        # fill in defaults for optional fields
        if 'need' not in it:
            it['need'] = []
        if 'attunement' not in it:
            it['attunement'] = False
        if 'rarity' not in it:
            it['rarity'] = 'Unique'
        if 'consumable' not in it:
            it['consumable'] = False
        if 'spell' not in it:
            it['spell'] = None
        return it
    else:
        log_message('Warning: No such item as %s.' % key)
        it = deepcopy(items['missing'])
        it['name'] = "Missing " + key
        return it

def get_npc(key):
    global npcs_db
    global npcs
    if key not in npcs:
        if key not in npcs_db:
            log_message("NPC %s not found in database." % key)
            npcs[key] = deepcopy(npcs_db['No Such Person'])
            npcs[key]['name'] = key + ' not found'
        else:
            # without this else the placeholder above would be overwritten
            # and a missing key would raise KeyError
            npcs[key] = npcs_db[key]
        npcs[key]['gender'] = npcs[key]['gender'].capitalize()
        npcs[key]['race'] = npcs[key]['race'].capitalize()
        npcs[key]['title'] = npcs[key]['title'].capitalize()
        if 'stats' not in npcs[key]:
            npcs[key]['sblock'] = None
        else:
            if npcs[key]['stats'] != npcs[key]['name']:
                alias_monster(npcs[key]['stats'], npcs[key]['name'])
            npcs[key]['sblock'] = get_monster(npcs[key]['name'])
        npcs[key]['label'] = format_label(npcs[key]['name'])
    return npcs[key]

def from_fitnotes(from_file, to_file):
    try:
        # pd.DataFrame.from_csv was removed from pandas; read_csv with an
        # index column and parsed dates is the equivalent call
        fitnotes = pd.read_csv(from_file, index_col=0, parse_dates=True)
        new_log = pd.DataFrame(columns=Backend.excel_format[Backend.wl_name])
        new_log = new_log.set_index('Date')
        new_log.Lift = fitnotes.Exercise
        new_log.Weight = fitnotes["Weight (lbs)"]
        new_log.Reps = fitnotes.Reps
        # parse the free-text comment column into RPE values and comments;
        # idx advances every row so positions stay aligned with fitnotes
        idx = 0
        for date, line in fitnotes.Comment.items():
            if line is not np.NaN:
                RPE, comment = _parse_RPE(line)
                new_log.RPE.iloc[idx] = RPE
                new_log.Comments.iloc[idx] = comment
            idx += 1
        print(new_log.tail(10))
        new_log.to_excel(to_file)
        log_message("Imported {} lines from {} to {}".format(
            len(new_log), from_file, to_file))
    except Exception as e:
        log_err(e, "Error importing from FitNotes")

def content(self):
    refs = []
    for tag in self.tbd:
        if tag not in self.tags:
            if tag not in PAGE_INDEX:
                log_message(
                    "Error: Reference to unknown fantasy grounds page: %s." % tag)
                # skip unknown pages instead of raising KeyError below
                continue
            self.tags.append(tag)
            refs.append({'text': tag, 'id': PAGE_INDEX[tag]})
    return {
        'name': self.name,
        'id': self.pageid,
        'text': self.text,
        'next': self.nextpage,
        'prev': self.prevpage,
        'links': self.links,
        'prelinks': self.prelinks,
        'references': refs
    }

def get_generator(cls, klass, url, to_dict=False, params=None, retries=None):
    """
    Generator for managing GET requests. For each GET request it will
    yield the next object in the results until there are no more objects.
    If the GET response contains a 'next' value in the 'paging' section,
    the generator will automatically fetch the next set of results and
    continue the process until the total limit has been reached or there
    is no longer a 'next' value.

    :param klass: The class to use for the generator.
    :type klass: class
    :param url: The URL to send the GET request to.
    :type url: str
    :param to_dict: Return a dictionary instead of an instantiated class.
    :type to_dict: bool
    :param params: The GET parameters to send in the request.
    :type params: dict
    :param retries: Number of retries before stopping.
    :type retries: int
    :returns: Generator
    """
    if not klass:
        raise pytxValueError('Must provide a valid object to query.')
    if not params:
        params = dict()
    next_ = True
    while next_:
        results = cls.get(url, params, retries)
        if do_log():
            try:
                before = results[t.PAGING][p.CURSORS].get(pc.BEFORE, 'None')
                after = results[t.PAGING][p.CURSORS].get(pc.AFTER, 'None')
                count = len(results[t.DATA])
                log_message('Cursor: BEFORE: %s, AFTER: %s, LEN: %d' % (
                    before, after, count))
            except Exception as e:  # Python 3 syntax (was `except Exception, e`)
                log_message('Missing key in response: %s' % e)
        for data in results[t.DATA]:
            if to_dict:
                yield data
            else:
                yield cls.get_new(klass, data)
        try:
            next_ = results[t.PAGING][t.NEXT]
        except KeyError:
            log_message('No next in Pager to follow.')
            next_ = False
        if next_:
            url = next_
            params = {}

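# Usage sketch for get_generator() above (hedged: `Broker`, ThreatDescriptor
# and the endpoint variable are illustrative placeholders for whatever
# class/URL pair the caller normally supplies). The generator hides the
# cursor pagination, so the caller only iterates:
#
#   for descriptor in Broker.get_generator(ThreatDescriptor,
#                                          descriptors_url,
#                                          params={'limit': 25}):
#       process(descriptor)
#
# Passing to_dict=True yields the raw response dictionaries instead of
# instantiated objects, which is cheaper when only a few fields are needed.
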
def extract_text_from_div(div):
    try:
        # Extract the paragraphs
        result_list = div.find_all('p')
        # Log result
        logger.log_message(inspect.currentframe().f_code.co_name +
                           ' returns result text list count ' + str(len(result_list)))
        output_list = []
        # Add heading
        row = [result_list[0].contents, '']
        output_list.append(row)
        # Get the formatted output in a list, skipping the heading row
        for item in result_list[1:]:
            if 'strong' in str(item.contents):
                # # Extract text from string
                # temp = str(item.contents[0]).replace('/', '')
                # field = temp.split('<strong>')[1]
                # # Log field text
                # logger.log_message(inspect.currentframe().f_code.co_name + 'field text: ' + field)
                # Add row to the list
                if len(item.contents) > 1:
                    row = [item.contents[0], item.contents[1]]
                else:
                    row = [item.contents[0], '']
                output_list.append(row)
        # Log result
        logger.log_message(inspect.currentframe().f_code.co_name +
                           ' returns output list count ' + str(len(output_list)))
        return output_list
    except:
        # logging.exception(error)
        raise

def invoke_endpoint(url):
    try:
        logger.log_message('endpoint url: ' + url)
        # Invoke the endpoint; verify=False skips TLS certificate checks
        response = requests.get(url, verify=False)
        # Log response
        logger.log_message('invoke_endpoint return code: ' + str(response.status_code))
        logger.log_message('invoke_endpoint response text: ' + response.text)
        # Return the html
        return response.text
    except:
        # logging.exception(error)
        raise

def __str__(self):
    log_message(self.message)
    return self.message