def show_house(bot, update):
    """CallbackQueryHandler: list all residents of a house, grouped by section.

    Sends a single message, or — when the rendered list exceeds the ~6200-char
    headroom under Telegram's message-size limit — splits it into two messages
    around the section-4 header.
    """
    log.info(log_msg(update))
    update.callback_query.answer()
    reply_markup = InlineKeyboardMarkup([[InlineKeyboardButton('Меню', callback_data='_menu')]])
    if update.callback_query.data == 'show_this_house':
        # user wants to see the currently browsed house
        user_query = Show.get(user_id=update.effective_user.id)
    else:
        # user wants to see their own house (and has one)
        user_query = chosen_owns(update)
    neighbors = []
    sections = Own.select(Own.section).where(Own.house == user_query.house, Own.section).distinct().order_by(
        Own.section)
    query = Own.select().where(Own.house == user_query.house, Own.section).order_by(Own.floor)
    for i in sections:
        # section header, right-justified so it lines up in the chat
        neighbors.append(f'\n{"📭 <b>Секція".rjust(30, " ")} {i.section}</b>\n')
        for user in query.where(Own.section == i.section):
            neighbors.append(f'{user.user} {user}\n')
    show_list = (f'<b>Мешканці будинку № {user_query.house}</b>:\n' + '{}' * len(neighbors)).format(*neighbors)
    if len(show_list) < 6200:
        bot.sendMessage(chat_id=update.effective_user.id, parse_mode=ParseMode.HTML, text=show_list,
                        reply_markup=reply_markup)
    else:
        # too long for one message: split on the section-4 header
        part_1, part_2, part_3 = show_list.partition('📭 <b>Секція 4'.rjust(30, ' ') + '</b>' + '\n')
        bot.sendMessage(chat_id=update.effective_user.id, parse_mode=ParseMode.HTML, text=part_1[:-2])
        # to do: remove "." from 2nd msg. Without that dot, rjust not works
        bot.sendMessage(chat_id=update.effective_user.id, parse_mode=ParseMode.HTML, text='.' + part_2 + part_3,
                        reply_markup=reply_markup)
def get(self, storagemodel: object, hide=0) -> StorageQueueModel:
    """Pop the next message from the model's queue into the model.

    Returns the populated model, or None when the model is unregistered,
    not a queue model, the queue is empty, or Azure raises an error.
    `hide` > 0 hides the retrieved message for that many seconds.
    """
    modelname = storagemodel.__class__.__name__
    # guard clauses: wrong type / unregistered model
    if not isinstance(storagemodel, StorageQueueModel):
        log.info('model {} is not a Queue Model'.format(modelname))
        return None
    if modelname not in self._models:
        log.info('please register model {} first'.format(modelname))
        return None
    try:
        # fetch a single message, optionally with a visibility timeout
        if hide > 0:
            messages = self._service.get_messages(storagemodel._queuename, num_messages=1, visibility_timeout=hide)
        else:
            messages = self._service.get_messages(storagemodel._queuename, num_messages=1)
        # merge the retrieved message (if any) into the model
        for message in messages:
            storagemodel.mergemessage(message)
        # queue was empty -> nothing was merged
        if storagemodel.id is None:
            storagemodel = None
    except AzureException as e:
        log.error('can not get queue message: queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
        storagemodel = None
    return storagemodel
async def feed_fetcher(database):
    """Fetch all the feeds.

    Loops forever: drains the 'fetcher' queue, scheduling a throttled fetch
    per job, then sleeps CHECK_INTERVAL seconds before the next run.
    """
    client = ClientSession(auth=BasicAuth(API_KEY))
    sem = Semaphore(MAX_CONCURRENT_REQUESTS)
    queue = await connect_redis()
    while True:
        log.info("Beginning run.")
        tasks = []
        # NOTE(review): `async with client` closes `client` when the first
        # run ends, yet later iterations keep using it — confirm against the
        # sibling fetcher() which opens a fresh session per run
        async with client as session:
            while True:
                try:
                    job = await dequeue(queue, 'fetcher')
                    feed = await database.feeds.find_one(
                        {'advert_id': job['_id']})
                    task = ensure_future(
                        throttle(sem, session, feed, client, database, queue))
                    tasks.append(task)
                except Exception:
                    # dequeue failure/drain ends this run
                    log.error(format_exc())
                    break
        responses = gather(*tasks)
        await responses
        log.info("Run complete, sleeping %ds...", CHECK_INTERVAL)
        # bug fix: slept a hard-coded 1s while logging CHECK_INTERVAL;
        # now sleeps the advertised interval (matches fetcher())
        await sleep(CHECK_INTERVAL)
    queue.close()
    await queue.wait_closed()
def revert(self, args):
    """
    * write a lock file
    * reset to last or specified tag
    * call sync hook with the prefix (repo) and tag info

    :param args: parsed CLI args; may carry ``tag`` (revert target),
                 plus ``auto_sync`` and ``force`` flags.
    """
    if not self._check_lock():
        raise SartorisError(message=exit_codes[30])
    # This will be the tag on the revert commit
    revert_tag = self._make_tag()
    # Extract tag on which to revert
    if hasattr(args, 'tag'):
        tag = args.tag
    else:
        # revert to previous to current tag
        repo_tags = self._dulwich_get_tags()
        if len(repo_tags) >= 2:
            # NOTE(review): keys()[-2] assumes a subscriptable, ordered key
            # collection (Python 2 dict semantics) — confirm under Python 3
            tag = repo_tags.keys()[-2]
        else:
            raise SartorisError(message=exit_codes[36], exit_code=36)
    # Ensure tag to revert to was set
    if tag == '':
        raise SartorisError(message=exit_codes[13], exit_code=13)
    #
    # Rollback to tag:
    #
    #  1. get a commit list
    #  2. perform no-commit reverts
    #  3. commit
    #
    log.info(__name__ + ' :: revert - Attempting to revert to tag: {0}'.
             format(tag))
    tag_commit_sha = self._get_commit_sha_for_tag(tag)
    commit_sha = None
    for commit_sha in self._git_commit_list():
        if commit_sha == tag_commit_sha:
            break
        self._git_revert(commit_sha)
    # Ensure the commit tag was matched
    if commit_sha != tag_commit_sha or not commit_sha:
        self._dulwich_reset_to_tag()
        raise SartorisError(message=exit_codes[35], exit_code=35)
    self._dulwich_commit(self._make_author(),
                         message='Rollback to {0}.'.format(tag))
    log.info(__name__ + ' :: revert - Reverted to tag: "{0}", '
             'call "git deploy sync" to persist'.format(tag))
    if args.auto_sync:
        return self._sync(revert_tag, args.force)
    return 0
def welcome(username):
    '''User is redirected to Welcome screen on successful login.

    :param username: name of the freshly authenticated user, echoed in the page
    :return: rendered ``user.html`` template
    '''
    log.info("User %s redirected to Welcome screen" %(username,))
    return render_template('user.html', username = username)
def do_tweet(debug, tweet=None, script=None):
    """Post a tweet, from an explicit `script` string or from the oldest
    PendingTweet in the database (generating one when none is pending).

    Args:
        debug: when truthy, nothing is posted to Twitter or deleted from DB.
        tweet: optional PendingTweet to post.
        script: optional raw text to post directly.
    """
    if script is not None:
        msg = script
        if not debug:
            api = get_twitter_api()
            # bug fix: this branch posted `tweet.content`, which raises
            # AttributeError when only `script` is supplied (tweet is None)
            api.update_status(status=script)
            msg = f'tweeted: {script}'
        log.info(msg)
        print(msg)
        return
    if tweet is None:
        tweet = PendingTweet.query.order_by(PendingTweet.added_at.asc()).first()
        if not tweet:
            # nothing pending: generate one tweet, then retry the lookup
            msg = 'has no pending tweet. generate..'
            print(msg)
            log.warn(msg)
            do_generate(debug, 1)
            tweet = PendingTweet.query.order_by(PendingTweet.added_at.asc()).first()
    if not debug:
        api = get_twitter_api()
        api.update_status(status=tweet.content)
        # a posted tweet is no longer pending
        db.session.delete(tweet)
        db.session.commit()
    msg = f'tweeted: {tweet}'
    log.info(msg)
    print(msg)
def fake():
    """Render the weather page with canned forecast data (no API call).

    Useful for developing/testing the template without hitting the
    weather service.
    """
    log.info('STARTING -- fake')
    sw = SeanWeather()
    # hard-coded hourly forecast entries (temps in F/C, epoch-ms dates)
    sw.weather_data = [
        {'temp': u'85', 'feel': u'83', 'pop': u'0', 'icon_pos': 10, 'feel_c': u'28', 'temp_c': u'29',
         'date': 1498334400000, 'icon': u'http://icons.wxug.com/i/c/k/partlycloudy.gif'},
        {'temp': u'85', 'feel': u'83', 'pop': u'0', 'icon_pos': 10, 'feel_c': u'28', 'temp_c': u'29',
         'date': 1498345200000, 'icon': u'http://icons.wxug.com/i/c/k/clear.gif'},
        {'temp': u'79', 'feel': u'79', 'pop': u'1', 'icon_pos': 10, 'feel_c': u'26', 'temp_c': u'26',
         'date': 1498356000000, 'icon': u'http://icons.wxug.com/i/c/k/nt_clear.gif'},
        {'temp': u'78', 'feel': u'78', 'pop': u'2', 'icon_pos': 10, 'feel_c': u'26', 'temp_c': u'26',
         'date': 1498366800000, 'icon': u'http://icons.wxug.com/i/c/k/nt_clear.gif'},
        {'temp': u'77', 'feel': u'77', 'pop': u'2', 'icon_pos': 10, 'feel_c': u'25', 'temp_c': u'25',
         'date': 1498377600000, 'icon': u'http://icons.wxug.com/i/c/k/nt_clear.gif'},
        {'temp': u'75', 'feel': u'75', 'pop': u'2', 'icon_pos': 10, 'feel_c': u'24', 'temp_c': u'24',
         'date': 1498388400000, 'icon': u'http://icons.wxug.com/i/c/k/nt_clear.gif'},
        {'temp': u'74', 'feel': u'74', 'pop': u'3', 'icon_pos': 10, 'feel_c': u'23', 'temp_c': u'23',
         'date': 1498399200000, 'icon': u'http://icons.wxug.com/i/c/k/nt_clear.gif'},
        {'temp': u'73', 'feel': u'73', 'pop': u'3', 'icon_pos': 10, 'feel_c': u'23', 'temp_c': u'23',
         'date': 1498410000000, 'icon': u'http://icons.wxug.com/i/c/k/nt_clear.gif'}]
    sw.data_string = jsonify(sw.weather_data)
    sw.icon = 'http://icons.wxug.com/i/c/k/nt_clear.gif'
    sw.user_input = 'chilled'
    sw.update_current_conditions()
    log.info('FINISHED with %s -- fake', sw.user_input)
    return render_template('weather_form.html', sw=sw)
def back_process(self, *args, **kwargs):
    """Post-process a downloaded S3 file: convert it, upload the result
    back to S3, then delete the locally generated file.

    Returns:
        bool: True when both the conversion and the upload succeeded.
    """
    is_success = False
    super(ConvertProcessor, self).back_process(*args, **kwargs)
    s3_operator = kwargs.pop('s3_operator')  # S3Operator instance
    s3_local_file = kwargs.pop('s3_local_file')  # local path the S3 file was downloaded to
    object_key = kwargs['object_key']  # S3 object key (partial path derived from local path)
    action_type = kwargs[
        'action_type']  # conversion action, e.g. 'convert_to_pdf', 'check_is_sexy'
    new_file_name = kwargs['new_name']  # name of the converted output, e.g. 'filename.pdf'
    object_dir = '/'.join(object_key.split('/')[:-1])  # directory part of the object key
    local_dir = '/'.join(s3_local_file.split('/')[:-1])  # local save directory
    if not new_file_name:
        # derive the output name from the source file and the action
        new_file_name = get_new_name_by_type(s3_local_file, action_type)
    generate_path = os.path.join(local_dir, new_file_name)  # path of the generated file
    new_object_key = os.path.join(object_dir, new_file_name)  # S3 key for the generated file
    generate_file_path = self.common_convert(action_type=action_type,
                                             source_path=s3_local_file,
                                             generate_path=generate_path,
                                             generate_dir=local_dir)
    if os.path.isfile(generate_file_path):
        if s3_operator.upload_to_s3(new_object_key, generate_file_path):  # upload result to S3
            is_success = True
        # remove the generated local file regardless of upload outcome
        log.info("****remove generate_file_path:%s" % generate_file_path)
        os.remove(generate_file_path)
    return is_success
async def fetcher(database): """Fetch all the feeds""" # disable certificate validation to cope with self-signed certificates in some feed back-ends client = ClientSession(connector=TCPConnector(verify_ssl=False)) sem = Semaphore(MAX_CONCURRENT_REQUESTS) queue = await connect_redis() while True: log.info("Beginning run.") tasks = [] threshold = datetime.now() - timedelta(seconds=FETCH_INTERVAL) async with ClientSession() as session: while True: try: job = await dequeue(queue, 'fetcher') feed = await database.feeds.find_one({'_id': job['_id']}) last_fetched = feed.get('last_fetched', threshold) if last_fetched <= threshold: task = ensure_future( throttle(sem, session, feed, client, database, queue)) tasks.append(task) except Exception: log.error(format_exc()) break responses = gather(*tasks) await responses log.info("Run complete, sleeping %ds...", CHECK_INTERVAL) await sleep(CHECK_INTERVAL) queue.close() await queue.wait_closed()
def make_bars(prepared_data):
    """create bars for houses sections count"""
    log.info('this func has no update')
    # to do: enable time.sleep(0.5) if async will make unexpected charts
    # time.sleep(0.5)
    values_ = prepared_data['bars_values']

    def autolabel(rects, height_factor):
        # annotate each bar with its integer height
        for i, rect in enumerate(rects):
            height = rect.get_height()
            label = '%d' % int(height)
            ax3.text(rect.get_x() + rect.get_width() / 2., height_factor * height,
                     '{}'.format(label),
                     ha='center', va='bottom')

    # one PNG chart per house: bars are per-section resident counts
    for house in values_:
        sections = [f'Сек{i[-1]}' for i in houses_arr[f'house_{house}']]
        values = [values_[house].get(int(i[-1]), 0) for i in sections]
        fig3, ax3 = plt.subplots()
        ax3.bar(sections, values)
        ax3.set_title(f'Будинок {house}')
        autolabel(ax3.patches, height_factor=0.85)
        img_path = os.path.join('img', 'charts', f'bar{house}.png')
        fig3.savefig(img_path, dpi=200)
        fig3.clf()
        plt.close()
def gen_shard(sess, input_base_dir, image_filenames, output_filename,
              image_texts):
    """Create a TFRecord file from a list of image filenames.

    Args:
        sess: TensorFlow session used by get_image for decoding.
        input_base_dir: directory the relative image file names live in.
        image_filenames: relative image file names of this shard.
        output_filename: path of the TFRecord shard to write.
        image_texts: ground-truth text per image (parallel to filenames).
    """
    writer = tf.python_io.TFRecordWriter(output_filename)
    for item, filename in enumerate(image_filenames):
        path_filename = os.path.join(input_base_dir, filename)
        if os.stat(path_filename).st_size == 0:
            log.warning('Skipping empty files: %s' % (filename, ))
            continue
        try:
            image_data, height, width = get_image(sess, path_filename)
            text, labels = get_text_and_labels(image_texts[item])
            if is_writable(width, text):
                # inspect text and labels
                # print(text,labels)
                if len(labels) == 0:
                    # empty label set: print for manual inspection, skip record
                    print(text, labels)
                else:
                    example = make_example(filename, image_data, labels, text,
                                           height, width)
                    writer.write(example.SerializeToString())
            else:
                log.info('Skipping Image with too short width: %s' % (filename, ))
        except Exception as e:
            # Some files have bogus payloads, catch and note the error, moving on
            log.warning('Error occured during processing file %s' % (filename, ))
            log.error(e)
    writer.close()
def get_discography(artist_id, session, ignore_cache=False):
    """get all albums and singles from artist id

    Results are pickled per artist and reused for up to 24 hours, as long
    as the cached layout version is current.

    Args:
        artist_id: id of the artist to look up.
        session: service session providing get_albums_from_artist().
        ignore_cache: when True, always refetch and rewrite the cache.
    Returns:
        Discography for the artist.
    """
    check_dir()
    discography = None
    # check if artist is cached
    if ignore_cache == False and os.path.exists(
            cache_artist.format(artist_id)):
        # load with pickle
        with open(cache_artist.format(artist_id), 'rb') as f:
            discography = pickle.load(f)
        # check if older than 24hours
        tstamp = os.path.getmtime(cache_artist.format(artist_id))
        if time.time() > tstamp + 24 * 60 * 60:
            discography = None
            log.info('Removed discography cache because it expired.')
        # check version — bug fix: `discography` is already None when the
        # cache just expired above, so the attribute access crashed
        if discography is not None and discography.version < LAYOUT_VERSION:
            discography = None
            log.info('Removed discography cache because older layout version.')
    if discography is None:
        # get albums and save
        albums = session.get_albums_from_artist(artist_id)
        discography = Discography(
            str(session).lower(), albums[0].artists[0].name, artist_id, albums)
        with open(cache_artist.format(artist_id), 'wb') as f:
            pickle.dump(discography, f)
    return discography
def __copy(self, file_fullnames):
    """Write sampled lines of each input file into the output directory.

    Depending on __OutputFormat, samples are either mirrored into
    per-source directories (format 2) or flattened into one directory
    with the original path encoded in the file name.
    """
    output_file = ''
    for input_file in file_fullnames:
        log.info('Sampling ' + input_file)
        try:
            if self.__OutputFormat == 2:  # save samples into per-source directories
                if win:
                    # on Windows drive colons are invalid in paths: replace with '_'
                    curr_path = self.__OutputPath + os.sep + os.path.split(
                        input_file)[0].replace(':', '_')
                else:
                    curr_path = self.__OutputPath + os.path.split(
                        input_file)[0]
                os.makedirs(curr_path, exist_ok=True)
                output_file = os.path.join(curr_path,
                                           os.path.split(input_file)[1])
            else:  # flatten into one directory; encode the source path in the name
                file_name = input_file.replace(os.sep, '_').replace(':', '_')
                output_file = self.__OutputPath + '/' + file_name
            with open(output_file, 'w', encoding='utf-8') as fp:
                # NOTE(review): __readLine is called with an explicit `self`
                # argument — presumably a plain function or staticmethod;
                # confirm it is not a bound instance method
                for line in self.__readLine(self, input_file):
                    fp.write(line)
        except Exception as err:
            log.warning(input_file + '\t' + str(err))
            if os.path.exists(output_file):
                # drop the partially written sample file
                os.remove(output_file)
            continue
def make_bars(prepared_data):
    """create bars for houses sections count"""
    log.info('this func has no update')
    values_ = prepared_data['bars_values']

    def autolabel(rects, height_factor):
        # annotate each bar with its integer height
        for i, rect in enumerate(rects):
            height = rect.get_height()
            label = '%d' % int(height)
            ax.text(rect.get_x() + rect.get_width() / 2., height_factor * height,
                    '{}'.format(label),
                    ha='center', va='bottom')

    mpl.rcParams.update({'font.size': 15})
    # one PNG chart per house: bars are per-section resident counts
    for house in values_:
        sections = [f'Подъезд {i[-1]}' for i in houses_arr[f'house_{house}']]
        values = [values_[house].get(int(i[-1]), 0) for i in sections]
        plt.bar(sections, values)
        ax = plt.gca()
        ax.set_title(f'Дом {house}')
        autolabel(ax.patches, height_factor=0.85)
        img_path = os.path.join('img', 'charts', f'bar{house}.png')
        plt.savefig(img_path, dpi=200)
        plt.clf()
        plt.close()
def building(bot, update):
    """CallbackQueryHandler. pattern ^building$"""
    log.info(log_msg(update))
    update.callback_query.answer()
    # single "menu" button under the info message
    menu_button = InlineKeyboardButton('Меню', callback_data='_menu')
    reply_markup = InlineKeyboardMarkup([[menu_button]])
    bot.sendMessage(chat_id=update.effective_user.id,
                    text=building_msg,
                    parse_mode=ParseMode.HTML,
                    disable_web_page_preview=True,
                    reply_markup=reply_markup)
def update_units(self):
    """Resolve display units: previous session value, overridden by the
    `new_units` request argument when present (falls back to Fahrenheit)."""
    self.units = Units.get(self.previous.units, Units.F)
    requested = request.args.get('new_units')
    log.info('self.units: %s, request units: %s', self.units, requested)
    self.units = Units.get(requested, self.units)
    log.info('new units: %s', self.units)
def _sync(self, kwargs):
    """
    This method makes calls to specialized drivers to perform the deploy.

        * Check for sync script
        * default sync if one is not specified

    :param kwargs: deploy options; ``dryrun`` selects the dry-run driver.
    :return: 0 on success.
    """
    if kwargs['dryrun']:
        logline = 'SYNC -> dryrun.'
        log.info(__name__ + ' :: ' + logline)
        DeployDriverDryRun().sync(kwargs)
    else:
        logline = 'SYNC - calling default sync.'
        log.info(__name__ + ' :: ' + logline)
        self.deploy_log.log('user(' + self.config['user.name'] + ') ' + logline)
        DeployDriverDefault().sync(kwargs)
    # Clean-up
    self.deploy_log.log_archive()
    if self._locker.check_lock():
        self._locker.remove_lock()
    logline = 'SYNC successful!'
    self.deploy_log.log('user(' + self.config['user.name'] + ') ' + logline)
    return 0
def db_link_authors(words: List[List[str]]) -> None:
    """
    Create relations between words and their authors (WID <-> AID) in DB
    These connections locate in 't_connect_authors' table

    :param words: List of words' data received from a text file
        using function convert_file_to_list
    :return: None
    """
    log.info("Start to link words with their authors")
    author_by_abbr = {author.abbreviation: author for author in Author.query.all()}
    # map id_old -> list of author abbreviations parsed from the raw word row
    abbreviations_by_id_old = {}
    for word_data in words:
        abbreviations_by_id_old[int(word_data[0])] = word_data[5].split(" ", 1)[0].split("/")
    for word in Word.query.all():
        authors_abbreviations = abbreviations_by_id_old[word.id_old]
        word.add_authors([author_by_abbr[abbreviation]
                          for abbreviation in authors_abbreviations])
        log.debug(f"{word.name} {' '*(26-len(word.name))}-> {'/'.join(authors_abbreviations)}")
    db.session.commit()
    log.info("Finish to link words with their authors")
def save_user_data(bot, update):
    """callbackQuery handler. pattern: ^_apart_reject$|^_section_reject$ AND integer text handler

    Persists the user's apartment choice (number or explicit rejection),
    notifies reporters/neighbors and regenerates the cached charts.
    """
    log.info(log_msg(update))
    if update.callback_query:
        update.callback_query.answer()
    user = chosen_owns(update)
    user_mode = Show.get(user_id=update.effective_user.id)
    text = f'В базе {"ОБНОВЛЕН" if user.updated else "СОЗДАН"} пользователь:\n'
    if user_mode.msg_apart_mode and update.message:
        # plain-text message while in apartment-input mode: store the number
        apartment = int(update.message.text)
        user.apartment = apartment
    else:
        if update.callback_query.data == '_apart_reject':
            # user declined to share an apartment number
            user.apartment = None
    user.updated = datetime.now().strftime('%y.%m.%d %H:%M:%S.%f')[:-4]
    user.save()
    user_mode.msg_apart_mode = False
    user_mode.save()
    user_created_report(bot, update, created_user=user, text=text)
    new_neighbor_report(bot, update, created_user=user)
    text_success = '<b>Дякую, Ваші дані збережені</b>. Бажаєте подивитись сусідів?'
    bot.sendMessage(text=text_success, chat_id=update.effective_user.id, parse_mode=ParseMode.HTML)
    menu_kbd(bot, update)
    # refresh the cached charts so they include the new/updated user
    prepared_data = prepare_data()
    make_pie(prepared_data)
    make_bars(prepared_data)
def save_user_data(update, context):
    """callbackQuery from reject. save user data

    Handles the '_apart_reject' choice: clears the apartment field, leaves
    apartment-input mode, saves the user and notifies neighbors.
    """
    log.info(log_msg(update))
    user = chosen_owns(update)
    if not user.updated:
        text = 'В базу ДОБАВЛЕН:\n'
    else:
        text = 'В базе ОБНОВЛЕН:\n'
    if update.callback_query.data == '_apart_reject':
        # user declined to share the apartment number
        user_mode = Show.get(user_id=update.effective_user.id)
        user_mode.msg_apart_mode = False
        user.apartment = None
        user_mode.save()
        update.callback_query.message.reply_text(
            text='Квартиру не сохраняем пока что.')
    user.updated = datetime.now().strftime('%y.%m.%d %H:%M:%S.%f')[:-4]
    user.save()
    # TODO: properly do user_created_report
    # user_created_report(update, context, created_user=user, text=text)
    new_neighbor_report(update, context, created_user=user)
    reply_markup = InlineKeyboardMarkup(
        [[InlineKeyboardButton('Меню', callback_data='_menu')]])
    update.callback_query.message.reply_text(text="Данные сохранены.",
                                             parse_mode=ParseMode.HTML,
                                             reply_markup=reply_markup)
def start_command(update, context):
    """handle /start command"""
    log.info(log_msg(update))
    is_changed(update)
    # /start may arrive as a callback query; acknowledge it if so
    callback = update.callback_query
    if callback:
        callback.answer()
    menu_kbd(update, context)
def login(self):
    """ Login to google servers

    Raises:
        UnknownLoginLocation: User is trying to in from an unauthorized unknown device
        BadCredentials: Username or password is incorrect
    """
    log.info('Logging in to googlemail server as {}'.format(
        self.GMAIL_USERNAME))
    try:
        self.server.login(self.GMAIL_USERNAME, self.GMAIL_PASSWORD)
    except SMTPAuthenticationError as ex:
        # 534: sign-in blocked from an unrecognized location/device
        if ex.smtp_code == 534:
            raise UnknownLoginLocation
        # 535: authentication failed
        if ex.smtp_code == 535:
            log.error("Username or password is incorrect")
            raise BadCredentials
    except SMTPSenderRefused as ex:
        log.error(
            "Google blocking login. Go to your gmail and allow access from this location"
        )
        raise
    else:
        # mark the session authenticated only on a clean login
        self.is_loggedin = True
def do_generate(debug, count, id=None, is_response=False):
    """Generate `count` pending tweets from scripts.

    Args:
        debug: when truthy, nothing is written to the database.
        count: number of tweets to generate.
        id: optional script id; when omitted a random periodic script is picked.
        is_response: select from ResponseScript instead of PeriodicScript.

    Returns:
        list of generated tweet texts.
    """
    tweets = []
    for _ in range(count):
        if is_response:
            script = ResponseScript.query.filter_by(id=id).first()
        else:
            if not id:
                # no id given: pick a random periodic script
                ind = random.randrange(0, PeriodicScript.query.count())
                script = PeriodicScript.query[ind]
            else:
                script = PeriodicScript.query.filter_by(id=id).first()
        content = script.content.replace('%{상대}', '(상대ID)')
        tweet = compile_script(content)
        pending_tweet = PendingTweet(content=tweet,
                                     image_keyword=script.image_keyword)
        if not debug:
            db.session.add(pending_tweet)
        msg = f'pending tweet: {tweet}'
        log.info(msg)
        print(msg)
        tweets.append(tweet)
    if not debug:
        db.session.commit()
    log.info('done')
    print('done')
    return tweets
def speech(update, context):
    """Message handler: synthesize the incoming text with the speech client
    (AWS Polly-style API) and reply with the resulting MP3."""
    log.info(
        f'user_id: {update.effective_user.id} name: {update.effective_user.full_name}'
    )
    text = update.message.text
    # number of already stored tracks, used to name the next file
    track_count = len(os.listdir(tracks_dir))
    response = client.synthesize_speech(OutputFormat='mp3',
                                        Text=text,
                                        TextType='text',
                                        VoiceId=langs_arr.get(
                                            update.effective_user.id, 'Maxim'))
    if "AudioStream" in response:
        try:
            with closing(response["AudioStream"]) as stream:
                output = os.path.join(tracks_dir,
                                      str(track_count) + " track-boto.mp3")
                # Open a file for writing the output as a binary stream
                with open(output, "wb") as file:
                    file.write(stream.read())
            track = open(output, 'rb')
            context.bot.sendAudio(chat_id=update.message.chat_id, audio=track)
        except IOError as error:
            log.info(
                f'user_id: {update.effective_user.id} name: {update.effective_user.full_name} {error} appear'
            )
            # print(error)
            # NOTE(review): terminating the whole process on one IO error is drastic
            sys.exit(-1)
def _sync(self, tag, force):
    """Synchronize the deploy for `tag`.

    Calls the repo-specific sync script when one exists; otherwise performs
    the default sync (tag the repo via dulwich) before releasing the lock.

    :return: 0 on success, or the non-zero exit code of a failed sync script.
    """
    repo_name = self.config['repo_name']
    sync_script = '{0}/{1}.sync'.format(self.config["sync_dir"], repo_name)
    if os.path.exists(sync_script):
        log.info('{0} :: Calling sync script at {1}'.format(__name__,
                                                            sync_script))
        proc = subprocess.Popen([sync_script,
                                 '--repo="{0}"'.format(repo_name),
                                 '--tag="{0}"'.format(tag),
                                 '--force="{0}"'.format(force)])
        proc_out = proc.communicate()[0]
        log.info(proc_out)
        if proc.returncode != 0:
            exit_code = 40
            log.error("{0} :: {1}".format(__name__, exit_codes[exit_code]))
            return exit_code
    else:
        # In absence of a sync script -- Tag the repo
        log.debug(__name__ + ' :: Calling default sync.')
        try:
            self._dulwich_tag(tag, self._make_author())
        except Exception as e:
            log.error(str(e))
            raise SartorisError(message=exit_codes[12], exit_code=12)
        self._default_sync()
    self._remove_lock()
    return 0
def sendtel(self, to_list):
    """Send a wake-up SMS to every phone number in `to_list` through the
    51welink SMS gateway and print the parsed XML response fields.

    :param to_list: iterable of phone numbers (strings).
    """
    url = "http://cf.51welink.com/submitdata/Service.asmx/g_Submit"
    log.info("发送短信给: %s" % to_list)
    # credentials and headers are loop-invariant: hoisted out of the loop
    sname = config.smsname      # account
    spwd = config.smspass       # password
    sprdid = config.smsid       # product id (may be empty for some accounts)
    content = '001110'          # message code: wake-up call only
    headers = {"Content-Type": "application/x-www-form-urlencoded"}
    # iterate numbers directly instead of the old `range(len(to_list))` index loop
    for phone in to_list:
        param = {
            'sname': '%s' % sname,      # account
            'spwd': '%s' % spwd,        # password
            'scorpid': '',              # corporate code
            'sprdid': '%s' % sprdid,    # product id
            'sdst': '%s' % phone,       # destination phone number
            'smsg': '%s' % content,     # message body
        }
        data = urllib.parse.urlencode(param).encode(encoding='UTF8')
        # submit the request
        req = urllib.request.Request(url, data, headers)
        response = urllib.request.urlopen(req)
        result = response.read().decode('utf8')
        # parse the returned XML; field meanings per the provider docs
        dom = xml.dom.minidom.parseString(result)
        root = dom.documentElement
        State = root.getElementsByTagName("State")
        MsgID = root.getElementsByTagName("MsgID")
        MsgState = root.getElementsByTagName("MsgState")
        Reserve = root.getElementsByTagName("Reserve")
        print(State[0].firstChild.data)  # State == 0 means submitted OK
        print(MsgID[0].firstChild.data)
        print(MsgState[0].firstChild.data)
        print(Reserve[0].firstChild.data)
def store_to_db(directory_date, directory_base, directory_checksum,
                exiftools_metadata, openmaps_urls, locations):
    """ Stores date, directory name, directory checksum, exiftool metadata,
    openmaps URLs and locations dictionary in the databaase

    Args:
        directory_date: directory original name containing just the date in format yyyy_mm_dd
        directory_base: basename of the photo directory
        directory_checksum: computed checksum of the directory
        exiftools_metadata: photos metadata
        openmaps_urls: list containing all the called openmaps URLs for this directory
        locations: dictionary containing the locations
    Returns:
        True - if successfull; None - if exception occurs
    """
    # bug fix: build the entry before the try block so the exception handler
    # can always reference it (it could previously be unbound when the
    # connection failed before the dict was created)
    db_entry = {
        "date": directory_date,
        "directory": directory_base,
        "directory_checksum": directory_checksum,
        "exiftools_metadata": exiftools_metadata,
        "openmaps_urls": openmaps_urls,
        "locations": locations
    }
    try:
        with MongoConnector() as mongo:
            # upsert keyed by the directory date
            if mongo.find_one({"date": directory_date}):
                mongo.update({"date": directory_date}, {"$set": db_entry})
            else:
                mongo.insert(db_entry)
            return True
    except pymongo_errors.DuplicateKeyError as py_e:
        log.info("Cannot insert {} in the DB: {}".format(db_entry, str(py_e)))
def load_from_db(directory, directory_checksum):
    """ Checks if the directory location information is already stored in the DB.
    Checksum is also computed and compares with the one stored in the DB,
    in case there are changes in the directory

    Args:
        directory: absolute path of the photo directory
        directory_checksum: computed checksum of the directory
    Returns:
        database content corresponding to 'locations' key of the directory
        None - if not present
    """
    try:
        with MongoConnector() as mongo:
            directory_base = os.path.basename(directory)
            # Check if and entry with the directory name is present in DB
            db_dir_metadata = mongo.find_one({'directory': directory_base
                                              }) or None
            # Check if directory has an entry in the db and if so
            # if the checksum from db is the same as the computed one
            if db_dir_metadata and directory_checksum == db_dir_metadata[
                    'directory_checksum']:
                log.info("Loading data from DB...")
                # log.debug (json.dumps(db_dir_metadata['locations'],indent=1))
                return db_dir_metadata['locations']
    except KeyError as e:
        # stored document missing an expected key: fall through and recompute
        log.warning("Check DB structure! Key {} is missing. \
Re-computing result!".format(e))
    except Exception as e:
        log.error(e)
def sample_words():
    """Collect the character set of every training label file and write one
    character per line to WORDDICT."""
    text_dir = '../data/train/txt_train'
    # seed with the empty string so the dictionary always has a blank entry
    count = set([''])
    for txtname in os.listdir(text_dir):
        with open(text_dir + '/' + txtname, 'r', encoding="utf-8") as f:
            for line in f.readlines():
                text = line.split(',')[-1].strip()
                # '###' marks unreadable/ignored regions — skip them
                if text == '###':
                    continue
                chars = set(text)
                count.update(chars)
                log.debug('textSet now: %s' % (chars, ))
    log.info('%s char found.' % (len(count), ))
    for character in count:
        log.debug(character)
    with open(WORDDICT, 'w', encoding='utf-8') as f:
        for character in count:
            f.write(character)
            f.write('\n')
def notifications_kbd(update, _):
    """callbackQuery handler. pattern:^notifications$. Show notifications keyboard settings"""
    log.info(log_msg(update))
    keyboard = [[
        InlineKeyboardButton('В моем доме 🏠', callback_data='_notify_house')
    ], [
        InlineKeyboardButton('В моем подъезде 🔢', callback_data='_notify_section')
    ], [
        InlineKeyboardButton('Выключить оповещения 🔕', callback_data='_notify_OFF')
    ], [InlineKeyboardButton('Меню', callback_data='_menu')]]
    reply_markup = InlineKeyboardMarkup(keyboard)
    user = Show.get(user_id=update.effective_user.id)
    # map the stored notification mode to its human-readable label
    _dict = {
        None: 'Выключено',
        '_notify_OFF': 'Выключено',
        '_notify_section': 'В моем подъезде 🔢',
        '_notify_house': 'В моєм доме 🏠'
    }
    text = f'Сейчас оповещения установлены в режим\n' \
           f'<b>{_dict[user.notification_mode]}</b>\nПолучать оповещения ' \
           f'когда появится новый сосед:'
    update.message.reply_text(parse_mode=ParseMode.HTML,
                              text=text,
                              reply_markup=reply_markup,
                              message_id=update.effective_message.message_id)
    update.callback_query.answer()
def set_floor_kbd(update, _):
    """callbackQuery from set_section_kbd(). Show keyboard to chose its own floor"""
    log.info(log_msg(update))
    user = chosen_owns(update)
    user.section = int(update.callback_query.data[2])
    user.save()
    floors = houses_arr['house_' + str(user.house)]['section_' + str(user.section)]
    # TODO: add proper floor count
    # lay the floor buttons out in rows of at most three, preserving order
    keyboard = [
        [InlineKeyboardButton(str(floor), callback_data='_f' + str(floor))
         for floor in floors[start:start + 3]]
        for start in range(0, len(floors), 3)
    ]
    reply_markup = InlineKeyboardMarkup(keyboard)
    update.callback_query.message.reply_text('На каком этаже?',
                                             reply_markup=reply_markup)
    update.callback_query.answer()
def login():
    ''' Matches the username and password provided by user with the
    username and encrypted password in stored in database '''
    error = None
    if request.method == 'POST':
        user = User.query.filter_by(username=request.form['username']).first()
        if not user:
            error = 'Invalid username'
            log.error(request.form['username'])
            log.error(error)
        # bug fix: base64.decodestring() was removed in Python 3.9 and
        # returned bytes, which never compare equal to the str form value;
        # b64decode + decode restores the intended comparison
        elif base64.b64decode(user.password).decode() != request.form['password']:
            error = 'Invalid password'
            # SECURITY: logging raw passwords leaks credentials into log
            # files — consider removing these log lines entirely
            log.error(request.form['password'])
            log.error(error)
        else:
            session['logged_in'] = True
            log.info("Successful Login")
            return redirect(
                url_for('welcome', username=request.form['username']))
    return render_template('login.html', error=error)
def show_section(update, _, some_section=False): """Here need some documentation""" # TODO: rework that func completely log.info(log_msg(update)) reply_markup = InlineKeyboardMarkup( [[InlineKeyboardButton('Меню', callback_data='_menu')]]) if not some_section: user_query = chosen_owns(update) else: user_query = Show.get(user_id=update.effective_user.id) query = User.select().where(User.house == user_query.house, User.section == user_query.section).order_by( User.floor) neighbors = [str(user) + '\n' for user in query] show_list = ('<b>Жители подъезда № ' + str(user_query.section) + ' Дома № ' + str(user_query.house) + '</b>:\n' + '{}' * len(neighbors)).format(*neighbors) update.message.reply_text(parse_mode=ParseMode.HTML, disable_web_page_preview=True, text=show_list, reply_markup=reply_markup) update.callback_query.answer()
def jubilee(bot, update, created_user):
    """Check if new added user is 'hero of the day' i.e some round number in db"""
    log.info(log_msg(update))
    # milestone counts: 0, 50, 100, ... 1950
    celebration_count = [i for i in range(0, 2000, 50)]
    query = Own.select().where(Own.house, Own.section)
    # per-house registered counts for houses 1..4
    check_list = [query.where(Own.house == i).count() for i in range(1, 5)]
    total = query.count()
    text = f'сусідів 🎇 🎈 🎉 🎆 🍹\nВітаємо\n{created_user.joined_str}'
    for count, house in enumerate(check_list, start=1):
        if house in celebration_count:
            # get_or_create guards against announcing the same milestone twice
            x, created = Jubilee.get_or_create(house=count, count=house)
            if created:
                text = f'В будинку № {count} Вже зареєстровано {house} ' + text
                try:
                    bot.sendMessage(chat_id=-1001076439601, text=text,
                                    parse_mode=ParseMode.HTML)  # test chat
                except BadRequest:
                    bot.sendMessage(chat_id=-1001307649156, text=text,
                                    parse_mode=ParseMode.HTML)
                return
    if total in celebration_count:
        text = f'Вже зареэстровано {total} сусідів 🎇 🎈 🎉 🎆 🍹\nВітаємо\n{created_user.joined_str}'
        x, created = Jubilee.get_or_create(house=0, count=total)
        if created:
            try:
                bot.sendMessage(chat_id=-1001076439601, text=text,
                                parse_mode=ParseMode.HTML)  # test chat
            except BadRequest:
                bot.sendMessage(chat_id=-1001307649156, text=text,
                                parse_mode=ParseMode.HTML)
def new_neighbor_report(bot, update, created_user):
    """Send message for users who enabled notifications

    Notifies subscribers of the new neighbor's house, then the narrower
    set subscribed to the specific section.
    """
    log.info(log_msg(update))
    # query for users who set notifications as _notify_house
    query_params = Show.select(Show.user_id).where(Show.notification_mode == '_notify_house')
    query_users = Own.select(Own.user).where(Own.house == created_user.house)
    query = query_params & query_users
    # prevent telegram blocking spam
    for i, user in enumerate(query):
        if i % 29 == 0:
            time.sleep(1)
        try:
            bot.sendMessage(chat_id=user.user_id, parse_mode=ParseMode.HTML,
                            text=f'Новий сусід\n{created_user.joined_str}')
        except BadRequest as err:
            # report delivery failures to the admin instead of crashing
            bot.sendMessage(chat_id=ADMIN_ID,
                            text=f'failed to send notification for user {user.user_id} {err}',
                            parse_mode=ParseMode.HTML)
    # query for users who set notifications as _notify_section
    query_params = Show.select(Show.user_id).where(Show.notification_mode == '_notify_section')
    query_users = query_users.where(Own.section == created_user.section)
    query = query_params & query_users
    for i, user in enumerate(query):
        if i % 29 == 0:
            time.sleep(1)
        try:
            bot.sendMessage(chat_id=user.user_id, parse_mode=ParseMode.HTML,
                            text=f'Новий сусід\n{created_user.joined_str}')
        except BadRequest as err:
            bot.sendMessage(chat_id=ADMIN_ID,
                            text=f'failed to send notification for user {user.user_id} {err}',
                            parse_mode=ParseMode.HTML)
def update(self):
    """Refresh all weather state from the current request and persist the
    resulting units / user input / hour count back into the session cookie."""
    log.info('STARTING')
    # previous state comes from the 'sw2' session cookie (or defaults)
    self.previous = CookieData(*session.get('sw2', _DEFAULT_COOKIE))
    self.update_units()
    self.update_num_hours()
    self.update_weather_data()
    self.update_current_conditions()
    session['sw2'] = CookieData(units=self.units.name,
                                user_input=self.user_input,
                                num_hours=self.num_hours)
    log.info('FINISHED with %s' % self.user_input)
def update_weather_data(self, weather_getter=weather_for_user_input):
    """Fetch fresh weather data for the requested location.

    `weather_getter` is injectable for testing. Each lookup is recorded in
    the Lookup table regardless of whether the API returned data.
    """
    self.user_input = request.args.get('user_input', self.previous.user_input)
    log.info('using weather API for %s', self.user_input)
    self.weather_data, self.location = weather_getter(self.user_input, API_KEY)
    if not self.weather_data:
        log.warning("didn't get any results from weather API")
    db.session.add(Lookup(self.user_input, self.location))
    db.session.commit()
    # keep one data point per 3 hours of the requested window
    self.data_string = jsonify(self.weather_data[:int(self.num_hours / 3)])
def update_num_hours(self):
    """Parse the requested forecast length (hours) from the request args.

    Falls back to the previous request's value when the arg is absent, and
    to _DEFAULT_NUM_HOURS (with a flash message) when the value is not an
    integer.
    """
    requested = request.args.get('num_hours', self.previous.num_hours)
    try:
        self.num_hours = int(requested)
    except ValueError:
        # Bad user input: warn the user and fall back to the default.
        flash('seanweather didnt like the number of hours, using %d' % _DEFAULT_NUM_HOURS)
        log.error('bad number of hours. request: %s, prev: %s',
                  request.args.get('num_hours'), self.previous.num_hours)
        self.num_hours = _DEFAULT_NUM_HOURS
    log.info('num hours: %s', self.num_hours)
def __new__(cls, *args, **kwargs):
    """Singleton constructor: create and configure the single instance once.

    Subsequent instantiations return the already-configured instance.
    """
    if not cls.__instance:
        # Bug fix: object.__new__() takes no extra arguments in Python 3 —
        # forwarding *args/**kwargs raises TypeError when __new__ is
        # overridden, so pass only the class itself.
        cls.__instance = super(Sartoris, cls).__new__(cls)
        # Call config
        cls.__instance._configure(**kwargs)
        log.info('{0} :: Config - {1}'.format(__name__, str(cls.__instance.config)))
    return cls.__instance
def _create_lock(self):
    """Create the SSH lock file on the deploy target.

    Touches a lock file (named by _get_lock_file_name) inside the deploy
    directory on the remote host so concurrent deploys are blocked.
    """
    log.info('{0} :: SSH Lock create.'.format(__name__))
    lock_path = "{0}{1}{2}".format(self.config['path'],
                                   self.DEPLOY_DIR,
                                   self._get_lock_file_name())
    self.ssh_command_target("touch " + lock_path)
def _dulwich_stage_all(self):
    """Stage every file under the repo's top directory (skipping .git)."""
    repo = Repo(self.config['top_dir'])
    # Walk the tree and stage the files found in each non-.git directory.
    for dirpath, _dirnames, filenames in os.walk(self.config['top_dir']):
        if search(r'\.git', dirpath):
            continue
        relative_path = dirpath.split('./')[-1]
        to_stage = [relative_path + '/' + name for name in filenames]
        log.info(__name__ + ' :: Staging - {0}'.format(to_stage))
        repo.stage(to_stage)
def stop_start_test():
    """Restart the directory server and verify naming contexts survive.

    Stops and restarts the server via DirSrvTools, re-runs setup(), then
    asserts that the NAMINGCONTEXTS search still returns results.
    """
    # dunno why DirSrv.start|stop writes to dirsrv error-log
    conn.errlog = "/tmp/dirsrv-errlog"
    # Truncate the error log before the restart.
    open(conn.errlog, "w").close()
    DirSrvTools.stop(conn)
    log.info("server stopped")
    DirSrvTools.start(conn)
    log.info("server start")
    # Give the server time to come up before searching — TODO confirm 5s is enough.
    time.sleep(5)
    # save and restore conn settings after restart
    tmp = conn.added_backends, conn.added_entries
    setup()
    conn.added_backends, conn.added_entries = tmp
    assert conn.search_s(
        *utils.searches['NAMINGCONTEXTS']), "Missing namingcontexts"
def abort(self, _):
    """Abort an in-progress deploy.

    Logs and archives the abort, then removes the lock file so a new
    deploy can start. Always returns 0.
    """
    message = 'ABORTING git deploy'
    log.info('{0} :: {1}'.format(__name__, message))
    self.deploy_log.log('user({0}) {1}'.format(self.config['user.name'], message))
    self.deploy_log.log_archive()
    # Drop the lock file so the next deploy is not blocked.
    self._locker.remove_lock()
    return 0
def drop_added_entries(conn):
    """Best-effort cleanup of entries, backends and replicas added by tests.

    Pops and deletes every tracked entry, then drops every tracked backend
    and replica. Individual failures are logged and never propagate.
    """
    while conn.added_entries:
        entry = conn.added_entries.pop()
        try:
            # Bug fix: this line previously logged conn.added_backends
            # instead of the entry actually being removed.
            log.info("removing entries %r" % entry)
            conn.delete_s(entry)
        except ldap.NOT_ALLOWED_ON_NONLEAF:
            log.error("Entry is not a leaf: %r" % entry)
        except ldap.NO_SUCH_OBJECT:
            log.error("Cannot remove entry: %r" % entry)
    log.info("removing backends %r" % conn.added_backends)
    for suffix in conn.added_backends:
        try:
            drop_backend(conn, suffix)
        except Exception:
            # Was a bare `except:` — narrowed so KeyboardInterrupt/SystemExit
            # are not swallowed; cleanup stays best-effort.
            log.exception("error removing %r" % suffix)
    for replica in conn.added_replicas:
        try:
            drop_backend(conn, suffix=None, bename=replica)
        except Exception:
            log.exception("error removing %r" % replica)
def login_1():
    """Match username and password with the credentials stored in data.xml.

    On success sets session['logged_in'] and redirects to the welcome page;
    otherwise re-renders the login form with an error message.
    """
    error = None
    if request.method == 'POST':
        data = getrecords()
        if request.form['username'] not in data:
            error = 'Invalid username'
            log.error(request.form['username'])
            log.error(error)
        # NOTE(review): credentials are compared in plaintext — consider
        # hashing the stored passwords.
        elif data[request.form['username']] != request.form['password']:
            error = 'Invalid password'
            # Security fix: never write the submitted password to the log.
            log.error(error)
        else:
            session['logged_in'] = True
            log.info("Successful Login")
            return redirect(url_for('welcome', username=request.form['username']))
    return render_template('login.html', error=error)
def login():
    """Match the submitted username/password against the User table.

    The stored password is base64-encoded; on success sets
    session['logged_in'] and redirects to the welcome page, otherwise
    re-renders the login form with an error message.
    """
    error = None
    if request.method == 'POST':
        user = User.query.filter_by(username=request.form['username']).first()
        if not user:
            error = 'Invalid username'
            log.error(request.form['username'])
            log.error(error)
        # Fix: base64.decodestring was deprecated and removed in Python 3.9;
        # decodebytes is the drop-in replacement.
        # NOTE(review): decodebytes returns bytes while the form value is
        # str — confirm how user.password is encoded at registration time.
        elif base64.decodebytes(user.password) != request.form['password']:
            error = 'Invalid password'
            # Security fix: never write the submitted password to the log.
            log.error(error)
        else:
            session['logged_in'] = True
            log.info("Successful Login")
            return redirect(url_for('welcome', username=request.form['username']))
    return render_template('login.html', error=error)
def _default_sync(self):
    """Run the default sync hooks: push from the client, then pull on the target.

    Executes the local default-client-push hook as a subprocess, then runs
    the default target hook over SSH, logging the output of both.
    """
    # Client-side hook:
    #   {% PATH %}/.git/deploy/hooks/default-client-push origin master
    log.info('{0} :: Calling default sync - pushing changes ... '.format(__name__))
    push_hook = '{0}{1}{2}'.format(self.config['client_path'],
                                   self.config['hook_dir'],
                                   DEFAULT_CLIENT_HOOK)
    proc = subprocess.Popen([push_hook, self.config['remote'], self.config['branch']],
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE)
    log.info('PUSH -> ' + '; '.join(filter(lambda x: x, proc.communicate())))
    # Target-side hook over SSH:
    #   ssh user@target {% PATH %}/.git/deploy/hooks/default-client-pull origin master
    log.info('{0} :: Calling default sync - pulling to target'.format(__name__))
    pull_cmd = '{0}{1}{2} {3} {4}'.format(self.config['path'],
                                          self.config['hook_dir'],
                                          DEFAULT_TARGET_HOOK,
                                          self.config['remote'],
                                          self.config['branch'])
    ret = self.ssh_command_target(pull_cmd)
    log.info('PULL -> ' + '; '.join(filter(lambda x: x, ret['stdout'])))
def setupSSL_test():
    """Initialize an NSS cert store with a self-signed cert and enable SSL.

    Creates an empty-password cert database in the server's cert dir,
    generates a self-signed 'localhost' certificate, then configures SSL
    through DSAdminTools.setupSSL.
    """
    ssl_args = {
        'secport': 636,
        'sourcedir': None,
        'secargs': {'nsSSLPersonalitySSL': 'localhost'},
    }
    cert_dir = conn.getDseAttr('nsslapd-certdir')
    assert cert_dir, "Cannot retrieve cert dir"
    log.info("Initialize the cert store with an empty password: %r", cert_dir)
    # Fix: close file handles deterministically (fd_null and the urandom
    # handle were previously leaked).
    with open('/dev/null', 'w') as fd_null:
        open('%s/pin.txt' % cert_dir, 'w').close()
        cmd_initialize = 'certutil -d %s -N -f %s/pin.txt' % (cert_dir, cert_dir)
        # Fix: wait for DB initialization — the second certutil call raced
        # against this one, as neither process was waited on.
        Popen(cmd_initialize.split(), stderr=fd_null).wait()
        log.info("Creating a self-signed cert for the server in %r" % cert_dir)
        cmd_mkcert = 'certutil -d %s -S -n localhost -t CTu,Cu,Cu -s cn=localhost -x' % cert_dir
        with open("/dev/urandom") as noise:
            Popen(cmd_mkcert.split(), stdin=noise, stderr=fd_null).wait()
    log.info("Testing ssl configuration")
    ssl_args.update({'dsadmin': conn})
    DSAdminTools.setupSSL(**ssl_args)
def revert(self, args):
    """
    Roll the repository back to a tag.

    * write a lock file
    * reset to last or specified tag
    * call sync hook with the prefix (repo) and tag info

    :param args: parsed CLI args; args.tag (optional) selects the target
                 tag, args.auto_sync triggers an immediate sync.
    :returns: 0 on success, or the result of self._sync(args) when
              auto_sync is set.
    :raises GitDeployError: when no lock is held, when fewer than two
            deploy tags exist and no tag was given, or when the tag's
            commit cannot be matched in the commit list.
    """
    if not self._locker.check_lock():
        raise GitDeployError(message=exit_codes[30])
    # Extract tag on which to revert
    tag = ''
    if hasattr(args, 'tag'):
        tag = args.tag
    if not hasattr(args, 'tag') or not tag:
        # revert to previous to current tag
        repo_tags = GitMethods()._get_deploy_tags()
        if len(repo_tags) >= 2:
            # Second-to-last deploy tag is the state before the current one.
            tag = repo_tags[-2]
        else:
            raise GitDeployError(message=exit_codes[36], exit_code=36)
        logline = 'REVERT -> no tag specified, using: \'{0}\''.format(tag)
        log.info(__name__ + ' :: ' + logline)
        self.deploy_log.log('user(' + self.config['user.name'] + ') ' + logline)
    #
    # Rollback to tag:
    #
    #   1. get a commit list
    #   2. perform no-commit reverts
    #   3. commit
    #
    logline = 'REVERT -> Attempting to revert to tag: \'{0}\''.format(tag)
    log.info(__name__ + ' :: ' + logline)
    self.deploy_log.log('user(' + self.config['user.name'] + ') ' + logline)
    tag_commit_sha = GitMethods()._get_commit_sha_for_tag(tag)
    commit_sha = None
    # Revert each commit newer than the tag, newest first, until the
    # tagged commit is reached.
    for commit_sha in GitMethods()._git_commit_list():
        if commit_sha == tag_commit_sha:
            break
        GitMethods()._git_revert(commit_sha)
    # Ensure the commit tag was matched
    if commit_sha != tag_commit_sha or not commit_sha:
        # No match: undo the partial reverts before failing.
        GitMethods()._dulwich_reset_to_tag()
        raise GitDeployError(message=exit_codes[35], exit_code=35)
    GitMethods()._dulwich_commit(GitMethods()._make_author(),
                                 message='Rollback to {0}.'.format(tag))
    logline = 'REVERT -> Reverted to tag: \'{0}\', call "git deploy ' \
              'sync" to persist'.format(tag)
    log.info(__name__ + ' :: ' + logline)
    self.deploy_log.log('user(' + self.config['user.name'] + ') ' + logline)
    if args.auto_sync:
        return self._sync(args)
    return 0
def dummy(self, args):
    """Dummy method to test the entry point.

    Logs the received CLI args and a passive-call marker; performs no work.
    """
    # Fix: log-message typo 'CLI ars' -> 'CLI args'.
    log.info(__name__ + " :: CLI args -> {0}".format(args))
    log.info(__name__ + " :: Passive call to the CLI.")
def status_test():
    """Fetch the replica agreement status and assert it is non-empty."""
    agreement_status = conn.replica.status(conn.agreement_dn)
    log.info(agreement_status)
    assert agreement_status
#!/usr/bin/python
"""Check HBase cluster health and that every configured table exists."""
import sys
import re

import config
from config import log

# $1 status lines like '2 servers, 0 dead, 8.5000 average load'
# $2 table lines (whitespace-separated table names)
statusStr = sys.argv[1]
if '0 dead' in statusStr:
    log.info('Hbase status ok')
else:
    log.error('Hbase status error')
    # if error found, no table checking will be done.
    sys.exit(1)

tblString = sys.argv[2]
tblList = re.split(r'\s+', tblString)
# Collect every expected table missing from the reported list.
tblNotFound = [tbl for tbl in config.tables if tbl not in tblList]

if not tblNotFound:
    log.info('hbase tables ok')
else:
    # Bug fix: missing tables were collected but never reported.
    log.error('hbase tables missing: %s', tblNotFound)
# Compare two host-mapping files: every src entry must appear identically
# in dest. Python 2 syntax (`print` statement).
# NOTE(review): the usage/exit lines below appear unguarded — presumably an
# `if len(sys.argv) < 3:` check precedes this chunk in the full source; confirm.
print 'usage: host.py <src hosts file> <target hosts file>'
sys.exit(1)
srcFile=sys.argv[1]
destFile=sys.argv[2]
#print "comparing file " + srcFile + " with " + destFile
hMap=fileUtil.readFileAsDict(destFile)
cfgMap=fileUtil.readFileAsDict(srcFile)
#print hMap
errorCount=0
for k in cfgMap.keys():
    if(k in hMap and cfgMap[k] == hMap[k]):
        continue
    else:
        # NOTE(review): this uses `logging.error` while the summary below
        # uses `log.info`/`log.error` — confirm which logger is intended.
        logging.error('ip host map %s -> %s not found in %s', k, cfgMap[k], destFile)
        errorCount+=1
# Derive the target host name from the dest file name
# (e.g. "web1.hosts" -> "web1"); default to 'localhost'.
guessHost='localhost'
if(destFile.endswith('.hosts')):
    guessHost=destFile[0:destFile.index('.hosts')]
if(errorCount == 0):
    log.info('host file on %s : ok', guessHost)
else:
    log.error('host file on %s : error', guessHost)
#!/usr/bin/python import sys import re import xml.etree.ElementTree as ET import config from config import log if len(sys.argv) < 2: print "usage: checkCoprocessor.py <src hosts file> <target hosts file>" sys.exit(1) srcFile = sys.argv[1] # check if coprocessor configed tree = ET.parse(srcFile) root = tree.getroot() classes = [] for prop in root: nameTxt = prop[0].text if nameTxt == "hbase.coprocessor.region.classes": classes = re.split("\s*,\s*", prop[1].text) break diffSet = set(config.coprocessors).difference(set(classes)) if len(diffSet) == 0: log.info("Coprocessor: ok") else: log.error("Coprocessor: not ok, different with config.ini: %s", diffSet)