def run(self):
    """Main wipe-out loop: locate the main city and scan land info once,
    then forever conscript troops, analyse each army for wipe-out targets,
    and sleep until the next army is expected to be ready."""
    self.init_main_city_location()
    self.init_wipe_out_land_info()
    while True:
        # reset per-round bookkeeping
        self.reset_data()
        # conscript troops for every hero
        self.conscription()
        log.info(self.army_troops_list)
        for index in range(0, config.army_count):
            self.hero_wipe_out_analysis(index, self.army_troops_list[index])
        min_value = min(self.min_wait_duration_list)
        # 12 * 50 * 50 looks like the "nothing is waiting" sentinel filled in
        # by reset_data() — TODO confirm against reset_data()
        if min_value == 12 * 50 * 50:
            sleep_duration = 20
        else:
            # never poll more often than every 20 seconds
            sleep_duration = max(min_value, 20)
        log.info("睡眠" + str(sleep_duration) + "秒")
        time.sleep(sleep_duration)
def send_email(email, message):
    """Deliver *message* to *email* via the configured SMTP-over-SSL server.

    :param email: recipient address; also written into the To: header.
    :param message: an email.message.Message (e.g. MIMEMultipart) to send.
    """
    log.info("Sending email to {}".format(email))
    # BUG FIX: Message.__setitem__ *appends* a header, and callers reuse one
    # message object across a recipient loop — without this delete every
    # recipient after the first got an ever-growing stack of To: headers.
    # __delitem__ is a no-op when the header is absent, so this is safe.
    del message['To']
    message['To'] = email
    with smtplib.SMTP_SSL(config.mail_smtp, config.ssl_port) as server:
        server.login(config.sender_mail_address, config.sender_mail_password)
        server.sendmail(config.sender_mail_address, email, message.as_string())
    log.info("Sent")
def send_twitter_pending_notifications(alert):
    """Every ``alert.frequency`` seconds, batch all pending notifications
    belonging to *alert* into one email per recipient and send it.

    Runs forever; intended to be executed on its own thread.

    :param alert: alert whose pending notifications should be flushed.
    """
    while True:
        time.sleep(alert.frequency)
        log.info(
            "{} - {} seconds passed. Checking pending notifications".format(
                alert.name, alert.frequency))
        for email in pending_notifications:
            body = ""
            # BUG FIX: the original removed items from the very list being
            # iterated, which makes the iterator skip the element following
            # each removal — every other matching notification was silently
            # left queued. Iterate a snapshot and remove from the live list.
            for notification in list(pending_notifications[email]):
                if alert.name == notification.alert.name:
                    body = body + BASIC_BODY.format(
                        user=notification.tweet.get_username(),
                        alias=notification.tweet.get_alias(),
                        date=notification.tweet.get_created_at(),
                        keyword=notification.keyword,
                        tweet=notification.tweet.get_text(),
                        tweet_link="https://twitter.com/{}/status/{}".format(
                            notification.tweet.get_username(),
                            notification.tweet.get_id()))
                    pending_notifications[email].remove(notification)
            if body:
                message = MIMEMultipart("alternative")
                message['Subject'] = BATCH_NOTIFICATION_TITLE.format(
                    alert.name, alert.frequency / 60)
                message['From'] = config.sender_mail_address
                batch_body = BATCH_BODY.format(tweets=body)
                message.attach(MIMEText(batch_body, 'html', 'utf-8'))
                log.info("Sending pending notifications for {}".format(email))
                send_email(email, message)
def create_pending_notification(notification):
    """Queue *notification* for every recipient on its alert's mailing list.

    :param notification: notification to add to the per-email pending queues.
    """
    for email in notification.alert.mail_list:
        log.info("Creating pending notification for {}".format(email))
        # setdefault replaces the original's explicit membership check:
        # one lookup instead of two, and no branch to maintain
        pending_notifications.setdefault(email, []).append(notification)
def write(self):
    """Persist the database (a dict subclass) to its YAML file.

    No-op unless the in-memory state was flagged as modified.
    """
    if not self.modified:
        return
    with open(self.dbFile, 'w') as fh:
        log.info("Saving database '%s' ..." % self.name)
        yaml.dump(dict(self), fh)
def fit_conscription(self, physical):
    """Pick the hero with the longest conscription duration and, when that
    duration would outlast the stamina-refill window, scale his conscription
    down so it finishes exactly when stamina is full.

    :param physical: current stamina value (refills toward 130).
    """
    # find the hero whose conscription takes longest
    max_duration = 0
    max_index = -1  # index of that hero
    for index in range(0, 3):
        duration = assistant.get_hero_conscription_duration(
            self.hwnd, index)
        log.info("武将 " + str(index + 1) + "征兵时长:" + str(duration))
        if duration > max_duration:
            max_duration = duration
            # BUG FIX: the original incremented max_index on every loop pass,
            # so it always ended at the last hero instead of the argmax
            max_index = index
    log.info("最大征兵时长:%d" % max_duration)
    # usable duration: seconds until stamina is back to full
    # (50 * 50 / 20 seconds per stamina point — TODO confirm the rate)
    enable_duration = (130 - physical) * 50 * 50 / 20
    log.info("可用征兵时长:%d" % enable_duration)
    if max_duration > enable_duration and max_duration != 0:
        percent = enable_duration / max_duration
        log.info("征兵时间大于剩余体力恢复:占比:%s" % str(percent))
        event.click_hero_conscription_percent(self.hwnd, max_index, percent)
    else:
        log.info("征兵时间小于剩余体力恢复")
def new_posts(feed):
    """Return True and refresh the cached feed when *feed* carries new media
    ids for its user; otherwise log that nothing changed (returns None).

    A feed whose recent_media_ids is None on either side is treated as
    unchanged — presumably the first-poll case; verify against the caller.
    """
    if (feed.recent_media_ids is not None
            and __users_feeds[feed.userid].recent_media_ids is not None
            and feed.recent_media_ids != __users_feeds[feed.userid].recent_media_ids):
        log.info("Feed changes detected for user {}".format(feed.username))
        __users_feeds[feed.userid] = feed
        return True
    log.info("No changes detected for user {}".format(feed.username))
def update_datas(self, new_datas: List[dict], collection: Collection) -> None:
    """Update the documents of the given collection.

    Compares *new_datas* positionally against the documents currently stored
    (looked up by 'delegacia'), applies `$set` updates for the ones that
    changed, mirrors the refreshed collection to JSON, and inserts any
    entries whose 'delegacia' is not stored yet.

    :param collection: collection whose documents will be updated.
    :param new_datas: scraped data to be applied.
    """
    # stored documents with Mongo's '_id' stripped so dicts compare equal
    collection_datas: List[dict] = remove_element(
        self.__get_documents(collection, field='delegacia'), '_id')
    # positional diff — assumes new_datas and collection_datas share ordering
    # and len(new_datas) >= len(collection_datas); TODO confirm with caller
    to_update: List[dict] = [
        new_datas[value] for value in range(len(collection_datas))
        if new_datas[value] != collection_datas[value]
    ]
    old_datas: List[dict] = [
        collection_datas[value] for value in range(len(collection_datas))
        if collection_datas[value] != new_datas[value]
    ]
    if len(to_update) != 0:
        info(
            f'Iniciando atualização de {len(to_update)} dado(s) na collection {collection.name}.'
        )
        # old_datas[i] and to_update[i] describe the same document by
        # construction of the two parallel comprehensions above
        for value in range(len(to_update)):
            collection.update_one(old_datas[value], {'$set': to_update[value]})
        # mirror the refreshed collection to a JSON file
        write.write_json(
            collection.name,
            remove_element(self.__get_documents(collection), '_id'))
        info("Dados atualizados com sucesso.")
    else:
        info(f'Não há dados a serem atualizados.')
    # any count difference means the scrape found documents we never stored
    if len(new_datas) != len(collection_datas):
        to_insert: List[dict] = []
        info(
            f'Encontrado {len(new_datas) - len(collection_datas)} novo(s) dado(s) obtido durante o scraping.'
        )
        # insert only entries whose 'delegacia' is not in the collection yet
        for value in range(len(new_datas)):
            datas: dict = self.__get_documents(
                collection, {'delegacia': new_datas[value]['delegacia']})
            if datas is None:
                to_insert.append(new_datas[value])
        self.insert(to_insert, collection)
        info(
            f'{len(new_datas) - len(collection_datas)} novo(s) dado(s) inserido na collection {collection.name}.'
        )
def __init__(
    self,
    name='DB',
    dbFile=None,
    outDir="",
    version="",
    verbose=False,
    stream=sys.stdout,
    outExt=".h5",
    inputPath=None,
    localityFeature="",
    numSlices=1,
    localityFeatureRange=(),
    sidebandMargin=0.25,
    logLevel="INFO",
):
    """Initialise the Database (a dict subclass) and reload a previously
    saved YAML session from *dbFile* when that file exists.

    :param dbFile: path of the YAML persistence file; may be None (no reload).
    :param inputPath: input data path; its extension is cached in self.pathExt.
    :param logLevel: level applied to the module-level logger.
    """
    super(Database, self).__init__()
    self.name = name
    self.verbose = verbose
    self.stream = stream
    self.version = version
    self.outExt = outExt
    self.dbFile = dbFile
    self.outDir = outDir
    self.localityFeature = localityFeature
    self.inputPath = inputPath
    self.numSlices = numSlices
    self.localityFeatureRange = localityFeatureRange
    self.sidebandMargin = sidebandMargin
    ## determine file format from the input path's extension
    # (raw string fixes the invalid-escape warning; the None guard fixes a
    # TypeError on the default inputPath=None, which re.match rejected)
    self.pathExt = None
    if self.inputPath:
        match = re.match(r"^.*\.(?P<ext>\w+)$", self.inputPath)
        if match:
            self.pathExt = match.group("ext")
    ## setup logging
    log.setLevel(logLevel)
    log.debug("Initialzing Database ...")
    # reload a previously saved session; guard dbFile=None, which
    # os.path.isfile would reject with a TypeError
    if self.dbFile and os.path.isfile(self.dbFile):
        with open(self.dbFile) as db:
            log.info("Loading database '%s' ..." % self.dbFile)
            # safe_load: write() only ever stores plain dict data, and
            # yaml.load without an explicit Loader is unsafe and deprecated
            d = yaml.safe_load(db)
            if d:
                self.update(d)
    self.modified = False
def army_expedition(self, point):
    """Jump to *point* on the map, select the tile there, and open the
    expedition menu for it."""
    log.info("定位到指定位置")
    self.location_jump(point)
    for message, action in (
        ("地图放大", event.map_enlarge),
        ("点击土地", event.click_center),
        ("地图还原", event.map_reduction),
        ("点击出征菜单按钮", event.click_army_expedition_menu),
    ):
        log.info(message)
        action(self.hwnd)
def wipe_out(self, point):
    """Jump to *point* on the map, select the tile there, and open the
    wipe-out menu for it."""
    log.info("定位到指定位置")
    self.location_jump(point)
    for message, action in (
        ("地图放大", event.map_enlarge),
        ("点击土地", event.click_center),
        ("地图还原", event.map_reduction),
        ("点击扫荡菜单按钮", event.click_wipe_out_menu),
    ):
        log.info(message)
        action(self.hwnd)
def send_instagram_notification(feed, alert):
    """Email every recipient of *alert* about the newest post in *feed*."""
    log.info("Sending instagram notification")
    mail = MIMEMultipart("alternative")
    mail['Subject'] = INSTA_EMAIL_SUBJECT.format(alert.name, feed.username)
    mail['From'] = config.sender_mail_address
    html = INSTA_EMAIL_BODY.format(user=feed.username,
                                   image_url=feed.get_image_url(),
                                   caption=feed.get_caption(),
                                   date=time.asctime())
    mail.attach(MIMEText(html, 'html', 'utf-8'))
    for recipient in alert.mail_list:
        send_email(recipient, mail)
def main():
    """Start the Instagram polling thread and/or the Twitter stream,
    depending on which alert types are configured."""
    if get_alerts_by_type(TYPE_INSTA):
        log.info("*** STARTING INSTAGRAM MONITORING THREAD***")
        Thread(target=instagram_thread, daemon=True).start()
    if get_alerts_by_type(TYPE_TWITTER):
        log.info("*** STARTING TWITTER MONITORING ***")
        api = auth()
        listener = MyStreamListener()
        stream = tweepy.Stream(auth=api.auth, listener=listener)
        stream.filter(follow=get_user_ids(api),
                      is_async=True,
                      stall_warnings=True)
        setup_periodic_notifications()
def get_user_media(username):
    """Scrape the public Instagram profile page of *username* and return an
    InstagramUserFeed with profile stats and the most recent posts.

    Falls back to an InstagramUserFeed built from a partial (possibly empty)
    dict when the page layout or the JSON payload is not what we expect.
    """
    result = {}
    r = requests.get('https://www.instagram.com/' + username)
    # profile data is embedded in the page as a window._sharedData JSON blob
    data_search = re.search(
        '<script type="text/javascript">window._sharedData = (.*);</script>',
        r.text, re.IGNORECASE)
    if data_search:
        tmp = data_search.group(1)
        data = json.loads(tmp)
        try:
            user = data['entry_data']['ProfilePage'][0]['graphql']['user']
            result['user_id'] = user['id']
            result['user_username'] = user['username']
            result['follower'] = user['edge_followed_by']['count']
            result['follows'] = user['edge_follow']['count']
            result['media_count'] = user['edge_owner_to_timeline_media'][
                'count']
            result['media'] = []
            result['media_ids'] = set()
            for post in user['edge_owner_to_timeline_media']['edges']:
                # flatten the raw GraphQL node into just the fields we use
                post = {
                    'id': post['node']['id'],
                    'timestamp': post['node']['taken_at_timestamp'],
                    'is_video': post['node']['is_video'],
                    'caption':
                    post['node']['edge_media_to_caption']['edges'][0]['node']
                    ['text'] if post['node']['edge_media_to_caption']['edges']
                    else "Could not find caption",
                    'thumbnail': post['node']['thumbnail_src'],
                    'image': post['node']['display_url']
                }
                result['media'].append(post)
                result['media_ids'].add(post['id'])
        except KeyError as exception:
            # payload shape changed — return whatever was collected so far
            log.error(
                'Unexpected response retrieving {} info: {!r}\n\nData: {}'.
                format(username, exception, data))
            return InstagramUserFeed(result)
        log.info('Scraped ' + result['user_username'] + ' and ' +
                 str(len(result['media'])) + ' posts')
    else:
        log.error('Failed to extract meta-information from HTML page')
    return InstagramUserFeed(result)
def init_wipe_out_land_info(self):
    """Open the interior detail page and collect wipe-out land info for
    every land level from 1 to 9, then navigate back out."""
    log.info("打开内政页面")
    event.click_interior_menu(self.hwnd)
    log.info("打开内政详情页面")
    event.click_interior_detail_menu(self.hwnd)
    log.info("重置土地统计选项")
    event.reset_land_option(self.hwnd)
    for level in range(1, 10):
        self.init_wipe_out_land_by_level(level)
    for leave_page in (event.click_page_close, event.click_page_return):
        log.info("返回上一页")
        leave_page(self.hwnd)
def __database(cls, db_name: Optional[str] = 'scraping') -> Database:
    """Return the database named *db_name*, creating it first when absent.

    :param db_name: name of the database to fetch or create.
    :raise OperationFailure: creation may fail; the error is logged and the
        database handle is fetched anyway.
    :return: the requested database.
    """
    if db_name in cls.__CONN.list_database_names():
        return cls.__CONN.get_database(db_name)
    try:
        db: Database = cls.__CONN[db_name]
        info(f'Database {db_name} criado.')
        return db
    except OperationFailure as e:
        error(f'Erro ao criar o database: {e.__str__()}')
    return cls.__CONN.get_database(db_name)
def send_twitter_notification(notification):
    """Email each recipient of the notification's alert about the matching
    tweet, rendered through the BASIC_BODY template."""
    log.info("Sending twitter notification")
    tweet = notification.tweet
    alert = notification.alert
    mail = MIMEMultipart("alternative")
    mail['Subject'] = BASIC_NOTIFICATION_TITLE.format(
        alert.name, tweet.get_alias())
    mail['From'] = config.sender_mail_address
    html = BASIC_BODY.format(
        user=tweet.get_username(),
        alias=tweet.get_alias(),
        date=tweet.get_created_at(),
        tweet=tweet.get_text(),
        keyword=tweet.contains_word(alert.whitelist, alert.whitelist_regex),
        tweet_link="https://twitter.com/{}/status/{}".format(
            tweet.get_username(), tweet.get_id()))
    mail.attach(MIMEText(html, 'html', 'utf-8'))
    for recipient in alert.mail_list:
        send_email(recipient, mail)
def tired_hero_replace(self):
    """Check each configurable army and swap out heroes that are exhausted,
    then return to the home page."""
    log.info("循环判断和替换体力不足的武将")
    # note: only army index 3 is scanned (range(3, 4)), matching the original
    for army in range(3, 4):
        log.info("判断是否能配置武将状态:第 %d 个部队" % army)
        if not assistant.is_city_army_enable_setting(self.hwnd, army):
            log.info("武将队伍不能配置状态:1->征兵;2->行军;3->返回;4->练兵;")
            continue
        self.tired_hero_replace_single(army, army == 3)
    log.info("返回上一页:主页")
    event.click_page_return(self.hwnd)
def generate_report(domaine: str):
    """Render the HTML report for *domaine* from its Jinja2 template.

    :param domaine: report domain; must be one of DOMAINES and have an
        entry in the data-source dispatch table below.
    """
    template_name = "template_{}.html".format(domaine)
    template_path = os.path.join(os.getcwd(), TEMPLATES_FOLDER, template_name)
    report_name = "rapport_{}.html".format(domaine)
    report_path = os.path.join(os.getcwd(), OUTPUT_FOLDER, report_name)
    if domaine not in DOMAINES:
        log.error("Domaine {} inconnu".format(domaine))
        return
    # explicit dispatch — BUG FIX: the original if/elif chain left `datas`
    # unbound (NameError) for any domain in DOMAINES other than these two
    sources = {"client": DonneesClient, "paiement": DonneesPaiement}
    if domaine not in sources:
        log.error("Domaine {} inconnu".format(domaine))
        return
    log.info("Génération du rapport {}...".format(report_name))
    datas = sources[domaine]().tags
    # Jinja2
    DefaultTemplater(template_path, report_path).render(datas)
    log.info("Rapport {} généré !".format(report_name))
def main() -> None:
    """Consume one pending entry from the 'todo' redis stream, run the
    requested action against all devices, and acknowledge it with xdel."""
    response = redis_app.xread(streams={'todo': 0}, count=1)
    log.info('Response', response=response)
    if not response:
        return
    devices = get_devices()
    if not devices:
        return
    # renamed from `id`, which shadowed the builtin
    entry_id, action = parse(response)
    try:
        make(devices, action, entry_id.decode())
    except Exception as exception:
        log.warn('Exception triggered', exception=exception)
        return
    finally:
        # BUG FIX: the original leaked device connections when make() raised
        # (it returned before disconnect_devices); finally covers both paths
        disconnect_devices(devices)
    # only acknowledge the entry when the action succeeded
    redis_app.xdel('todo', entry_id)
def enter_city_page(self):
    """Navigate from the mark-location menu into the city page."""
    for message, action in (
        ("点击标记定位菜单", event.click_mark_location_menu),
        ("点击主城项", event.click_mark_location_main_city),
        ("点击城池菜单", event.click_city_menu),
    ):
        log.info(message)
        action(self.hwnd)
def run(self):
    """Main city loop: forever replace tired heroes, dispatch heroes, and
    sleep a fixed interval between rounds."""
    while True:
        # reset per-round bookkeeping
        self.reset_data()
        log.info("进入城池页面")
        self.enter_city_page()
        log.info("疲倦武将替换")
        self.tired_hero_replace()
        log.info("武将循环出征")
        self.hit_the_ground()
        # BUG FIX: the original logged max(min(self.min_wait_duration_list), 20)
        # as the sleep time but actually slept a hard-coded 3*60 seconds; the
        # log now reports the real sleep. TODO: confirm whether the intent was
        # instead to sleep the computed min-wait value (as the other run() does).
        sleep_seconds = 3 * 60
        log.info("睡眠" + str(sleep_seconds) + "秒")
        time.sleep(sleep_seconds)
def location_jump(self, point, duration=2):
    """Open the map menu, enter *point*'s coordinates, and jump to them.

    :param point: target map coordinates.
    :param duration: wait forwarded to the jump-button click — presumably
        seconds to allow the jump animation; confirm in event module.
    """
    log.info("点击地图菜单")
    event.click_map_menu(self.hwnd)
    log.info("输入坐标")
    event.location_input(self.hwnd, point)
    log.info("点击坐标跳转按钮")
    event.click_location_jump_button(self.hwnd, duration=duration)
def __get_records(self) -> dict:
    """Fetch the configured crime's records from the occurrences table,
    clean the values, and return them keyed by lower-cased header name.

    :raise ValueError: if the configured crime is not present in the table.
    :return: dict mapping each column header (e.g. month) to a float count.
    """
    table_header: list = extract_table_value(self.__id_table, 'th')
    table_datas: list = extract_table_value(self.__id_table, 'td')
    try:
        # position of the crime-name cell marks the start of its row
        key_word: int = table_datas.index(self.__crime)
    except ValueError as e:
        error(
            f'Erro ao obter os dados da região {self.__region}.\n Detalhes: {e.__str__()}'
        )
        raise ValueError(
            f'O crime {self.__crime} não está presente na tabela.')
    else:
        # slice the row: the crime name followed by one cell per header
        records: list = table_datas[key_word:key_word + len(table_header)]
        # headers minus the 'Natureza' (crime-name) column, lower-cased
        keys: list = list(
            map(lambda to_lower: to_lower.lower(),
                filter(lambda value: value != 'Natureza', table_header)))
        # drop the crime-name cell so only numeric columns remain
        records.pop(0)
        # '...' marks missing data on the site; treat it as zero
        values: list = list(
            map(lambda value: float(value.replace('...', '0')), records))
        info(f'Registros da região {self.__region} obtidos.')
        return dict(zip(keys, values))
def conscription(self):
    """Enter the main city page, run conscription for every army hero,
    then navigate back."""
    for message, action in (
        ("点击标记定位菜单", event.click_mark_location_menu),
        ("点击主城项", event.click_mark_location_main_city),
        ("点击城池菜单", event.click_city_menu),
    ):
        log.info(message)
        action(self.hwnd)
    # conscript every hero slot in turn
    for slot in range(config.army_count):
        self.hero_conscription(slot)
    log.info("返回上一页")
    event.click_page_return(self.hwnd)
def init_main_city_location(self):
    """Read the main city's map coordinates once and cache them in config."""
    log.info("点击标记定位菜单")
    event.click_mark_location_menu(self.hwnd)
    log.info("点击主城项")
    event.click_mark_location_main_city(self.hwnd)
    main_city = assistant.get_main_city_location(self.hwnd)
    log.info("获取主城坐标:" + str(main_city))
    config.main_city_location = main_city
def collection(
        self, is_current_occurrences: Optional[bool] = False) -> Collection:
    """Return the requested collection, creating it first when absent.

    The target is 'current_occurrences' when *is_current_occurrences* is
    truthy, otherwise 'last_occurrences'.

    :param is_current_occurrences: selects which collection to fetch/create.
    :return: the selected (possibly freshly created) collection.
    """
    db: Database = self.__database()
    if is_current_occurrences:
        target = 'current_occurrences'
    else:
        target = 'last_occurrences'
    if target not in db.list_collection_names():
        try:
            db.create_collection(target)
            info(f'Collection {target} criada.')
        except OperationFailure as e:
            error(f'Erro ao criar a collection: {e.__str__()}')
    return db.get_collection(target)
def select_datas(self):
    """Drive the site's filters (year, region, city, police stations) so the
    data to be extracted becomes reachable."""
    selector = self.__selector
    selector.open_browser()
    selector.click_button()
    for year in self.__years:
        info(f'Selecionando o ano {year}.')
        selector.select_year(value=str(year))
        info('Selecionando a região.')
        selector.select_region(value='Capital')
        info('Selecionando o município.')
        selector.select_city(value='São Paulo')
        info('Selecionando as delegacias.')
        selector.select_police_stations(year=year)
def _notify(id: str, message: str) -> None:
    """Publish *message* on the 'update' redis stream and mirror it to the log."""
    redis_app.xadd('update', {'id': id, 'message': message})
    log.info(message)


def make(devices: List[ConnectedDevice], action: Dict[str, str], id: str) -> None:
    """Execute one VLAN/config *action* against every device in parallel and
    announce the outcome on the 'update' stream.

    :param devices: connected devices to operate on.
    :param action: dict with a 'type' key ('add'/'remove'/'update'/'save')
        plus 'vlan_num'/'subnet' where applicable.
    :param id: stream-entry id echoed back in the progress message.
    """
    now = datetime.now().strftime("%m/%d/%Y %H:%M:%S")
    if action['type'] == 'add':
        tp_function = partial(add_vlan,
                              vlan_number=action['vlan_num'],
                              subnet=action['subnet'])
        with ThreadPool(len(devices)) as tp:
            tp.map(tp_function, devices)
        _notify(id, f'VLAN {action["vlan_num"]} ({action["subnet"]}) had been successfully added.')
        # refresh cached configs after the change ('999' = internal trigger)
        make(devices, {'type': 'update'}, '999')
    elif action['type'] == 'remove':
        tp_function = partial(remove_vlan, vlan_number=action['vlan_num'])
        with ThreadPool(len(devices)) as tp:
            tp.map(tp_function, devices)
        _notify(id, f'VLAN {action["vlan_num"]} had been successfully removed.')
        make(devices, {'type': 'update'}, '999')
    elif action['type'] == 'update':
        with ThreadPool(len(devices)) as tp:
            result = tp.map(update_config, devices)
        data = [dict(name=device.name, config=device_config)
                for device, device_config in zip(devices, result)]
        redis_app.set('config_cisco', json.dumps({'json': data, 'time': now}))
        # BUG FIX: the original log line read 'updated.successfully.'
        _notify(id, 'Running configurations had been updated successfully.')
    elif action['type'] == 'save':
        with ThreadPool(len(devices)) as tp:
            tp.map(save_config, devices)
        redis_app.xadd('save', {'time': now})
        _notify(id, 'Running configurations had been saved successfully.')
    else:
        log.warn('Unknown action type')
def clear(self):
    """Erase all datasets in the database and flag it as modified so the
    next write() persists the (now empty) state."""
    log.info("Resetting database '%s' ..." % self.name)
    super(Database, self).clear()
    self.modified = True