def get_quotes():
    """Gets quotes from brainyquote.com and stores them (in the presence of internet).

    Gracefully returns if offline.
    """
    topics = [
        'war', 'learning', 'leadership', 'knowledge', 'technology', 'nature',
        'great', 'inspirational'
    ]
    for topic in topics:
        try:
            page = requests.get('https://www.brainyquote.com/topics/{}'.format(topic))
            tree = html.fromstring(page.content)
            q = tree.xpath('//a[@title="view quote"]/text()')
            for quote in q:
                try:
                    insertion_res = persistence.insert_quote(quote)
                    logging.info(insertion_res)
                except ValueError:
                    logging.debug(quote)
        except Exception as e:
            rollbar.report_exc_info()
            logging.error(str(e))
            return

    database_cleanse = StoppableThread(target=clean_db, args=(True,))
    database_cleanse.daemon = True
    database_cleanse.start()
def fetch_and_display_images(subject, image_urls=[]):
    if image_urls == []:
        try:
            logging.info(f'Extracting image urls for: {subject}')
            res = wikipedia.page(subject)
            image_urls = res.images
            logging.info(f'Got image urls: {image_urls}')
        except Exception as e:
            logging.error(e)
            return

    render_images = StoppableThread(target=display_manager.fetch_images,
                                    args=(subject, image_urls,))
    render_images.daemon = True
    render_images.start()
def run(url, cms_list, arg_dict, sums, return_dict=None):
    global THREAD, SILENCE, SUM, SUCCESS, SESSION, STOP, TEMP
    # --------------------------------------------
    SUCCESS = {}
    SUM = sums
    timeout = arg_dict['timeout']
    retry = arg_dict['retry']
    THREAD = arg_dict['thread']
    SILENCE = arg_dict['silence']
    SESSION = requests.Session()
    if retry > 0:
        SESSION.mount('http://', requests.adapters.HTTPAdapter(max_retries=retry))
        SESSION.mount('https://', requests.adapters.HTTPAdapter(max_retries=retry))

    # Distribute the CMS entries round-robin across THREAD worker lists.
    new_cms_list = [list() for x in range(THREAD)]
    it = 0
    while len(cms_list) > 0:
        new_cms_list[it].append(cms_list.pop())
        it += 1
        if it == THREAD:
            it = 0

    try:
        thread_list = []
        count = 0
        for each_cms_list in new_cms_list:
            if len(each_cms_list) > 0:
                t = StoppableThread(target=thread_run, args=(url, each_cms_list, timeout))
                t.setDaemon(True)
                t.start()
                thread_list.append(t)
        # enumerate() here is expected to be threading.enumerate():
        # keep polling while worker threads (besides the main one) are alive.
        while SUM[0] < SUM[1] and len(enumerate()) > 1:
            sleep(0.2)
            count += 1
            if count >= 500:  # give up after 100 seconds (500 * 0.2 s)
                for each in thread_list:
                    each.stop(TimeoutError)
                STOP = True
                SUCCESS['STOP'] = True
                break
    except KeyboardInterrupt:
        SUCCESS['STOP'] = True
    finally:
        if STOP is True:
            return 1
    # --------------------------------------------
    SUM[0] += 1
    if return_dict is not None:
        return_dict.update(SUCCESS)
    return 0
def command_handler(user_input):
    kernel.setPredicate("name", os.environ['LUNA_USER'])
    command = user_input.lower()
    try:
        if command == 'lets be serious':
            wolfram()
        if command == 'help':
            utils.help_center()
        elif (command.startswith('what is the distance between ')
              or command.startswith('distance between')):
            end = command.find(' and ')
            start = command.find(' between ')
            x = command[start + 9:end]
            y = command[end + 5:]
            utils.groundDistance(x, y)
            return
        elif 'resource' in command:
            utils.find_external_resource()
            return
        elif command == 'newscatcher' or command == 'nc':
            nc_handler.main()
            return
        elif command == 'search quotes':
            # display_manager.quote_search()
            return
        elif command.lower().strip().startswith('search '):
            listing_handler.list_search_results(command[7:])
        elif command.lower() == 'insert quote':
            result = data_manager.insert_quote_from_user()
            handle_persistence_response(result, True)
            return
        elif command == 'reading list':
            listing_handler.list_table('random')
            return
        elif command == 'latency':
            render_latency_chart = StoppableThread(target=utils.latency)
            render_latency_chart.daemon = True
            render_latency_chart.start()
            return
        elif command == 'clear':
            os.system('clear')
            return
        elif command.startswith('extract '):
            extraction_list = command[8:]
            targets = extraction_list.split(',')
            formatted_parameters = ''
            for target in targets:
                formatted_parameters += f"'{target.strip()}' "
            os.system(
                f'gnome-terminal -e "python functions/extractor/extractor.py {formatted_parameters}"'
            )
            return
        # todo: run without tagging. Also find a way to remove entries from text file after extraction.
        # elif command.lower() == 'run passive extraction':
        #     targets = utils.fetch_passive_extraction_list()
        #     os.system(f'gnome-terminal -e "python functions/extractor/extractor.py {targets}"')
        #     return
        elif command.startswith('play me some '):
            utils.media_player(command[13:])
            return
        elif command == 'data stats':
            H()
            sprint('Listing database item counts\n')
            sprint(
                '| ' +
                f'{ Style.BRIGHT + Fore.YELLOW + "INTEL" + Fore.RESET + Style.RESET_ALL + " (Total: " + str(persistence.get_db_count("intelligence")) + ")" }'
            )
            tags = persistence.fetch_distinct_tags()
            space = len(sorted(tags, key=len)[-1]) + 10
            for tag in sorted(tags):
                count = persistence.tag_count(tag)[0]
                if count > 1 and tag != 'intelligence':
                    print(' - %s%s: %s' % (tag.title().replace('_', ' '),
                                           (' ' * (space - len(tag))), count))
            sprint(
                '| ' +
                f'{ Style.BRIGHT + Fore.YELLOW + "FILES" + Fore.RESET + Style.RESET_ALL + " (Total: " + str(persistence.get_db_count("texts")) + ")" }'
            )
            sprint(
                '| ' +
                f'{ Style.BRIGHT + Fore.YELLOW + "ARCHIVES" + Fore.RESET + Style.RESET_ALL + " (Total: " + str(persistence.get_db_count("archive")) + ")" }'
            )
            return
        elif command.startswith('how do i pronounce'):
            try:
                transformed = num_word_transform.number_to_words(command[19:])
                H()
                sprint(transformed)
                return
            except Exception as e:
                H()
                sprint(str(e))
                return
        elif command == 'banner':
            banners = ['db_banner2.py', 'db_banner3.py']  # for future: add other banners here
            os.system('python3 ./resources/banners/%s' % random.choice(banners))
            return
        elif command.startswith('find the') and 'root' in command:
            utils.find_root(command)
            return
        elif command.lower().startswith('list '):
            table = command[5:].strip(' ')
            listing_handler.list_table(table)
            return
        # todo: complete and refine implementation
        elif command == 'merge all relations':
            H()
            sprint('Merging all unprotected intel relations. This may take a while.')
            result = persistence.merge_all_relations_by_tag()
            logging.debug(result)
            handle_persistence_response(result)
            return
        elif (command.lower().startswith('merge ')
              and command.lower() != 'merge all relations'):
            tag = command[6:].replace(' ', '_')
            result = persistence.merge_relation_by_tag(tag)
            handle_persistence_response(result)
            return
        elif command.lower().startswith('pin'):
            table = 'intelligence'
            tag = 'PIN_TO_START'
            document_to_be_tagged = command[4:].strip(' ')
            result = persistence.update_doc_flags(table, document_to_be_tagged, tag)
            handle_persistence_response(result)
            return
        elif command.lower() == 'clear pins':
            result = persistence.clear_all_pins()
            handle_persistence_response(result)
            return
        elif 'population density' in command:
            utils.population_density()
            return
        elif 'terminal' in command:
            H()
            sprint('Terminal open.')
            display_manager.terminal_session()
            return
        elif command == 'htop':
            os.system('htop')
            return
        elif command == 'clean swap':
            logging.info('attempting to transfer swap memory to RAM')
            print('')
            os.system('sudo swapoff -a && sudo swapon -a')
            logging.info('Swap cleansed.')
            H()
            sprint('Swap cleansed.')
            return
        elif command == 'clean db':
            onboarding.clean_db()
            return
        elif 'network diagnostic' in command or command == 'netdog':
            print('')
            os.system('sudo nmcli radio wifi off')
            logging.debug('Turning wifi off.')
            os.system('nmcli radio wifi on')
            logging.debug('Turning wifi on.')
            os.system('sudo service network-manager restart')
            logging.debug('Restarting network manager.')
            H()
            sprint('Diagnosis complete. Counter-measures deployed.')
            return
        elif command == 'whats my ip':
            H()
            os.system('dig +short myip.opendns.com @resolver1.opendns.com')
            return
        elif 'all systems shutdown' in command:
            if os.environ['LUNA_USER'] == 'FRTNX':
                try:
                    os.system('sudo shutdown now')
                except KeyboardInterrupt:
                    return
                except Exception as e:
                    H()
                    print(e)
                    return
            else:
                H()
                sprint(random.choice(DoA))
                return
        elif command.startswith('convert'):
            utils.converter(command)
            return
        elif 'reboot all systems' in command:
            # refine condition to only execute when called by primary user
            if os.environ['LUNA_USER'] == 'FRTNX':
                try:
                    os.system('sudo reboot now')
                except KeyboardInterrupt:
                    return
                except Exception as e:
                    H()
                    print(e)
                    return
            else:
                H()
                sprint(random.choice(DoA))
                return
        elif command.startswith('show me all '):
            utils.nearby(command[12:])
            return
        elif command.startswith('dict'):
            utils.dictionary()
            return
        elif 'fibonacci' in command:
            utils.laFibonacci()
            return
        elif 'koan' in command:
            utils.zen()
            return
        elif command == 'history':
            print('')
            os.system('sudo python3 herodotus.py')
            return
        elif 'exit' in command:
            H()
            sprint(random.choice(farewell_responses))
            logging.warn('shutting down...')
            exit()
        else:
            H()
            sprint(kernel.respond(command))
            result = persistence.insert_session_data(command)
            logging.info(result)
            return
    except Exception as e:
        rollbar.report_exc_info()
        H()
        print(e)
        return
client = wolframalpha.Client(os.getenv('WOLFRAM'))
num_word_transform = inflect.engine()

kernel = aiml.Kernel()
spine = './brain/'
try:
    brn = os.listdir(spine)
    kernel.loadBrain(spine + brn[0])
except Exception as e:
    rollbar.report_exc_info()
    logging.error(e)
    logging.error("I'm brainless.")

load_character = StoppableThread(target=utils.character_loader, args=(kernel, ))
load_character.daemon = True
load_character.start()

coords = StoppableThread(target=utils.get_coords)
coords.daemon = True
coords.start()


def resolveListOrDict(variable):
    if isinstance(variable, list):
        return variable[0]['plaintext']
    else:
        return variable['plaintext']
class LogEventMonitor(PatternMatchingEventMonitor[LogEvent]):
    """Log buffer supporting logging to a file and waiting for a line pattern match.

    The `log_config` parameter holds the configuration of the file logger.
    Consecutive values are interpreted as lines by splitting them on the newline character.
    Internally it uses a thread to read the stream and add lines to the buffer.
    """

    _buffer_task: Optional[StoppableThread]
    _file_logger: logging.Logger
    _in_stream: Iterator[bytes]

    def __init__(self, name: str, log_config: Optional[LogConfig] = None):
        super().__init__(name)
        if log_config:
            self._file_logger = _create_file_logger(log_config)
        else:
            self._file_logger = logging.getLogger(name)
        self._buffer_task = None
        self._loop = asyncio.get_event_loop()

    def event_str(self, event: LogEvent) -> str:
        """Return the string associated with `event` on which to perform matching."""
        return event.message

    @property
    def events(self) -> Sequence[LogEvent]:
        """Return the events that occurred so far."""
        return self._events

    def start(self, in_stream: Iterator[bytes]):
        """Start reading the logs."""
        super().start()
        self.update_stream(in_stream)
        logger.debug("Started LogEventMonitor. name=%s", self._file_logger.name)

    async def stop(self) -> None:
        """Stop the monitor."""
        if self._buffer_task:
            self._buffer_task.stop(StopThreadException)
        await super().stop()

    def update_stream(self, in_stream: Iterator[bytes]):
        """Update the stream when restarting a container."""
        if self._buffer_task:
            self._buffer_task.stop(StopThreadException)
        self._in_stream = in_stream
        self._buffer_task = StoppableThread(target=self._buffer_input, daemon=True)
        self._buffer_task.start()

    def _buffer_input(self):
        try:
            for chunk in self._in_stream:
                chunk = chunk.decode()
                for line in chunk.splitlines():
                    self._file_logger.info(line)
                    event = LogEvent(line)
                    self.add_event_sync(event)
        except StopThreadException:
            return

    async def wait_for_entry(self, pattern: str, timeout: Optional[float] = None) -> LogEvent:
        """Search the log for an entry with a message matching `pattern`.

        The first call to this method will examine all log entries gathered since this
        monitor was started and then, if needed, will wait for up to `timeout` seconds
        (or indefinitely, if `timeout` is `None`) for a matching entry.

        Subsequent calls will examine all log entries gathered since the previous call
        returned and then wait for up to `timeout` seconds.
        """
        event = await self.wait_for_pattern(pattern, timeout)
        logger.debug(
            "Log assertion completed with a match. pattern=%s, match=%s",
            pattern,
            event.message,
        )
        return event
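# Illustrative usage sketch (an assumption added for this document, not code from the
# monitor's module): drive LogEventMonitor with an in-memory bytes iterator and await a
# pattern match. No LogConfig is passed, so lines go to a plain logging.getLogger(name)
# logger; the sample stream and function name are hypothetical.
import asyncio


async def _log_monitor_demo():
    monitor = LogEventMonitor('demo')
    # Each bytes chunk is decoded and split into lines; every line becomes a LogEvent.
    monitor.start(iter([b'starting worker\n', b'worker ready\nlistening on port 8080\n']))
    # Wait up to 5 seconds for a buffered line matching the pattern.
    event = await monitor.wait_for_entry('worker ready', timeout=5)
    print(event.message)
    await monitor.stop()


# asyncio.run(_log_monitor_demo())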
def intent_and_entity_rerouter(text):
    logging.info('Intent classifier received: %s' % text)
    for ignorable in nlu_ignore:
        if text.lower().startswith(ignorable):
            command_handler(text)
            return True

    THRESHOLD = 0.75
    try:
        nlu_response = func_timeout(1, nlu_parser, args=(text, ))
    except (FunctionTimedOut, Exception) as e:
        logging.error(f'Error getting NLU data: {e}')
        return False

    logging.debug('Intent classifier response: %s' % nlu_response)
    if nlu_response['intent']['confidence'] >= THRESHOLD:
        intent = nlu_response['intent']['name']
        entities = nlu_response['entities']
        has_entities = isinstance(entities, list) and len(entities) > 0
        if intent == 'get_weather':
            logging.info('Weather request acknowledged. Sending through designated path.')
            if entities:
                weather.get_weather(False, False, *[entities[0]['value']])
            else:
                weather.get_weather()
            return True
        elif intent == 'find_info' and has_entities:
            # TODO: consider how to make images and local lookup optional
            # possible intents: toggle_image_display (translated entities: on/off);
            # toggle_air_gap (grants or removes Luna's access to the internet, and,
            # more importantly, the internet's access to Luna)
            action = intel_handler.informant(entities[0]['value'].title(), True, 0, False)
            logging.info(f'Caller received action: {action}')
            if action:
                handle_user_input(action)
            return True
        elif intent == 'find_images':
            if utils.is_online():
                entity = entities[0]['value']
                try:
                    image_urls = wikipedia.page(entity).images
                    render_images = StoppableThread(
                        target=display_manager.fetch_images,
                        args=(entities[0]['value'], image_urls, ))
                    render_images.daemon = True
                    render_images.start()
                    H()
                    sprint(random.choice(pending_image_search_responses))
                except Exception as e:
                    logging.error(e)
                    H()
                    sprint('For some reason I could not comply.')
            else:
                H()
                sprint('I need an internet connection to comply.')
            return True
        elif intent == 'find_related_info' and has_entities:
            utils.find_related(entities[0]['value'])
            return True
        elif intent == 'directions' and has_entities:
            origin = None
            destination = None
            for entity in entities:
                if entity['entity'] == 'source':
                    origin = entity['value']
                elif entity['entity'] == 'destination':
                    destination = entity['value']
            logging.info('Parsing direction query with destination: %s and origin: %s'
                         % (destination, origin))
            if destination:
                utils.directions(destination, origin)
            else:
                H()
                sprint('No destination found.')
            return True
        elif intent == 'find_location' and has_entities:
            utils.find_location(entities[0]['value'])
            return True
        elif intent == 'find_more_info' and has_entities:
            action = intel_handler.informant(entities[0]['value'].title(), False, 0, True)
            logging.info(f'Caller received action: {action}')
            if action:
                handle_user_input(action)
            else:
                return True
        else:
            return False
    return False
def informant(mark, images=True, latency=0, flesh=False, first_call=True, search_online=False):
    """Looks for the specified subject in the local database and falls back to wikipedia
    if it is not found. Also searches for images of the subject.

    params:
        mark (string): the subject/object being sought (e.g., Physics, Barack Obama,
            Milk, Proximal Policy Optimisation, ...etc)
        images (boolean): look for images on a parallel thread. Set to False to disable.
        latency (int): if no data is found locally on the subject, the time spent running
            this process up to this point is passed as this value and the function is
            called again from within itself, this time with a flag indicating that the
            local database is to be ignored and an internet search conducted immediately
            if possible.
        flesh (boolean): controls whether to display the introductory article summary or
            the article detail. Set to True when seeking article detail and False when
            only a summary is required.
        first_call (boolean): set to False on the recursive retry so the image thread is
            not started twice.
        search_online: if True the local database is ignored and the request goes
            straight to wikipedia.org
    """
    # logging.debug('informant received: %s' % mark)
    logging.info('Requested document "%s".' % mark)
    logging.info(f'Inherited latency: {latency} seconds')
    logging.info(f'First call: {first_call}')
    logging.info(f'Search online: {search_online}')
    global found
    if search_online:
        render_ellipses = StoppableThread(target=ellipses)
        render_ellipses.daemon = True
        render_ellipses.start()

    if first_call and images and not search_online:
        render_images = StoppableThread(target=fetch_and_display_images, args=(mark,))
        render_images.daemon = True
        render_images.start()

    try:
        if search_online:
            try:
                logging.warn('Nature of request requires internet. Attempting to connect')
                start = time.time()
                res = func_timeout(8, wikipedia.page, args=(mark,))
                if first_call and images:
                    image_urls = res.images
                    render_images = StoppableThread(target=fetch_and_display_images,
                                                    args=(mark, image_urls,))
                    render_images.daemon = True
                    render_images.start()
                content = res.content
                title = res.title
                slice_limit = content.find('\n\n\n')
                reset_prompt()
                display_manager.output_prompt()
                display_manager.output_prompt()
                print(gbullet + '\n')
                end = time.time()
                logging.info('"%s" found. latency is at %s seconds.'
                             % (title, str(end - start + latency)))
                if flesh:
                    logging.info('Fleshing out requested document.')
                    action = directive(content, title, slice_limit + 3, *['flesh'])
                    return action
                if 'displaystyle' not in content[:slice_limit] and 'textstyle' not in content[:slice_limit]:
                    sprint(content[:slice_limit], 0.015)
                else:
                    output_controller(content[:slice_limit])
                action = directive(content, title, slice_limit, *['enable-saving'])
                logging.info(f'Returning {action} to caller for real this time')
                return action
            except KeyboardInterrupt as e:
                print('\n')
                found = True
                action = directive(content, title, slice_limit, *['enable-saving'])
                return action
            except FunctionTimedOut:
                end = time.time()
                reset_prompt()
                logging.error('Request for %s timed out. Request took %s seconds'
                              % (mark, end - start + latency))
                display_manager.output_prompt()
                display_manager.output_prompt()
                sprint('We seem to have a really bad internet connection. Try again when conditions improve.')
                return
            except wikipedia.PageError as e:
                logging.error(e)
                reset_prompt()
                display_manager.output_prompt()
                display_manager.output_prompt()
                sprint("I couldn't find anything on %s in my online repositories." % mark)
                display_manager.output_prompt()
                sprint('Perhaps try a different alias.')
                return
            except wikipedia.DisambiguationError as e:
                logging.error(e)
                reset_prompt()
                display_manager.output_prompt()
                display_manager.output_prompt()
                sprint("'%s' refers to too many things. Try being more specific." % mark)
                return
            except Exception as e:
                logging.error(e)
                reset_prompt()
                # todo: resolve malformed output here
                display_manager.output_prompt()
                display_manager.output_prompt()
                sprint(random.choice(bad_connection_responses))
                rollbar.report_exc_info()
                utils.queue_for_extraction(mark)
                return
        else:
            try:
                start = time.time()
                content = func_timeout(2, persistence.get_document, args=('intelligence', mark,))
                slice_limit = content.find('\n\n\n')
                end = time.time()
                logging.info('"%s" found. latency is at %s seconds.'
                             % (mark, str(end - start + latency)))
                display_manager.output_prompt()
                sprint(bullet + "\n")
                if flesh:
                    logging.info('Fleshing out requested document.')
                    action = directive(content, mark, slice_limit + 3, *['flesh'])
                    return action
                if 'displaystyle' not in content[:slice_limit] and 'textstyle' not in content[:slice_limit]:
                    sprint(content[:slice_limit], 0.015)
                else:
                    output_controller(content[:slice_limit])
                action = directive(content, mark, slice_limit, *['savenot'])
                return action
            except KeyboardInterrupt as e:
                print('\n')
                action = directive(content, mark, slice_limit, *['savenot'])
                return action
            except FunctionTimedOut:
                end = time.time()
                logging.error('Could not find requested document locally within acceptable time.')
                logging.warning('Attempting to find requested document online.')
                informant(mark, False, end - start, flesh, False, *['engagethehive'])
                return
            except TypeError:
                end = time.time()
                logging.error('Could not find requested document locally.')
                logging.warning('Attempting to find requested document online.')
                informant(mark, False, end - start, flesh, False, True)
                return
            except Exception as e:
                logging.error(e)
                rollbar.report_exc_info()
                return
    except KeyboardInterrupt as e:
        return
    except Exception as e:
        logging.debug(str(e))
        return
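# Illustrative calls to informant() (assumptions added for clarity, not present in the
# original source); they map onto the parameters documented in the docstring above.
#
# informant('Physics')                              # local DB first, image search in parallel
# informant('Physics', images=False, flesh=True)    # local DB, full article detail, no images
# informant('Physics', search_online=True)          # skip the local DB, query wikipedia directly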
def setup_thread(database):
    thread = StoppableThread(name='t1', target=t1, args=(database, ))
    thread.daemon = True
    return thread
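# Usage sketch (hypothetical; `database` and `t1` live in the surrounding test module):
# build the worker, start it as a daemon, then stop it by injecting an exception into
# the thread via StoppableThread.stop(), mirroring how the other snippets shut down
# their workers.
#
# worker = setup_thread(database)
# worker.start()
# ...
# worker.stop(KeyboardInterrupt)  # raises KeyboardInterrupt inside the worker thread
# worker.join(timeout=2)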