def compile_resume(output_dir, has_pubs):
    """ Compile resume files

    Args:
        output_dir: The resume output directory
        has_pubs: The boolean whether there is a publication section

    Returns:
        None
    """
    log = Logger()
    log.notice('Compiling resume files')
    prev_dir = os.getcwd()
    os.chdir(output_dir)
    # First xelatex pass; with publications, biber plus a second xelatex
    # pass is needed to resolve the bibliography.
    succeeded = run_cmd('xelatex resume.tex')
    if succeeded and has_pubs:
        succeeded = run_cmd('biber resume') and run_cmd('xelatex resume.tex')
    if not succeeded:
        log.warn(
            'Failed to compile resume files, please compile them manually')
    os.chdir(prev_dir)
def compile_resume(output_dir, has_pubs, timeout):
    """ Compile resume files

    Args:
        output_dir: the resume output directory
        has_pubs: the boolean whether there is a publication section
        timeout: the timeout value

    Returns:
        None
    """
    fail_msg = "Failed to compile resume files, please compile them manually"
    log = Logger()
    log.notice("Compiling resume files")
    prev_dir = os.getcwd()
    os.chdir(output_dir)
    # One xelatex pass is enough without publications; otherwise biber and
    # a second xelatex pass are required to resolve citations.
    ok = run_cmd("xelatex resume.tex", timeout)
    if ok and has_pubs:
        ok = run_cmd("biber resume", timeout) and run_cmd("xelatex resume.tex", timeout)
    if not ok:
        log.warn(fail_msg)
    os.chdir(prev_dir)
class LoggableError(Exception):
    """ Parent class for customized exception classes which provides interface
    to the logging functions

    Attributes:
        log_file: The file in which to keep logs
        logger: Instance of logbook.Handler which handles logging
    """

    def __init__(self):
        # Subclasses are expected to set ``self.message`` before calling
        # this constructor -- TODO confirm against subclass definitions.
        self.orig_message = self.message
        self.UPATH = os.getenv("HOME")
        self.log_file = '%s/Desktop/tweetlog.rtf' % self.UPATH
        self.initialize_logger()

    def initialize_logger(self):
        """Create a Logger on first use; keep any logger already assigned.

        Fix: the original used a bare ``except:`` to detect the missing
        ``logger`` attribute, which would also swallow unrelated errors.
        """
        if not hasattr(self, 'logger'):
            self.logger = Logger()
        # self.logger = FileHandler(self.log_file)
        # self.logger.push_application() # Pushes handler onto stack of log handlers

    def log_error(self, error_message):
        """Log a message at ERROR level through the attached logger."""
        self.logger.error(error_message)

    def log_warning(self, warning_message):
        """Log a message at WARNING level through the attached logger."""
        self.logger.warn(warning_message)
def scan_photo(file_name=None, file_url=None):
    """Scan a photo, preferring GCP and falling back to Azure.

    Monthly per-API usage counts are kept in Datastore; an API is only
    used while its count for the current month is within its limit.

    Args:
        file_name: the photo file name (optional).
        file_url: the photo URL (optional).

    Returns:
        A (is_safe, likelihood) tuple; both None when every API's
        tokens for the month are exhausted.
    """
    today = date.today()
    query = datastore_client.query(kind=API_COUNT)
    query.add_filter(YEAR, "=", today.year)
    query.add_filter(MONTH, "=", today.month)
    usage = {entity[NAME]: entity[COUNT] for entity in query.fetch()}
    is_safe = likelihood = None
    if GCP not in usage or usage[GCP] <= GCP_LIMIT:
        is_safe, likelihood = gcp_scan(file_name, file_url)
        update_api_count(datastore_client, GCP, today.year, today.month)
    elif AZURE not in usage or usage[AZURE] <= AZURE_LIMIT:
        is_safe, likelihood = azure_scan(file_name, file_url)
        update_api_count(datastore_client, AZURE, today.year, today.month)
    else:
        Logger().warn("Vision scan tokens exhausted")
    return is_safe, likelihood
def test2():
    """Emit one record at each logbook severity level."""
    log = Logger('Logbook-test-2')
    records = (
        ('critical', 'critical'),
        ('error', 'error'),
        ('warn', 'warn'),
        ('notice', 'notice'),
        ('info', 'test'),
        ('debug', 'debug'),
    )
    for method, text in records:
        getattr(log, method)(text)
def make_references(publications, output_dir):
    """ Create reference bib file

    Args:
        publications: the list of publications
        output_dir: the output directory

    Returns:
        A list of reference identifiers
    """
    log = Logger()
    cr = Crossref()
    lines = []
    references = []
    for i, publication in enumerate(publications):
        log.notice(
            f"Querying and formatting {i + 1} out of {len(publications)} publications"
        )
        link = publication[LINK]
        title = publication[TITLE]
        # Check if it is a DOI url
        if link and "doi.org" in link:
            doi = urlparse(link).path.strip("/")
        # Extract the DOI using the title
        else:
            results = cr.works(query_bibliographic=title, limit=1)
            if (results["message"]["total-results"] == 0
                    or results["message"]["items"][0]["title"][0].lower() !=
                    title.lower()):
                log.warn(f'Could not find the doi for "{title}"')
                continue
            doi = results["message"]["items"][0]["DOI"]
        try:
            reference = cn.content_negotiation(doi)
            lines.append(reference)
            # The identifier sits between "@<type>{" and the trailing comma
            # on the first line of the BibTeX entry.
            references.append(
                re.sub("^@.*{", "", reference.split("\n")[0]).strip(","))
        except HTTPError:
            # Fix: message previously read "Could not Create" (stray capital).
            log.warn(f'Could not create reference for "{title}"')
    with open(os.path.join(output_dir, "references.bib"), "w") as f:
        f.write("\n\n".join(lines))
    return references
class PyPLogger(object):
    """Wrap a logbook Logger with a timed-rotating file handler and a
    colorized stderr handler sharing one record format."""

    def __init__(self, clazz):
        logbook.set_datetime_format("local")
        # Strip any dotted prefix from the class name for the channel name.
        name = clazz.__name__
        self.serverName = name[name.rfind('.') + 1:]
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        self.log_file = TimedRotatingFileHandler(
            os.path.join(log_dir, '%s.log' % self.serverName),
            date_format='%Y-%m-%d',
            bubble=True,
            encoding='utf-8')
        self.log_std = ColorizedStderrHandler(bubble=True)
        self.log = Logger(self.serverName)
        self.__init_logger()
        self.__setting()

    def log_type(self, record, handler):
        # Render a record as "[server]-[time]-[LEVEL] - message".
        return "[" + self.serverName + "]" + "-[{date}]-[{level}] - {msg}".format(
            date=record.time,
            level=record.level_name,
            msg=record.message)

    def __init_logger(self):
        logbook.set_datetime_format("local")
        self.log.handlers = [self.log_file, self.log_std]

    def __setting(self):
        for handler in (self.log_std, self.log_file):
            handler.formatter = self.log_type

    def info(self, *args, **kwargs):
        self.log.info(*args, **kwargs)

    def warn(self, *args, **kwargs):
        self.log.warn(*args, **kwargs)

    def error(self, *args, **kwargs):
        self.log.error(*args, **kwargs)
def make_references(publications, output_dir):
    """ Create reference bib file

    Args:
        publications: the list of publications
        output_dir: the output directory

    Returns:
        A list of reference identifiers
    """
    log = Logger()
    cr = Crossref()
    lines = []
    references = []
    for i, publication in enumerate(publications):
        log.notice(f'Querying and formatting {i + 1} out of {len(publications)} publications')
        link = publication[LINK]
        title = publication[TITLE]
        # Check if it is a DOI url
        if link and 'doi.org' in link:
            doi = urlparse(link).path.strip('/')
        # Extract the DOI using the title
        else:
            results = cr.works(query_title=title, limit=1)
            if results['message']['total-results'] == 0 or \
                    results['message']['items'][0]['title'][0].lower() != title.lower():
                log.warn(f'Could not find the doi for "{title}"')
                continue
            doi = results['message']['items'][0]['DOI']
        try:
            reference = cn.content_negotiation(doi)
            lines.append(reference)
            # The identifier sits between "@<type>{" and the trailing comma
            # on the first line of the BibTeX entry.
            references.append(re.sub('^@.*{', '', reference.split('\n')[0]).strip(','))
        except HTTPError:
            # Fix: message previously read "Could not Create" (stray capital).
            log.warn(f'Could not create reference for "{title}"')
    with open(os.path.join(output_dir, 'references.bib'), 'w') as f:
        f.write('\n\n'.join(lines))
    return references
def test_simple(self):
    """A warning logged through the Sentry logbook handler creates a
    Message event with the expected data and tags.

    Fix: use ``assertEqual``/``assertIn`` instead of the deprecated
    ``assertEquals`` alias and ``assertTrue(x in y)``.
    """
    logger = Logger('sentry.tests.test_contrib.test_logbook')
    handler = SentryHandler('INFO')
    with handler.applicationbound():
        logger.warn('foo')

    event = Event.objects.all()[0]
    self.assertEqual(event.type, 'sentry.events.Message')
    self.assertEqual(event.time_spent, 0)
    self.assertIn('sentry.interfaces.Message', event.data)
    event_data = event.data['sentry.interfaces.Message']
    self.assertIn('message', event_data)
    self.assertEqual(event_data['message'], 'foo')
    self.assertIn('params', event_data)
    self.assertEqual(event_data['params'], [])
    tags = dict(event.tags)
    self.assertIn('level', tags)
    self.assertEqual(tags['level'], 'warning')
def store_creds(email, password, creds_file):
    """ Store login credentials

    Args:
        email: the LinkedIn login email
        password: the LinkedIn login password
        creds_file: the credentials file to store the login credentials

    Returns:
        None
    """
    log = Logger()
    log.warn(f'It is highly NOT recommended to keep your login credentials, '
             f'you can always remove the file {CREDENTIALS_FILE} to remove them')
    make_dir(os.path.expanduser(f'~/.{PACKAGE_NAME}'))
    with open(creds_file, 'w') as f:
        json.dump({'email': email, 'password': password}, f)
class Kernel(object):
    """Application kernel: wires up the router, service container and
    logging, imports the application package, and dispatches requests."""

    def __init__(self):
        # Basic setup: base path is two directories above this file.
        self.__basepath = os.path.dirname( os.path.realpath(os.path.realpath( __file__ ) + '/../../' ) )
        self.__router = Router()
        # Load container
        self.__container = self._create_container()
        # Setup logging: use the container-provided handler if configured,
        # otherwise discard records via a NullHandler.
        if self.__container.has_service('log.handler'):
            self.__log_handler = self.__container.get_service('log.handler')
        else:
            self.__log_handler = NullHandler()
        self.__log_handler.push_thread()
        self.__logger = Logger('MiniMVC')
        self.__container.set_param('sys.log', self.__logger)
        # Import application (side-effect import registers app code;
        # basepath must be on sys.path first).
        sys.path.append(self.__basepath)
        import app
        self.__logger.info('Kernel started')

    def run(self, request):
        """Route the request, invoke the matched controller action, and
        return its result (False when no route matches)."""
        query_string = request.unparsed_uri
        self.__logger.info('Request: ' + query_string)
        route = self.__router.route(query_string)
        if route:
            self.__logger.info('Route matched: %s.%s(%s)' % (route['controller'], route['action'], route['params']))
            self.__container.set_param('sys.matched_route', route)
            request.parameters = route['params']
            res = ObjectFactory.instantiate_and_call(route['controller'], [self.__container], route['action'], request)
        else:
            self.__logger.warn('No matching route found for: ' + query_string)
            res = False
        # Shutdown logger and return
        self.__log_handler.pop_thread()
        # Return the local request log
        # NOTE(review): assumes a 'log.test_handler' service is always
        # registered -- confirm against the service configuration.
        self.request_log = self.__container.get_service('log.test_handler').records
        return res

    def _create_container(self):
        """Build the service container from the YAML config, pre-seeding
        the sys.* parameters."""
        container = ServiceContainer()
        container.set_param('sys.kernel', self)
        container.set_param('sys.container', container)
        container.set_param('sys.basepath', self.__basepath)
        container.set_param('sys.router', self.__router)
        #container.set_param('sys.log', self.__logger)
        loader = ServiceContainerLoader()
        loader.register_section_loader(DatabaseSectionLoader())
        loader.register_section_loader(ServicesSectionLoader())
        loader.register_section_loader(RoutesSectionLoader())
        loader.load(container, self.__basepath + '/app/config/config.yml')
        return container

    def __str__(self):
        return "MiniMVC.Kernel"

    @property
    def container(self):
        # Read-only access to the service container.
        return self.__container
each_size = max_size / (backup_count + 1) filename = path_join(file_path, 'fbcli-rotate.log') rotating_file_handler = RotatingFileHandler(filename=filename, level=file_level, bubble=True, max_size=each_size, backup_count=backup_count) rotating_file_handler.format_string = formatter['file'] rotating_file_handler.push_application() logger.debug('start logging on file: {}'.format(filename)) else: try: mkdir(file_path) except Exception: logger.error("CreateDirError: {}".format(file_path)) logger.warn('Could not logging in file. Confirm and restart.') def set_level(level): level_list = ['debug', 'info', 'warning', 'error', 'warn'] if level not in level_list: logger.error("Unavailable log level '{}'. Select in {}".format( level, level_list)) return code = get_log_code(level) stream_handler.level = code logger.info('Changed log level to {}'.format(level)) def set_mode(mode): mode_list = ['debug', 'normal']
# Minimal logbook demo: push a stdout handler and emit one warning.
from logbook import Logger, StreamHandler
import sys

# Route all records on this process's handler stack to stdout.
StreamHandler(sys.stdout).push_application()
log = Logger('linda的日志')  # channel name: "linda's log"
log.warn('这是一个非常酷的输出')  # message: "this is a very cool output"
Depending on your website elements or whichever criteria you opt to use ''' while(counter < 80): #Ensure that all pop-ups are suppressed try: alert = driver.switch_to_alert() alert.dismiss() logger.notice('Alert dismissed...') except NoAlertPresentException: #if no pop-up present carry on pass sleep(30) counter += 1 #sleep for 30secs count 1 logger.warn('Logging out '+user) except Exception as exception: logger.critical(type(exception).__name__+' Exception occured when loading page') driver.quit() exit(0) logger.info('Choked on all users successfully') logger.notice('Now dying...') logger.notice('application terminated at '+str(dt.now())) driver.quit() exit(0) except KeyboardInterrupt: logger.error('KeyboardInterrupt...Dying') try: driver.quit()
class PmLogHandler(log.CementLogHandler):
    """
    PmLogHandler - override CementLogHandler to use logbook.

    This class uses the same configuration options as
    :ref:`LoggingLogHandler <cement.ext.ext_logging>`
    """

    class Meta:
        interface = log.ILog
        """The interface that this class implements."""

        label = 'pmlog'
        """The string identifier of this handler."""

        namespace = "pm"
        """
        The logging namespace.

        Note: Although Meta.namespace defaults to None, Cement will set this
        to the application label (CementApp.Meta.label) if not set during
        setup.
        """

        file_format = "{record.time} ({record.level_name}) {record.channel} : {record.message}"
        """The logging format for the file logger."""

        console_format = "{record.time:%Y-%m-%d %H:%M} ({record.level_name}): {record.message}"
        """The logging format for the consoler logger."""

        debug_format = "{record.time} ({record.level_name}) {record.channel} : {record.message}"
        """The logging format for both file and console if ``debug==True``."""

        log_setup = None
        """Nested log setup placeholder"""

        level = 0
        """Global level for handlers"""

        clear_loggers = True
        """Whether of not to clear previous loggers first."""

        # These are the default config values, overridden by any '[log]'
        # section in parsed config files.
        config_section = 'log'
        """
        The section of the application configuration that holds this handlers
        configuration.
        """

        config_defaults = dict(
            file=None,
            level='INFO',
            to_console=True,
            rotate=False,
            max_bytes=512000,
            max_files=4,
        )
        """
        The default configuration dictionary to populate the ``log`` section.
        """

    # Accepted values for set_level(); anything else falls back to INFO.
    levels = ['INFO', 'WARN', 'ERROR', 'DEBUG', 'FATAL']

    def __init__(self, *args, **kw):
        super(PmLogHandler, self).__init__(*args, **kw)
        self.app = None

    def _setup(self, app_obj):
        """Configure the logbook backend from the app's [log] config:
        level, optional console handler, optional (rotating) file handler."""
        super(PmLogHandler, self)._setup(app_obj)
        if self._meta.namespace is None:
            self._meta.namespace = self.app._meta.label

        self.backend = Logger(self._meta.namespace)

        # hack for application debugging
        if is_true(self.app._meta.debug):
            self.app.config.set('log', 'level', 'DEBUG')

        # Mainly for backwards compatibility since Logger level should
        # be NOTSET (level 0). Output level is controlled by handlers
        self.set_level(self.app.config.get('log', 'level'))

        # clear loggers?
        if is_true(self._meta.clear_loggers):
            self.clear_loggers()

        # console
        if is_true(self.app.config.get('log', 'to_console')):
            self._setup_console_log()

        # file
        if self.app.config.get('log', 'file'):
            self._setup_file_log()

        # nested setup
        self.backend.handlers.append(logbook.NullHandler(bubble=False))
        self.log_setup = logbook.NestedSetup(self.backend.handlers)
        # NOTE(review): self._console_handler only exists when to_console is
        # enabled -- confirm 'to_console' cannot be false here.
        with self._console_handler.applicationbound():
            self.debug("logging initialized for '%s' using PmLogHandler" % \
                       self._meta.namespace)

    def set_level(self, level):
        """
        Set the log level.  Must be one of the log levels configured in
        self.levels which are ``['INFO', 'WARN', 'ERROR', 'DEBUG', 'FATAL']``.

        :param level: The log level to set.
        """
        level = level.upper()
        if level not in self.levels:
            level = 'INFO'
        # Translate the level name into logbook's numeric level.
        level = logbook.lookup_level(level.upper())
        self.level = level

    def get_level(self):
        """Returns a string representation of the current log level."""
        return logbook.get_level_name(self.level)

    def _setup_console_log(self):
        """Add a console log handler."""
        if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
            fmt_string = self._meta.debug_format
        else:
            fmt_string = self._meta.console_format
        console_handler = logbook.StderrHandler(
            format_string=fmt_string,
            level = logbook.lookup_level(self.get_level()),
            bubble = True)
        self._console_handler = console_handler
        self.backend.handlers.append(console_handler)

    def _setup_file_log(self):
        """Add a file log handler."""
        file_path = os.path.expandvars(fs.abspath(self.app.config.get('log', 'file')))
        log_dir = os.path.dirname(file_path)
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)

        if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
            fmt_string = self._meta.debug_format
        else:
            fmt_string = self._meta.file_format

        if self.app.config.get('log', 'rotate'):
            from logbook import RotatingFileHandler
            file_handler = RotatingFileHandler(
                file_path,
                max_size=int(self.app.config.get('log', 'max_bytes')),
                backup_count=int(self.app.config.get('log', 'max_files')),
                format_string=fmt_string,
                level = logbook.lookup_level(self.get_level()),
                bubble = True,
                )
        else:
            from logbook import FileHandler
            file_handler = FileHandler(file_path,
                format_string=fmt_string,
                level = logbook.lookup_level(self.get_level()),
                bubble = True,
                )

        self._file_handler = file_handler
        self.backend.handlers.append(file_handler)

    def _get_logging_kwargs(self, namespace, **kw):
        # Ensure every record carries a 'namespace' entry in its extras,
        # defaulting to this handler's namespace.
        if namespace is None:
            namespace = self._meta.namespace

        if 'extra' in kw.keys() and 'namespace' in kw['extra'].keys():
            pass
        elif 'extra' in kw.keys() and 'namespace' not in kw['extra'].keys():
            kw['extra']['namespace'] = namespace
        else:
            kw['extra'] = dict(namespace=namespace)

        return kw

    def info(self, msg, namespace=None, **kw):
        """
        Log to the INFO facility.

        :param msg: The message the log.
        :param namespace: A log prefix, generally the module ``__name__`` that
            the log is coming from.  Will default to self._meta.namespace if
            None is passed.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.info(msg, **kwargs)

    def debug(self, msg, namespace=None, **kw):
        """
        Log to the DEBUG facility.

        :param msg: The message the log.
        :param namespace: A log prefix, generally the module ``__name__`` that
            the log is coming from.  Will default to self._meta.namespace if
            None is passed.  For debugging, it can be useful to set this to
            ``__file__``, though ``__name__`` is much less verbose.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.debug(msg, **kwargs)

    def warn(self, msg, namespace=None, **kw):
        """
        Log to the WARN facility.

        :param msg: The message the log.
        :param namespace: A log prefix, generally the module ``__name__`` that
            the log is coming from.  Will default to self._meta.namespace if
            None is passed.  For debugging, it can be useful to set this to
            ``__file__``, though ``__name__`` is much less verbose.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.warn(msg, **kwargs)

    def critical(self, msg, namespace=None, **kw):
        """
        Log to the CRITICAL facility.

        :param msg: The message the log.
        :param namespace: A log prefix, generally the module ``__name__`` that
            the log is coming from.  Will default to self._meta.namespace if
            None is passed.  For debugging, it can be useful to set this to
            ``__file__``, though ``__name__`` is much less verbose.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.critical(msg, **kwargs)

    def fatal(self, msg, namespace=None, **kw):
        """
        Log to the FATAL facility.

        :param msg: The message the log.
        :param namespace: A log prefix, generally the module ``__name__`` that
            the log is coming from.  Will default to self._meta.namespace if
            None is passed.  For debugging, it can be useful to set this to
            ``__file__``, though ``__name__`` is much less verbose.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.fatal(msg, **kwargs)

    def error(self, msg, namespace=None, **kw):
        """
        Log to the ERROR facility.

        :param msg: The message the log.
        :param namespace: A log prefix, generally the module ``__name__`` that
            the log is coming from.  Will default to self._meta.namespace if
            None is passed.  For debugging, it can be useful to set this to
            ``__file__``, though ``__name__`` is much less verbose.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.error(msg, **kwargs)

    ## NOTE: do we even need this for logbook?
    def clear_loggers(self):
        """Clear any previously configured logging namespaces.
        """
        if not self._meta.namespace:
            # _setup() probably wasn't run
            return
        # NOTE(review): this resets ``self.handlers`` while handlers are
        # accumulated on ``self.backend.handlers`` -- confirm this actually
        # clears anything.
        self.handlers = []
- record the log into a file (in /var/log or /tmp where it makes the most sense - Reorder alphabetically the library calls """ # Extract the samplerate from the config file import configparser import os import sys from asc import utils from soundfile import SoundFile from logbook import Logger, StreamHandler config_path = os.path.expanduser('~') + '/.ascrc' log = Logger('My samplerate logger') config = configparser.ConfigParser() config.read(config_path) samplerate = int(utils.read_config('audio', 'samplerate', config)) data_path = utils.read_config('path', 'data', config) for root, dirs, files in os.walk(data_path): for f in files: if f.endswith('wav'): audio_file = SoundFile(os.path.join(root, f)) if audio_file.samplerate != samplerate: StreamHandler(sys.stdout).push_application() log.warn('wrong samplerate %d - %s' % ( samplerate, os.path.join(root, f)))
class Robot:
    """
    The Robot class enables you to connect to the robot, send actions and read data from the robot.

    :var version: the version of the robot's firmware.
    :var voltage: the voltage of the robot's batteries, between 0.0 and 1.0.
    :var is_connected: whether the robot has connected or not.
    """

    # Maps each Animation enum member to its command-builder function.
    __animation_map = {
        Animation.right_arm_down: right_arm_down,
        Animation.right_arm_point: right_arm_point,
        Animation.head_down: head_down,
        Animation.head_middle: head_middle,
        Animation.head_up: head_up,
        Animation.head_turn_left: head_turn_left,
        Animation.head_turn_middle: head_turn_middle,
        Animation.head_turn_right: head_turn_right,
        Animation.close_mouth: close_mouth,
        Animation.open_mouth: open_mouth,
        Animation.poke_tounge: poke_tounge,
        Animation.eye_lid_open: eye_lid_open,
        Animation.eye_lid_close: eye_lid_close,
        Animation.raise_eyebrows: raise_eyebrows,
        Animation.frown_eyebrows: frown_eyebrows,
        Animation.smile: smile,
        Animation.mouth_neutral: mouth_neutral,
        Animation.mouth_frown: mouth_frown,
        Animation.reset: reset_motors,
        Animation.go_crazy: go_crazy,
        Animation.go_crazy2: go_crazy2,
        Animation.go_crazy3: go_crazy3,
        Animation.awkward: awkward,
        Animation.cute1: cute1,
        Animation.cute2: cute2,
        Animation.sleep: sleep,
        Animation.sleeping: sleeping,
        Animation.sleepy: sleepy,
        Animation.tell_a_joke: tell_a_joke,
        Animation.wake_up: wake_up,
        Animation.worry: worry
    }

    def __init__(self, read_rate_hz: float = 0.05, log_level=logbook.INFO):
        """
        The Robot constructor.

        :param read_rate_hz: the rate in Hz to read data from the robot.
        :param log_level: the level for displaying and logging information,
            e.g. debugging information.
        """
        StreamHandler(sys.stdout).push_application()
        self.version: str = ""
        self.voltage: float = float("NaN")
        self.is_connected: bool = False
        self._log = Logger('Robot')
        self._log.level = log_level
        self._read_rate_hz = read_rate_hz
        self._keep_alive_secs = 9.0
        self._read_commands = [voltage_cmd()]
        self._read_thread = threading.Thread(target=self._send_read_cmds)
        self._read_event = threading.Event()
        self._keep_alive_thread = threading.Thread(target=self._keep_alive)
        self._keep_alive_event = threading.Event()
        self._transport = TcpTransport(data_received_cb=self._data_received_cb,
                                       initial_cmd=activity_cmd(" "),
                                       log_level=log_level)
        self._action_handle_lock = threading.Lock()
        self._action_handles = {}
        self._is_action_active_lock = threading.Lock()
        self._is_action_active = False
        self._is_running = False
        self._last_cmd_time = 0

    def connect(self) -> bool:
        """
        Creates a network connection with the robot, i.e. a TCP socket stream.
        This method must be called before controlling the robot.

        :return: whether the connection was successful or not.
        """
        self._log.info("Connecting to robot...")
        self.is_connected = self._transport.connect()
        if self.is_connected:
            self.animate(
                Animation.eye_lid_open
            )  # the animation that plays right after Einstein connects gets stopped
            # just as he is closing his eyes
            self._is_running = True
            self._read_thread.start()
            time.sleep(1)
            self._last_cmd_time = time.time()
            self._keep_alive_thread.start()
            time.sleep(2)
            self._log.info("Connected.")
        return self.is_connected

    def disconnect(self) -> None:
        """
        Close the network connection with the robot. This method must be called
        before the program finishes.

        :return: None.
        """
        self._log.info("Disconnecting from robot...")
        self._is_running = False
        self._transport.close()
        self._read_event.set()
        if self._read_thread.is_alive():
            self._read_thread.join()
        self._keep_alive_event.set()  # Will cancel keep alive from sleeping
        if self._keep_alive_thread.is_alive():
            self._keep_alive_thread.join()
        self._log.info("Disconnected.")

    def say(self,
            text: str,
            block: bool = True,
            done_cb: Callable[[], None] = None) -> ActionHandle:
        """
        An action to make the robot speak.

        :param text: the text for the robot to speak.
        :param block: whether to block until the action has finished.
        :param done_cb: a callback to be triggered when the action has completed.
        :return: an action handle.
        """
        self._set_action_start()
        builder = say(text)
        handle = ActionHandle()
        handle.add_callback(done_cb)
        handle.add_callback(self._set_action_done)
        self._add_action_handle(handle)
        cmd = activity_cmd(builder.build() + callback_end(handle.id).build())
        self._transport.send(cmd)
        if block:
            self.wait(handle)
        return handle

    def walk_forward(self,
                     steps: int = 4,
                     block: bool = True,
                     done_cb: Callable[[], None] = None) -> ActionHandle:
        """
        An action to make the robot walk forward a given number of steps.

        :param steps: the number of steps to take. Must be between 1 and 10
            steps. A step means both feet step forward once time each.
        :param block: whether to block until the action has finished.
        :param done_cb: a callback to be triggered when the action has completed.
        :return: an action handle.
        """
        self._set_action_start()
        builder = walk_forward(steps=steps)
        handle = ActionHandle(timeout=builder.duration())
        handle.add_callback(done_cb)
        handle.add_callback(self._set_action_done)
        cmd = activity_cmd(builder.build())
        self._transport.send(cmd)
        handle.start_timer()
        if block:
            self.wait(handle)
        return handle

    def walk_backward(self,
                      steps: int = 4,
                      block: bool = True,
                      done_cb: Callable[[], None] = None) -> ActionHandle:
        """
        An action to make the robot walk backward a given number of steps.

        :param steps: the number of steps to take. Must be between 1 and 10
            steps. A step means both feet step backward once time each.
        :param block: whether to block until the action has finished.
        :param done_cb: a callback to be triggered when the action has completed.
        :return: an action handle.
        """
        self._set_action_start()
        builder = walk_backward(steps=steps)
        handle = ActionHandle(timeout=builder.duration())
        handle.add_callback(done_cb)
        handle.add_callback(self._set_action_done)
        cmd = activity_cmd(builder.build())
        self._transport.send(cmd)
        handle.start_timer()
        if block:
            self.wait(handle)
        return handle

    def walk_left(self,
                  steps: int = 4,
                  block: bool = True,
                  done_cb: Callable[[], None] = None) -> ActionHandle:
        """
        An action to make the robot walk left a given number of steps.

        :param steps: the number of steps to take. Must be between 1 and 10
            steps. A step means the right foot takes a single step, making the
            robot walk left.
        :param block: whether to block until the action has finished.
        :param done_cb: a callback to be triggered when the action has completed.
        :return: an action handle.
        """
        self._set_action_start()
        builder = walk_left(steps=steps)
        handle = ActionHandle(timeout=builder.duration())
        handle.add_callback(done_cb)
        handle.add_callback(self._set_action_done)
        cmd = activity_cmd(builder.build())
        self._transport.send(cmd)
        handle.start_timer()
        if block:
            self.wait(handle)
        return handle

    def walk_right(self,
                   steps: int = 4,
                   block: bool = True,
                   done_cb: Callable[[], None] = None) -> ActionHandle:
        """
        An action to make the robot walk right a given number of steps.

        :param steps: the number of steps to take. Must be between 1 and 10
            steps. A step means the left foot takes a single step, making the
            robot walk right.
        :param block: whether to block until the action has finished.
        :param done_cb: a callback to be triggered when the action has completed.
        :return: an action handle.
        """
        self._set_action_start()
        builder = walk_right(steps=steps)
        handle = ActionHandle(timeout=builder.duration())
        handle.add_callback(done_cb)
        handle.add_callback(self._set_action_done)
        cmd = activity_cmd(builder.build())
        self._transport.send(cmd)
        handle.start_timer()
        if block:
            self.wait(handle)
        return handle

    def animate(self,
                animation: Animation,
                block: bool = True,
                done_cb: Callable[[], None] = None) -> ActionHandle:
        """
        An action to make the robot perform an animation.

        :param animation: the type of animation to perform.
        :param block: whether to block until the action has finished.
        :param done_cb: a callback to be triggered when the action has completed.
        :return: an action handle.
        """
        self._set_action_start()
        builder = self.__animation_map[animation]()
        handle = ActionHandle()
        handle.add_callback(done_cb)
        handle.add_callback(self._set_action_done)
        self._add_action_handle(handle)
        # For the TE callback fire after motors, you *need a space* and then the <PA> command before <TE=...>
        # this forces the TE command to wait until after the robot has spoken the space.
        cmd = activity_cmd(builder.build() + " " +
                           wait_for_motors_and_speaking().build() +
                           callback_end(handle.id).build())
        self._transport.send(cmd)
        if block:
            self.wait(handle)
        return handle

    def do(self,
           *commands,
           block: bool = True,
           done_cb: Callable[[], None] = None) -> ActionHandle:
        """
        An action to send an arbitrary list of commands to the robot.

        :param commands: the commands to send.
        :param block: whether to block until the action has finished.
        :param done_cb: a callback to be triggered when the action has completed.
        :return: an action handle.
        """
        self._set_action_start()
        cmd_list = [cmd for cmd in commands]
        builder = command_list(*cmd_list)
        handle = ActionHandle()
        handle.add_callback(done_cb)
        handle.add_callback(self._set_action_done)
        self._add_action_handle(handle)
        # For the TE callback fire after motors, you *need a space* and then the <PA> command before <TE=...>
        # this forces the TE command to wait until after the robot has spoken the space.
        cmd = activity_cmd(builder.build() + " " +
                           wait_for_motors_and_speaking().build() +
                           callback_end(handle.id).build())
        self._transport.send(cmd)
        if block:
            self.wait(handle)
        return handle

    def wait(self, *action_handles) -> None:
        """
        Block until the given action handles have finished.

        :param action_handles:
        :return: None.
        """
        for handle in action_handles:
            handle.wait()

    def __enter__(self):
        self.connect()
        return self

    def __exit__(self, type, value, traceback):
        self.disconnect()

    def _set_action_start(self):
        # Mark that an action is in flight (read by the keep-alive loop).
        with self._is_action_active_lock:
            self._is_action_active = True

    def _set_action_done(self):
        # Record completion time and clear the in-flight flag.
        with self._is_action_active_lock:
            self._last_cmd_time = time.time()
            self._is_action_active = False

    def _add_action_handle(self, handle: ActionHandle) -> None:
        with self._action_handle_lock:
            # The int counts how many completion triggers have arrived.
            self._action_handles[handle.id] = (0, handle)

    def _set_action_handle_done(self, action_id: str) -> None:
        handle_to_finish = None
        with self._action_handle_lock:
            if action_id in self._action_handles:
                # These get triggered twice because the robot sends two triggers back
                # for some reason. We send the callback on the second trigger in case
                # it affects the behaviour.
                count, ah = self._action_handles[action_id]
                count += 1
                if count > 1:
                    self._action_handles.pop(action_id)
                    handle_to_finish = ah
                else:
                    self._action_handles[action_id] = (count, ah)
            else:
                self._log.warn(
                    "Robot._set_action_handle_done: ActionHandle with action_id '{}' not found"
                    .format(action_id))
        if handle_to_finish is not None:
            handle_to_finish.done()

    def _send_read_cmds(self):
        # Thread body: poll the robot for data at self._read_rate_hz.
        while self._is_running:
            for cmd in self._read_commands:
                self._transport.send(cmd)
            secs = 1. / self._read_rate_hz
            self._read_event.wait(timeout=secs)

    def _keep_alive(self):
        """
        This function makes sure that the robot doesn't say or do it's automatic
        functions whilst we are using it, because it interferes with the
        programs we want to write.

        This function is called every 9 seconds and only sends a command to the
        robot if an action isn't currently running and the last command was
        sent 9 seconds ago.
        """
        while self._is_running:
            secs_since_last_cmd = time.time() - self._last_cmd_time
            if not self._is_action_active and secs_since_last_cmd > self._keep_alive_secs:
                self._transport.send(activity_cmd(" "))
                self._last_cmd_time = time.time()
                self._log.debug("Keeping alive")
            self._keep_alive_event.wait(timeout=self._keep_alive_secs)

    def _data_received_cb(self, msg):
        self._log.debug("data_received_cb: {}".format(msg))
        # TODO: handle partial messages
        try:
            # Parse messages, there can be more than one json message returned in msg
            prev_index = 0
            msg_len = len(msg)
            while prev_index < msg_len:
                start_index = prev_index + msg[prev_index:].find('{')
                sub_msg_len = int(msg[prev_index:start_index])
                end_index = start_index + sub_msg_len
                sub_msg = msg[start_index:end_index]
                data = json.loads(sub_msg)
                # When each message has been parsed into json, update the robots state
                self._update_state(data)
                prev_index = end_index
        except ValueError as e:
            self._log.error(
                "Error decoding json for message: {}. Error: {}".format(
                    msg, e))

    def _update_state(self, data):
        # Fix: was ``self.version is ''`` -- identity comparison with a string
        # literal is implementation-dependent and raises a SyntaxWarning on
        # modern CPython; equality is the intended check.
        if "device" in data and self.version == '':
            self.version = data["device"]["version"]
        if "trigger" in data:
            trigger = data["trigger"]
            if trigger.startswith("voltage."):
                self.voltage = float(trigger.replace("voltage.", "")) / 10.
            elif trigger.startswith("cb."):
                self._log.debug("Trigger received: {}".format(trigger))
                self._set_action_handle_done(trigger)
            else:
                self._log.debug("Unknown trigger: {}".format(trigger))
return int(true_vnr) my_client = MongoClient('localhost', 27017) database = my_client['hagkaup'] coll = database['scrape1'] with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: future_to_url = dict((executor.submit(load_url, url, 240), url) for url in URLS) for future in concurrent.futures.as_completed(future_to_url): url = future_to_url[future] if future.exception() is not None: print('%r generated an exception: %s' % (url, future.exception())) log.warn(('%r generated an exception: %s' % (url,future.exception()))) else: if len(future.result()) > 10500: try: #print('%r page is %d bytes' % (url, len(future.result()))) # url_list.append(Vnr(future.result())) bob = Vnr(future.result()) #print(bob.title) #print(bob.unit) # TEST VERSION OF DB COMS. post = {"store": "hagkaup", "title": bob.title, "_id": bob.productNo, "price": bob.price, "vnr": bob.productVnr, "date": datetime.datetime.utcnow(),
class Config(object, metaclass=Singleton): def __init__(self, *args): self.arg_parser = ap = ArgParser( description="Transition-based parser for UCCA.", formatter_class=ArgumentDefaultsHelpFormatter) add_boolean_option(ap, "use-bert", default=False, description="whether to use bert embeddings") ap.add_argument("--bert-model", choices=[ "bert-base-uncased", "bert-large-uncased", "bert-base-cased", "bert-large-cased", "bert-base-multilingual-cased" ], default="bert-base-multilingual-cased") ap.add_argument("--bert-layers", type=int, nargs='+', default=[-1, -2, -3, -4]) ap.add_argument("--bert-layers-pooling", choices=["weighted", "sum", "concat"], default="weighted") ap.add_argument("--bert-token-align-by", choices=["first", "sum", "mean"], default="sum") ap.add_argument("--bert-multilingual", choices=[0], type=int) add_boolean_option( ap, "bert-use-default-word-embeddings", default=False, description="whether to use default word embeddings") ap.add_argument("--bert-dropout", type=float, default=0, choices=np.linspace(0, 0.9, num=10)) ap.add_argument("passages", nargs="*", help="passage files/directories to test on/parse") ap.add_argument("--version", action="version", version="") ap.add_argument("-C", "--config", is_config_file=True, help="configuration file to get arguments from") ap.add_argument( "-m", "--models", nargs="+", help="model file basename(s) to load/save, ensemble if >1 " "(default: <format>_<model_type>") ap.add_argument("-c", "--classifier", choices=CLASSIFIERS, default=BIRNN, help="model type") ap.add_argument("-B", "--beam", type=int, choices=(1, ), default=1, help="beam size for beam search") add_boolean_option(ap, "evaluate", "evaluation of parsed passages", short="e") add_verbose_arg(ap, help="detailed parse output") constructions.add_argument(ap) add_boolean_option(ap, "sentences", "split to sentences") add_boolean_option(ap, "paragraphs", "split to paragraphs") ap.add_argument( "--timeout", type=float, help="max number of seconds to wait for a 
single passage") group = ap.add_argument_group(title="Training parameters") group.add_argument("-t", "--train", nargs="+", default=(), help="passage files/directories to train on") group.add_argument("-d", "--dev", nargs="+", default=(), help="passage files/directories to tune on") group.add_argument( "-I", "--iterations", nargs="+", type=Iterations, default=(Iterations(50), Iterations("100 --optimizer=" + EXTRA_TRAINER)), help= "number of training iterations along with optional hyperparameters per part" ) group.add_argument("--folds", type=int, choices=(3, 5, 10), help="#folds for cross validation") group.add_argument("--seed", type=int, default=1, help="random number generator seed") add_boolean_option( group, "early-update", "early update procedure (finish example on first error)") group.add_argument( "--save-every", type=int, help="every this many passages, evaluate on dev and save model") add_boolean_option( group, "eval-test", "evaluate on test whenever evaluating on dev, but keep results hidden" ) add_boolean_option( group, "ignore-case", "pre-convert all input files to lower-case in training and test") group = ap.add_argument_group(title="Output files") group.add_argument("-o", "--outdir", default=".", help="output directory for parsed files") group.add_argument("-p", "--prefix", default="", help="output filename prefix") add_boolean_option(group, "write", "writing parsed output to files", default=True, short_no="W") group.add_argument( "-j", "--join", help= "if output format is textual, write all to one file with this basename" ) group.add_argument( "-l", "--log", help="output log file (default: model filename + .log)") group.add_argument( "--devscores", help= "output CSV file for dev scores (default: model filename + .dev.csv)" ) group.add_argument( "--testscores", help= "output CSV file for test scores (default: model filename + .test.csv)" ) group.add_argument("--action-stats", help="output CSV file for action statistics") add_boolean_option( group, 
"normalize", "apply normalizations to output in case format is UCCA", default=False) ap.add_argument( "-f", "--formats", nargs="+", choices=FILE_FORMATS, default=(), help= "input formats for creating all parameters before training starts " "(otherwise created dynamically based on filename suffix), " "and output formats for written files (each will be written; default: UCCA XML)" ) ap.add_argument("-u", "--unlabeled", nargs="*", choices=FORMATS, help="to ignore labels in") ap.add_argument( "--lang", default="en", help="two-letter language code to use as the default language") add_boolean_option( ap, "multilingual", "separate model parameters per language (passage.attrib['lang'])") group = ap.add_argument_group(title="Sanity checks") add_boolean_option(group, "check-loops", "check for parser state loop") add_boolean_option(group, "verify", "check for oracle reproducing original passage") add_boolean_option(group, "validate-oracle", "require oracle output to respect constraints", default=True) add_param_arguments(ap) group = ap.add_argument_group(title="DyNet parameters") group.add_argument("--dynet-mem", help="memory for dynet") group.add_argument("--dynet-weight-decay", type=float, default=1e-5, help="weight decay for parameters") add_boolean_option(group, "dynet-apply-weight-decay-on-load", "workaround for clab/dynet#1206", default=False) add_boolean_option(group, "dynet-gpu", "GPU for training") group.add_argument("--dynet-gpus", type=int, default=1, help="how many GPUs you want to use") add_boolean_option(group, "dynet-autobatch", "auto-batching of training examples") add_boolean_option(group, "dynet-check-validity", "check validity of expressions immediately") DYNET_ARG_NAMES.update(get_group_arg_names(group)) ap.add_argument( "-H", "--hyperparams", type=HyperparamsInitializer.action, nargs="*", help= "shared hyperparameters or hyperparameters for specific formats, " 'e.g., "shared --lstm-layer-dim=100 --lstm-layers=1" "ucca --word-dim=300"', 
default=[HyperparamsInitializer.action("shared --lstm-layers 2")]) ap.add_argument("--copy-shared", nargs="*", choices=FORMATS, help="formats whose parameters shall be " "copied from loaded shared parameters") self.args = FallbackNamespace(ap.parse_args(args if args else None)) if self.args.config: print("Loading configuration from '%s'." % self.args.config) if self.args.passages and self.args.write: os.makedirs(self.args.outdir, exist_ok=True) if self.args.models: if not self.args.log: self.args.log = self.args.models[0] + ".log" if self.args.dev and not self.args.devscores: self.args.devscores = self.args.models[0] + ".dev.csv" if self.args.passages and not self.args.testscores: self.args.testscores = self.args.models[0] + ".test.csv" elif not self.args.log: self.args.log = "parse.log" self.sub_configs = [ ] # Copies to be stored in Models so that they do not interfere with each other self._logger = self.format = self.hyperparams = self.iteration_hyperparams = None self._vocab = {} self.original_values = {} self.random = np.random self.update() def create_original_values(self, args=None): return { attr: getattr(self.args, attr) if args is None else args[attr] for attr in RESTORED_ARGS if args is None or attr in args } def set_format(self, f=None, update=False, recursive=True): if f in ( None, "text" ) and not self.format: # In update or parsing UCCA (with no extra["format"]) or plain text f = "ucca" # Default output format is UCCA if update or self.format != f: if f not in (None, "text"): self.format = f self.update_by_hyperparams() if recursive: for config in self.descendants(): config.set_format(f=f, update=update, recursive=False) def descendants(self): ret = [] configs = [self] while configs: c = configs.pop(0) ret += c.sub_configs configs += c.sub_configs return ret def is_unlabeled(self, f=None): # If just -u or --unlabeled is given then its value is [], and we want to treat that as "all formats" # If not given at all it is None, and we want to treat that 
as "no format" return self.args.unlabeled == [] or (f or self.format) in ( self.args.unlabeled or ()) def max_actions_unlabeled(self): return 6 + ( # Shift Node Reduce LeftEdge RightEdge Finish 3 if self.args.remote else 0) + ( # RemoteNode LeftRemote RightRemote 1 if self.args.swap == REGULAR else (self.args.max_swap if self.args.swap == COMPOUND else 0) ) + ( # Swap 1 if self.args.implicit else 0) + ( # Implicit 2 if self.args.node_labels and not self.args.use_gold_node_labels else 0) # Label x 2 def set_dynet_arguments(self): self.random.seed(self.args.seed) kwargs = dict(random_seed=self.args.seed) if self.args.dynet_mem: kwargs.update(mem=self.args.dynet_mem) if self.args.dynet_weight_decay: kwargs.update(weight_decay=self.args.dynet_weight_decay) if self.args.dynet_gpus and self.args.dynet_gpus != 1: kwargs.update(requested_gpus=self.args.dynet_gpus) if self.args.dynet_autobatch: kwargs.update(autobatch=True) dynet_config.set(**kwargs) if self.args.dynet_gpu: dynet_config.set_gpu() def update(self, params=None): if params: for name, value in params.items(): setattr(self.args, name, value) self.original_values.update(self.create_original_values(params)) self.hyperparams = self.create_hyperparams() for f, num in EDGE_LABELS_NUM.items(): self.hyperparams.specific[f].max_edge_labels = num amr_hyperparams = self.hyperparams.specific["amr"] for k, v in dict(node_label_dim=20, max_node_labels=1000, node_category_dim=5, max_node_categories=25).items(): if k not in amr_hyperparams and not getattr( amr_hyperparams, k, None): setattr(amr_hyperparams, k, v) self.set_format(update=True) self.set_dynet_arguments() def create_hyperparams(self): return Hyperparams( parent=self.args, **{h.name: h.args for h in self.args.hyperparams or ()}) def update_hyperparams(self, **kwargs): self.update( dict(hyperparams=[ HyperparamsInitializer(k, **v) for k, v in kwargs.items() ])) def update_iteration(self, iteration, print_message=True, recursive=True): if iteration.hyperparams: if 
print_message: print("Updating: %s" % iteration.hyperparams) self.iteration_hyperparams = iteration.hyperparams.args self.update_by_hyperparams() if recursive: for config in self.descendants(): config.update_iteration(iteration, print_message=False, recursive=False) def update_by_hyperparams(self): format_values = dict(self.original_values) for hyperparams in (self.iteration_hyperparams, self.hyperparams.specific[self.format]): if hyperparams: format_values.update({ k: v for k, v in hyperparams.items() if not k.startswith("_") }) for attr, value in sorted(format_values.items()): self.print("Setting %s=%s" % (attr, value)) setattr(self.args, attr, value) if self.format != "amr": self.args.node_labels = False self.args.node_label_dim = self.args.max_node_labels = \ self.args.node_category_dim = self.args.max_node_categories = 0 if self.is_unlabeled(): self.args.max_edge_labels = self.args.edge_label_dim = 0 self.args.max_action_labels = self.max_actions_unlabeled() else: self.args.max_action_labels = max(self.args.max_action_labels, 6 * self.args.max_edge_labels) @property def line_end(self): return "\n" if self.args.verbose > 2 else " " # show all in one line unless verbose @property def passage_word(self): return "sentence" if self.args.sentences else "paragraph" if self.args.paragraphs else "passage" @property def passages_word(self): return " %ss" % self.passage_word def log(self, message): try: if self._logger is None: FileHandler(self.args.log, format_string= "{record.time:%Y-%m-%d %H:%M:%S} {record.message}" ).push_application() if self.args.verbose > 1: StderrHandler(bubble=True).push_application() self._logger = Logger("tupa") self._logger.warn(message) except OSError: pass def vocab(self, filename=None, lang=None): if filename is None: args = self.hyperparams.specific[lang] if lang else self.args filename = args.vocab if not filename: return None vocab = self._vocab.get(filename) if vocab: return vocab vocab = load_enum(filename) self._vocab[filename] = 
vocab return vocab def print(self, message, level=3): if self.args.verbose >= level: try: print(message() if hasattr(message, "__call__") else message, flush=True) except UnicodeEncodeError: pass def save(self, filename): out_file = filename + ".yml" print("Saving configuration to '%s'." % out_file) with open(out_file, "w") as f: name = None values = [] for arg in shlex.split(str(self), "--") + ["--"]: if arg.startswith("--"): if name and name not in ("train", "dev"): if len(values) > 1: values[0] = "[" + values[0] values[-1] += "]" elif name.startswith("no-"): name = name[3:] values = ["false"] print("%s: %s" % (name, ", ".join(values) or "true"), file=f) name = arg[2:] values = [] else: values.append(arg) def copy(self): cls = self.__class__ ret = cls.__new__(cls) ret.arg_parser = self.arg_parser ret.args = deepcopy(self.args) ret.original_values = deepcopy(self.original_values) ret.hyperparams = deepcopy(self.hyperparams) ret.iteration_hyperparams = deepcopy(self.iteration_hyperparams) ret.format = self.format ret.random = self.random ret._logger = self._logger ret._vocab = dict(self._vocab) ret.sub_configs = [] self.sub_configs.append(ret) return ret def args_str(self, args): return [ "--" + ("no-" if v is False else "") + k.replace("_", "-") + ("" if v is False or v is True else (" " + str(" ".join(map(str, v)) if hasattr(v, "__iter__") and not isinstance(v, str) else v))) for (k, v) in sorted(args.items()) if v not in (None, (), "", self.arg_parser.get_default(k)) and not k.startswith("_") and (args.node_labels or ( "node_label" not in k and "node_categor" not in k)) and ( args.swap or "swap_" not in k) and ( args.swap == COMPOUND or k != "max_swap") and (not args.require_connected or k != "orphan_label") and ( args.classifier == SPARSE or k not in SPARSE_ARG_NAMES) and ( args.classifier in NN_CLASSIFIERS or k not in NN_ARG_NAMES | DYNET_ARG_NAMES) and k != "passages" ] def __str__(self): self.args.hyperparams = [ HyperparamsInitializer(name, **args.vars()) 
for name, args in self.hyperparams.items() ] return " ".join(list(self.args.passages) + self.args_str(self.args))
# NOTE(review): this chunk begins mid-function — the enclosing ``def`` and the
# definitions of dir_path/logfile/file_level/format_string/max_size/
# backup_count are outside this view.
    if not os.path.exists(dir_path):
        os.makedirs(dir_path)
    # Install a size-rotating logbook file handler as the application-wide
    # handler (pushed onto logbook's global handler stack).
    RotatingFileHandler(logfile,
                        mode='a',
                        encoding='utf-8',
                        level=file_level,
                        format_string=format_string,
                        delay=False,
                        max_size=max_size,
                        backup_count=backup_count,
                        filter=None,
                        bubble=True).push_application()
    return None


if __name__ == '__main__':
    # Smoke test: configure logging from the project TOML file and emit one
    # record at every logbook level.
    from ziyan.utils.util import get_conf
    from logbook import Logger
    conf = get_conf('../text_file/ziyan-main-conf.toml')['log_configuration']
    setup_logger(conf)
    log = Logger('test')
    log.debug(conf)
    log.debug('debug:test')
    log.info('info:test')
    log.notice('notice:test')
    log.warn('warning:test')
    log.error('error:test')
    log.critical('critical:test')
class RollBot:
    """IRC bot: connects to a server, registers decorated commands, and
    dispatches prefixed channel/private messages to them."""

    CONFIG_LOCATION = "./config.json"

    def __init__(self):
        self.command_list = {}
        self.logger = Logger('RollBot', level=2)
        self.logger.info("RollBot started.")
        self.last_ping = None
        self.registered = False
        with open(self.CONFIG_LOCATION) as f:
            self.config = json.load(f)
        self.nick = self.config['botnick']
        self.owner = self.config['owner']['nick']
        self.channels = set([x.lower() for x in self.config['channel']])
        self.command_prefix = self.config['prefix']
        # ``commands`` is the module-level registry filled by the @command /
        # @owner_command decorators; bind each name to its bound method.
        self.command_list = {x: getattr(self, x) for x in commands}
        print("Added {} commands: {}".format(
            len(self.command_list), ", ".join(self.command_list.keys())))
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket_file = self.socket.makefile(encoding="utf-8",
                                                errors="ignore")
        self.warn_interval = 5  # seconds
        self.last_warn = -self.warn_interval  # To allow using the warn command instantly.

    def send_message(self, channel, message):
        """Send a PRIVMSG to a channel or nick."""
        message_template = "PRIVMSG {} :{}"
        self.send_raw(message_template.format(channel, message))

    def send_ping(self, ping_message):
        """Answer a server PING and record the time."""
        message_template = "PONG : {}"
        self.send_raw(message_template.format(ping_message))
        self.update_ping_time()

    def join_channel(self, channel):
        if channel:
            message_template = "JOIN {}"
            self.send_raw(message_template.format(channel))

    def leave_channel(self, channel):
        if channel in self.channels:
            message_template = "PART {}"
            self.send_raw(message_template.format(channel))
            self.channels.remove(channel)

    def connect(self):
        """Connect, authenticate, register the nick, then enter the read loop."""
        server_information = (self.config['server'], self.config['port'])
        self.socket.connect(server_information)
        self.send_raw("PASS " + self.config['password'])
        self.send_raw("USER {} {} {} :{}".format(self.nick, self.nick,
                                                 self.nick, "rollbot"))
        self.send_raw("NICK " + self.nick)
        self.run_loop()

    def get_message_from_server(self):
        return self.socket_file.readline()

    def run_loop(self):
        """Main read loop: parse raw IRC lines and dispatch by message type."""
        # Extracts all appropriate groups from a raw IRC message
        message_regex = r"^(?:[:](?P<prefix>\S+) )" \
                        r"?(?P<type>\S+)" \
                        r"(?: (?!:)(?P<destination>.+?))" \
                        r"?(?: [:](?P<message>.+))?$"
        compiled_message = re.compile(message_regex)
        print(compiled_message)
        while True:
            try:
                message = self.get_message_from_server()
                self.logger.debug("Received server message: {}", message)
                parsed_message = compiled_message.finditer(message)
                # Extract all the named groups into a dict
                message_dict = [m.groupdict() for m in parsed_message][0]
                source_nick = ""
                hostmask = ""
                ircmsg = message.strip('\n\r')  # remove new lines
                print(ircmsg.encode("ascii", errors="ignore"))
                if message_dict['prefix'] is not None:
                    if "!" in message_dict['prefix']:  # Is the prefix from a nickname?
                        hostmask = message_dict['prefix'].split("@")[1]
                        source_nick = message_dict['prefix'].split("!")[0]
                if message_dict['type'] == "PING":
                    self.send_ping(message_dict['message'])
                if message_dict['type'] == "PRIVMSG":
                    self.handle_message(hostmask, source_nick,
                                        message_dict['destination'],
                                        message_dict['message'])
                    if source_nick != "TagChatBot":
                        # Record last-seen info for the !seen command.
                        mods[source_nick] = {
                            "date": str(arrow.utcnow()),
                            "message": message_dict['message'],
                            "channel": message_dict['destination']
                        }
                    # Deliver any queued !tell messages for this nick.
                    while tell_message.contains(Query().target.test(
                            lambda s: s.lower() == source_nick.lower()
                    )) and message_dict['destination'] == "#TagProMods":
                        name = tell_message.get(Query().target.test(
                            lambda s: s.lower() == source_nick.lower()))
                        date_remove = name['date']
                        self.send_message(
                            source_nick,
                            "{}, {} left a message: \"{}\"".format(
                                source_nick, name['source'], name['message']))
                        tell_message.remove(Query().date.test(
                            lambda s: s.lower() == date_remove.lower()))
                        time.sleep(.1)
                if message_dict['type'] == "001":  # Registration confirmation message
                    self.registered = True
                    self.logger.info("{} connected to server successfully.",
                                     self.nick)
                    for channel in self.config['channel']:
                        self.logger.info("Attempting to join {}", channel)
                        self.join_channel(channel)
            except socket.timeout:
                self.logger.error("Disconnected. Attempting to reconnect.")
                self.socket.close()
                self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                self.connect()

    def handle_message(self, hostmask, source, destination, message):
        is_command = message.startswith(self.config['prefix'])
        if is_command:
            self.handle_command(hostmask, source, destination, message)

    def handle_command(self, hostmask, source, destination, message):
        """Look up and invoke a registered command; relay its return value."""
        try:
            split_message = message[1:].split()
            command_key = split_message[0].lower()
        except IndexError:
            self.logger.info("No Command")
            return
        arguments = split_message[1:]
        reply_to = destination
        try:
            if destination == self.nick:
                # If it's a private message, reply to the source. Otherwise
                # it's a channel message and reply there.
                reply_to = source
            if command_key in self.command_list:
                self.logger.info("Received command '{}' from {}", command_key,
                                 source)
                command = self.command_list[command_key]
                return_message = command(hostmask, source, reply_to,
                                         *arguments)
                if return_message is not None:
                    if isinstance(return_message, str):
                        self.send_message(reply_to, return_message)
                    else:
                        # Otherwise it's a list or a tuple of messages.
                        for message in return_message:
                            self.send_message(reply_to, message)
            # Unknown commands are deliberately ignored (no error reply).
        except Exception as e:
            # Boundary catch: keep the bot alive on any command failure.
            self.send_message(
                reply_to,
                "Sorry, I encountered an error while running that command.")
            print("Exception in command {}: {}".format(command_key, e))

    def send_raw(self, message):
        return self.socket.send((message + "\n").encode("utf-8"))

    def update_ping_time(self):
        self.last_ping = time.time()

    # Commands
    @command
    def commands(self, hostmask, source, reply_to, *args):
        return "Available commands: {}".format(", ".join(
            sorted(self.command_list.keys())))

    @command
    def netsplit(self, hostmask, source, reply_to, *args):
        return "technically we all netsplit http://pastebin.com/mPanErhR"

    @command
    def mods(self, hostmask, source, reply_to, *args):
        """Notify moderators in #TagProMods of a request made in #TPmods."""
        if reply_to != "#TPmods":
            if source in [
                    "WOLOWOLO", "justanotheruser", "MRCOW", "LEBRONxJAMES",
                    "defense_bot"
            ]:
                return "can you not"
            else:
                return "Sorry! You must use this command in the channel #TPmods | Double click the channel to join."
        else:
            if ' '.join(args) == "":
                return "{} - Please recall !mods with a reason to notify a moderator.".format(
                    source)
            else:
                self.send_raw("NAMES #TPmods")
                message = self.get_message_from_server()
                ircmsg = message.strip('\n\r')
                try:
                    # Extract the requester's IP from the hostmask, if present,
                    # to build a moderation link.
                    actualip = "{}".format(
                        re.findall(r'\b(?:\d{1,3}[\.-]){3}\d{1,3}\b',
                                   hostmask)[0])
                    actualipp = actualip.replace("-", ".")
                    ippfinal = " ( http://tagpro-origin.koalabeast.com/moderate/ips/{} )".format(
                        actualipp)
                except IndexError:
                    ippfinal = ""
                if ircmsg.find(' 353 {} '.format(self.nick)) != -1:
                    # 353 = NAMES reply; voiced (+) users are treated as mods.
                    namelist = ircmsg.split(":")[2]
                    modlist = " ".join(x[1:] for x in namelist.split()
                                       if x.startswith('+'))
                    oplist = ""
                    modmsg = "- " + ' '.join(args)
                    if ' '.join(args) == "":
                        modmsg = ""
                    if modlist == "" and oplist == "":
                        self.send_raw(
                            "PRIVMSG #TPmods :Sorry {}, all mods are currently AFK. You can stick around or leave your request for one to find later."
                            .format(source))
                    else:
                        self.send_raw(
                            "PRIVMSG #TagProMods :Mods - {} {}".format(
                                modlist, oplist))
                        self.send_raw(
                            "PRIVMSG #TPmods :{} - the mods have received your request. Please stay patient while waiting. Make sure to state the user/issue to speed up the request process."
                            .format(source))
                        self.send_raw(
                            "PRIVMSG #TagProMods :Mod request from {}{} in {} {}"
                            .format(source, ippfinal, reply_to, modmsg))

    @command
    def check(self, hostmask, source, reply_to, *args):
        """Query getipintel.net for the probability an IP is a proxy/VPN."""
        ipaddress = ' '.join(args)
        if re.match('^[-0-9.]*$', ipaddress):
            ipaddress = ipaddress.replace("-", ".")
        else:
            return "Sorry, that's not an IP address!"
        with open('email.txt') as e:
            email = e.read().strip()
        page = requests.get(
            'http://check.getipintel.net/check.php?ip={}&contact={}'.format(
                ipaddress, email))
        return "{}: chances of naughty IP = {}%".format(
            source, int(float(re.findall("(\d+(?:.\d+)?)", page.text)[0]) * 100))

    @command
    def seen(self, hostmask, source, reply_to, *args):
        name = ' '.join(args)
        if name not in mods:
            return "Sorry, haven't seen that weenie"
        if name in mods:
            timeseen = arrow.get(mods[name]["date"])
            formattime = timeseen.format(('YYYY-MM-DD HH:mm:ss ZZ'))
            humantime = timeseen.humanize()
            return "{} was seen {} ({}) saying {}".format(
                name, humantime, formattime, mods[name]["message"])

    @command
    def tell(self, hostmask, source, reply_to, *args):
        """Queue a message for *target*, delivered when they next speak."""
        target = args[0]
        message = ' '.join(args[1:])
        # BUG FIX: the original compared ``s.lower()`` against the bound
        # method ``target.lower`` (missing call parentheses) — always False —
        # and wrapped the lookup in a dead ``if True:`` block whose ``nick``
        # results were never used. The per-sender cap below is the only check
        # that matters.
        if tell_message.count(
                Query().target.test(lambda s: s.lower() == target.lower())
                & Query().source.test(
                    lambda s: s.lower() == source.lower())) <= 6:
            tell_message.insert({
                "target": target,
                "message": message,
                'date': str(arrow.utcnow()),
                'source': source
            })
            return "Ok! I'll pass that on when they become active"
        else:
            return "U message that person like wayyy too much"

    @command
    def optin(self, hostmask, source, reply_to, *args):
        """Give the caller voice (+) in the moderation channel they asked in."""
        if reply_to not in ["#TagProMods", "#tagprochat"]:
            return "Sorry! This command is not authorized here."
        if reply_to == "#TagProMods":
            self.send_raw("NAMES #TPmods")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            duty = "duty"
            if ircmsg.find('+{}'.format(source)) != -1:
                print(ircmsg.find('+{}'.format(source)))
                return "You are already on {}, {}.".format(duty, source)
            elif ircmsg.find('{}'.format(source)) != -1:
                print(ircmsg.find('{}'.format(source)))
                self.send_raw(
                    "PRIVMSG Chanserv :voice #TPmods {}".format(source))
                return "You are now on {}, {}.".format(duty, source)
            else:
                return "You are not in #TPmods, {}!".format(source)
        if reply_to == "#tagprochat":
            self.send_raw("NAMES #tagprochat")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            duty = "duty"
            if ircmsg.find('+{}'.format(source)) != -1:
                return "You are already on {}, {}.".format(duty, source)
            elif ircmsg.find('{}'.format(source)) != -1:
                self.send_raw(
                    "PRIVMSG Chanserv :voice #tagprochat {}".format(source))
                return "You are now on {}, {}.".format(duty, source)
            else:
                return "You are not in #tagprochat, {}!".format(source)

    @command
    def optout(self, hostmask, source, reply_to, *args):
        """Remove the caller's voice (+) in the channel they asked in."""
        if reply_to not in ["#TagProMods", "#tagprochat"]:
            return "Sorry! This command is not authorized here."
        if reply_to == "#TagProMods":
            self.send_raw("NAMES #TPmods")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            duty = "duty"
            if source == "Hootie":
                duty = "dootie"
            if ircmsg.find('+{}'.format(source)) != -1:
                self.send_raw(
                    "PRIVMSG Chanserv :devoice #TPmods {}".format(source))
                if source.lower() in ['cignul9']:
                    return "Eat my ass {}".format(source)
                else:
                    return "You are now off {}, {}.".format(duty, source)
            elif ircmsg.find('{}'.format(source)) != -1:
                return "You are already off {}, {}.".format(duty, source)
            else:
                return "You are not in #TPmods, {}!".format(source)
        if reply_to == "#tagprochat":
            self.send_raw("NAMES #tagprochat")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            duty = "duty"
            if source == "Hootie":
                duty = "dootie"
            if ircmsg.find('+{}'.format(source)) != -1:
                self.send_raw(
                    "PRIVMSG Chanserv :devoice #tagprochat {}".format(source))
                if source.lower() in ['cignul9']:
                    return "{} is a dink".format(source)
                else:
                    return "You are now off {}, {}.".format(duty, source)
            elif ircmsg.find('{}'.format(source)) != -1:
                return "You are already off {}, {}.".format(duty, source)
            else:
                return "You are not in #tagprochat, {}!".format(source)

    @command
    def op(self, hostmask, source, reply_to, *args):
        if reply_to != "#TagProMods":
            return "Sorry! This command is not authorized here."
        else:
            self.send_raw("NAMES #TPmods")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            if ircmsg.find('@{}'.format(source)) != -1:
                return "You are already an operator, {}.".format(source)
            elif ircmsg.find('{}'.format(source)) != -1:
                self.send_raw("PRIVMSG Chanserv :op #TPmods {}".format(source))
                return "You are now an operator, {}.".format(source)
            else:
                return "You are not in #TPmods, {}!".format(source)

    @command
    def deop(self, hostmask, source, reply_to, *args):
        if reply_to != "#TagProMods":
            return "Sorry! This command is not authorized here."
        else:
            self.send_raw("NAMES #TPmods")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            if ircmsg.find('@{}'.format(source)) != -1:
                self.send_raw(
                    "PRIVMSG Chanserv :deop #TPmods {}".format(source))
                return "You are no longer an operator, {}.".format(source)
            elif ircmsg.find('{}'.format(source)) != -1:
                return "You are not an operator, {}.".format(source)
            else:
                return "You are not in #TPmods, {}!".format(source)

    @command
    def ticket(self, hostmask, source, reply_to, tickett=None, *args):
        if tickett is None:
            return "http://support.koalabeast.com/#/appeal"
        else:
            return "http://support.koalabeast.com/#/appeal/{}".format(tickett)

    @command
    def ip(self, hostmask, source, reply_to, *args):
        ipaddress = ' '.join(args)
        if re.match('^[-0-9.]*$', ipaddress):
            return ipaddress.replace("-", ".")
        else:
            return "Sorry, that's not an IP address!"

    @command
    def warn(self, hostmask, source, reply_to, *args):
        """Rate-limited off-topic warning notice."""
        if reply_to != "#TagProMods":
            return "Sorry! This command is not authorized here."
        if time.time() - self.last_warn < self.warn_interval:
            return "You're using that too much."
        self.send_raw(
            "NOTICE #TPmods :Please take off-topic discussion to #tagpro")
        self.last_warn = time.time()

    @owner_command
    def quit(self, hostmask, source, reply_to, *args):
        self.logger.warn("Shutting down by request of {}", source)
        self.send_raw("QUIT :{}'s out!".format(self.nick))
        self.socket.shutdown(1)
        self.socket.close()
        sys.exit()

    @owner_command
    def join(self, hostmask, source, reply_to, channel=None, *args):
        if channel is None:
            return "Please specify a channel you wish me to join."
        else:
            self.logger.info("Joining {} by request of {}".format(
                channel, source))
            self.join_channel(channel)

    @owner_command
    def part(self, hostmask, source, reply_to, channel=None, *args):
        if reply_to == source and channel is None:
            # If this was a private message, we have no channel to leave.
            return "Sorry, you must run this command in a channel or provide a channel as an argument."
        elif channel is not None:
            if channel in self.channels:
                self.leave_channel(channel)
                return "Left channel {}!".format(channel)
            else:
                return "I don't believe I'm in that channel!"
        else:
            # It was a channel message, so let's leave.
            self.leave_channel(reply_to)

    @owner_command
    def say(self, hostmask, source, reply_to, channel=None, *args):
        if reply_to != source:
            return "{} {}".format(channel, ' '.join(args))
        elif channel is not None:
            if channel in self.channels:
                self.send_message(channel, ' '.join(args))
            else:
                return "Whoops! I'm not in the channel {}".format(channel)
        else:
            return "The format is: |say <channel> <message>"
yield line if not line and f.tell() == num_lines: break def _parse_data(self, tracepoint, data): """ Parse payload(data) for tracepoint - if we have it. """ rv = data try: rv = PARSERS[tracepoint](data) except Exception, e: rv = PARSERS[tracepoint](data) except ParserError, e: log.exception(e) log.warn('Error parsing {tp} with {data}'.format(tp=tracepoint, data=data)) finally: return rv if rv else data def _check_tracer(self, line): """ Return tracer (typically 'nop') """ match = re.match(self._TRACER_PATTERN, line.strip()) if match: return match.groupdict()['tracer'] return None def _check_buffer_entries(self, line): """ Return tuple of (entries-in-buffer, entries-written)
class RollBot:
    """
    IRC moderation bot for the TagPro channels.

    Loads connection settings from CONFIG_LOCATION, speaks the IRC protocol
    over a raw socket, and dispatches chat commands (methods decorated with
    @command / @owner_command, collected in the module-level ``commands``
    registry) issued by users in the configured channels.
    """

    CONFIG_LOCATION = "./config.json"

    def __init__(self):
        """Load configuration, build the command table and open the socket."""
        self.command_list = {}
        self.logger = Logger('RollBot', level=2)
        self.logger.info("RollBot started.")
        self.last_ping = None
        self.registered = False
        with open(self.CONFIG_LOCATION) as f:
            self.config = json.load(f)
        self.nick = self.config['botnick']
        self.owner = self.config['owner']['nick']
        self.channels = {x.lower() for x in self.config['channel']}
        self.command_prefix = self.config['prefix']
        # 'commands' is a module-level registry populated by the @command
        # decorator; map each registered name to its bound method.
        self.command_list = {x: getattr(self, x) for x in commands}
        print("Added {} commands: {}".format(len(self.command_list),
                                             ", ".join(self.command_list.keys())))
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.socket_file = self.socket.makefile(encoding="utf-8", errors="ignore")
        self.warn_interval = 5  # seconds
        self.last_warn = -self.warn_interval  # To allow using the warn command instantly.

    def send_message(self, channel, message):
        """Send a PRIVMSG to a channel or nick."""
        message_template = "PRIVMSG {} :{}"
        self.send_raw(message_template.format(channel, message))

    def send_ping(self, ping_message):
        """Answer a server PING with a PONG and record the time."""
        message_template = "PONG : {}"
        self.send_raw(message_template.format(ping_message))
        self.update_ping_time()

    def join_channel(self, channel):
        """JOIN a channel (no-op for an empty/None channel name)."""
        if channel:
            message_template = "JOIN {}"
            self.send_raw(message_template.format(channel))

    def leave_channel(self, channel):
        """PART a channel we are currently in and forget it."""
        if channel in self.channels:
            message_template = "PART {}"
            self.send_raw(message_template.format(channel))
            self.channels.remove(channel)

    def connect(self):
        """Connect and register with the IRC server, then enter the read loop."""
        server_information = (self.config['server'], self.config['port'])
        self.socket.connect(server_information)
        self.send_raw("PASS " + self.config['password'])
        self.send_raw("USER {} {} {} :{}".format(self.nick, self.nick,
                                                 self.nick, "rollbot"))
        self.send_raw("NICK " + self.nick)
        self.run_loop()

    def get_message_from_server(self):
        """Read one raw line from the server."""
        return self.socket_file.readline()

    def run_loop(self):
        """Main receive loop: parse raw IRC lines and react to them."""
        # Extracts all appropriate groups from a raw IRC message.
        message_regex = r"^(?:[:](?P<prefix>\S+) )" \
                        r"?(?P<type>\S+)" \
                        r"(?: (?!:)(?P<destination>.+?))" \
                        r"?(?: [:](?P<message>.+))?$"
        compiled_message = re.compile(message_regex)
        print(compiled_message)
        while True:
            try:
                message = self.get_message_from_server()
                self.logger.debug("Received server message: {}", message)
                parsed_message = compiled_message.finditer(message)
                # Extract all the named groups into a dict.
                message_dict = [m.groupdict() for m in parsed_message][0]
                source_nick = ""
                hostmask = ""
                ircmsg = message.strip('\n\r')  # remove new lines
                print(ircmsg.encode("ascii", errors="ignore"))
                if message_dict['prefix'] is not None:
                    if "!" in message_dict['prefix']:  # Is the prefix from a nickname?
                        hostmask = message_dict['prefix'].split("@")[1]
                        source_nick = message_dict['prefix'].split("!")[0]
                if message_dict['type'] == "PING":
                    self.send_ping(message_dict['message'])
                if message_dict['type'] == "PRIVMSG":
                    self.handle_message(hostmask, source_nick,
                                        message_dict['destination'],
                                        message_dict['message'])
                    # Track last-seen info for everyone except the relay bot.
                    if source_nick != "TagChatBot":
                        mods[source_nick] = {"date": str(arrow.utcnow()),
                                             "message": message_dict['message'],
                                             "channel": message_dict['destination']}
                    # Deliver any queued !tell messages for this speaker.
                    # NOTE(review): tell_message looks like a TinyDB table;
                    # confirm .contains/.get/.remove semantics against its docs.
                    while tell_message.contains(Query().target.test(lambda s: s.lower() == source_nick.lower())) and message_dict['destination'] == "#TagProMods":
                        name = tell_message.get(Query().target.test(lambda s: s.lower() == source_nick.lower()))
                        date_remove = name['date']
                        self.send_message(source_nick, "{}, {} left a message: \"{}\"".format(source_nick, name['source'], name['message']))
                        tell_message.remove(Query().date.test(lambda s: s.lower() == date_remove.lower()))
                        time.sleep(.1)
                if message_dict['type'] == "001":  # Registration confirmation message
                    self.registered = True
                    self.logger.info("{} connected to server successfully.", self.nick)
                    for channel in self.config['channel']:
                        self.logger.info("Attempting to join {}", channel)
                        self.join_channel(channel)
            except socket.timeout:
                self.logger.error("Disconnected. Attempting to reconnect.")
                self.socket.close()
                self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                self.connect()

    def handle_message(self, hostmask, source, destination, message):
        """Forward a PRIVMSG to the command dispatcher if it uses the prefix."""
        is_command = message.startswith(self.config['prefix'])
        if is_command:
            self.handle_command(hostmask, source, destination, message)

    def handle_command(self, hostmask, source, destination, message):
        """Parse a prefixed message and invoke the matching command method."""
        try:
            split_message = message[1:].split()
            command_key = split_message[0].lower()
        except IndexError:
            self.logger.info("No Command")
            return
        arguments = split_message[1:]
        reply_to = destination
        try:
            if destination == self.nick:
                # If it's a private message, reply to the source. Otherwise
                # it's a channel message and reply there.
                reply_to = source
            if command_key in self.command_list:
                self.logger.info("Received command '{}' from {}", command_key, source)
                command = self.command_list[command_key]
                return_message = command(hostmask, source, reply_to, *arguments)
                if return_message is not None:
                    if isinstance(return_message, str):  # Is it a string?
                        self.send_message(reply_to, return_message)  # If so, just send it along.
                    else:  # Otherwise it's a list or a tuple
                        for message in return_message:  # So let's loop over them all
                            self.send_message(reply_to, message)  # And send them.
            else:
                pass  # Unknown commands are deliberately ignored (no error reply).
        except Exception as e:
            self.send_message(reply_to, "Sorry, I encountered an error while running that command.")
            print("Exception in command {}: {}".format(command_key, e))

    def send_raw(self, message):
        """Write a raw IRC line (newline-terminated, UTF-8) to the socket."""
        return self.socket.send((message + "\n").encode("utf-8"))

    def update_ping_time(self):
        """Remember when we last answered a PING."""
        self.last_ping = time.time()

    # Commands

    @command
    def commands(self, hostmask, source, reply_to, *args):
        """List every registered command."""
        return "Available commands: {}".format(", ".join(sorted(self.command_list.keys())))

    @command
    def netsplit(self, hostmask, source, reply_to, *args):
        """Canned netsplit joke link."""
        return "technically we all netsplit http://pastebin.com/mPanErhR"

    @command
    def mods(self, hostmask, source, reply_to, *args):
        """Page the on-duty moderators with the caller's reason and IP lookup link.

        NOTE: this method name shadows the module-level ``mods`` dict; the
        dict is not referenced inside this method.
        """
        if reply_to != "#TPmods":
            if source in ["WOLOWOLO", "justanotheruser", "MRCOW", "LEBRONxJAMES", "defense_bot"]:
                return "can you not"
            else:
                return "Sorry! You must use this command in the channel #TPmods | Double click the channel to join."
        else:
            if ' '.join(args) == "":
                return "{} - Please recall !mods with a reason to notify a moderator.".format(source)
            else:
                self.send_raw("NAMES #TPmods")
                message = self.get_message_from_server()
                ircmsg = message.strip('\n\r')
                # Pull a dotted/dashed IPv4 out of the hostmask, if present,
                # to build a moderation lookup link.
                try:
                    actualip = "{}".format(re.findall(r'\b(?:\d{1,3}[\.-]){3}\d{1,3}\b', hostmask)[0])
                    actualipp = actualip.replace("-", ".")
                    ippfinal = " ( http://tagpro-origin.koalabeast.com/moderate/ips/{} )".format(actualipp)
                except IndexError:
                    ippfinal = ""
                # 353 is the RPL_NAMREPLY numeric carrying the channel's name list.
                if ircmsg.find(' 353 {} '.format(self.nick)) != -1:
                    namelist = ircmsg.split(":")[2]
                    # '+' prefix marks voiced (on-duty) mods.
                    modlist = " ".join(x[1:] for x in namelist.split() if x.startswith('+'))
                    oplist = ""
                    modmsg = "- " + ' '.join(args)
                    if ' '.join(args) == "":
                        modmsg = ""
                    if modlist == "" and oplist == "":
                        self.send_raw("PRIVMSG #TPmods :Sorry {}, all mods are currently AFK. You can stick around or leave your request for one to find later.".format(source))
                    else:
                        self.send_raw("PRIVMSG #TagProMods :Mods - {} {}".format(modlist, oplist))
                        self.send_raw("PRIVMSG #TPmods :{} - the mods have received your request. Please stay patient while waiting. Make sure to state the user/issue to speed up the request process.".format(source))
                    self.send_raw("PRIVMSG #TagProMods :Mod request from {}{} in {} {}".format(source, ippfinal, reply_to, modmsg))

    @command
    def check(self, hostmask, source, reply_to, *args):
        """Query getipintel.net for the probability an IP is a proxy/VPN."""
        ipaddress = ' '.join(args)
        if re.match('^[-0-9.]*$', ipaddress):
            ipaddress = ipaddress.replace("-", ".")
        else:
            return "Sorry, that's not an IP address!"
        with open('email.txt') as e:
            email = e.read().strip()
        page = requests.get('http://check.getipintel.net/check.php?ip={}&contact={}'.format(ipaddress, email))
        # BUG FIX: the decimal-number pattern previously used an unescaped '.'
        # (matches any character); escape it so only real decimals match.
        return "{}: chances of naughty IP = {}%".format(source, int(float(re.findall(r"(\d+(?:\.\d+)?)", page.text)[0]) * 100))

    @command
    def seen(self, hostmask, source, reply_to, *args):
        """Report when a user last spoke and what they said."""
        name = ' '.join(args)
        if name not in mods:
            return "Sorry, haven't seen that weenie"
        timeseen = arrow.get(mods[name]["date"])
        formattime = timeseen.format('YYYY-MM-DD HH:mm:ss ZZ')
        humantime = timeseen.humanize()
        return "{} was seen {} ({}) saying {}".format(name, humantime, formattime, mods[name]["message"])

    @command
    def tell(self, hostmask, source, reply_to, *args):
        """Queue a message for a user, delivered next time they speak.

        BUG FIX: the original first looked up existing entries with
        ``lambda s: s.lower() == target.lower`` — comparing a string to the
        *bound method object* (missing call parens), so the search always
        failed; the result was assigned to an unused variable inside a dead
        ``if``/``if True:`` wrapper. That dead code is removed; the per-sender
        rate limit below is the only behavior and is unchanged.
        """
        target = args[0]
        message = ' '.join(args[1:])
        if tell_message.count(Query().target.test(lambda s: s.lower() == target.lower()) & Query().source.test(lambda s: s.lower() == source.lower())) <= 6:
            tell_message.insert({"target": target, "message": message, 'date': str(arrow.utcnow()), 'source': source})
            return "Ok! I'll pass that on when they become active"
        else:
            return "U message that person like wayyy too much"

    @command
    def optin(self, hostmask, source, reply_to, *args):
        """Voice the caller (put them 'on duty') in #TPmods or #tagprochat."""
        if reply_to not in ["#TagProMods", "#tagprochat"]:
            return "Sorry! This command is not authorized here."
        if reply_to == "#TagProMods":
            self.send_raw("NAMES #TPmods")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            duty = "duty"
            if ircmsg.find('+{}'.format(source)) != -1:
                print(ircmsg.find('+{}'.format(source)))
                return "You are already on {}, {}.".format(duty, source)
            elif ircmsg.find('{}'.format(source)) != -1:
                print(ircmsg.find('{}'.format(source)))
                self.send_raw("PRIVMSG Chanserv :voice #TPmods {}".format(source))
                return "You are now on {}, {}.".format(duty, source)
            else:
                return "You are not in #TPmods, {}!".format(source)
        if reply_to == "#tagprochat":
            self.send_raw("NAMES #tagprochat")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            duty = "duty"
            if ircmsg.find('+{}'.format(source)) != -1:
                return "You are already on {}, {}.".format(duty, source)
            elif ircmsg.find('{}'.format(source)) != -1:
                self.send_raw("PRIVMSG Chanserv :voice #tagprochat {}".format(source))
                return "You are now on {}, {}.".format(duty, source)
            else:
                return "You are not in #tagprochat, {}!".format(source)

    @command
    def optout(self, hostmask, source, reply_to, *args):
        """Devoice the caller (take them 'off duty') in #TPmods or #tagprochat."""
        if reply_to not in ["#TagProMods", "#tagprochat"]:
            return "Sorry! This command is not authorized here."
        if reply_to == "#TagProMods":
            self.send_raw("NAMES #TPmods")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            duty = "duty"
            if source == "Hootie":  # in-joke: special wording for this user
                duty = "dootie"
            if ircmsg.find('+{}'.format(source)) != -1:
                self.send_raw("PRIVMSG Chanserv :devoice #TPmods {}".format(source))
                if source.lower() in ['cignul9']:
                    return "Eat my ass {}".format(source)
                else:
                    return "You are now off {}, {}.".format(duty, source)
            elif ircmsg.find('{}'.format(source)) != -1:
                return "You are already off {}, {}.".format(duty, source)
            else:
                return "You are not in #TPmods, {}!".format(source)
        if reply_to == "#tagprochat":
            self.send_raw("NAMES #tagprochat")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            duty = "duty"
            if source == "Hootie":
                duty = "dootie"
            if ircmsg.find('+{}'.format(source)) != -1:
                self.send_raw("PRIVMSG Chanserv :devoice #tagprochat {}".format(source))
                if source.lower() in ['cignul9']:
                    return "{} is a dink".format(source)
                else:
                    return "You are now off {}, {}.".format(duty, source)
            elif ircmsg.find('{}'.format(source)) != -1:
                return "You are already off {}, {}.".format(duty, source)
            else:
                return "You are not in #tagprochat, {}!".format(source)

    @command
    def op(self, hostmask, source, reply_to, *args):
        """Grant the caller operator status in #TPmods via Chanserv."""
        if reply_to != "#TagProMods":
            return "Sorry! This command is not authorized here."
        else:
            self.send_raw("NAMES #TPmods")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            if ircmsg.find('@{}'.format(source)) != -1:
                return "You are already an operator, {}.".format(source)
            elif ircmsg.find('{}'.format(source)) != -1:
                self.send_raw("PRIVMSG Chanserv :op #TPmods {}".format(source))
                return "You are now an operator, {}.".format(source)
            else:
                return "You are not in #TPmods, {}!".format(source)

    @command
    def deop(self, hostmask, source, reply_to, *args):
        """Remove the caller's operator status in #TPmods via Chanserv."""
        if reply_to != "#TagProMods":
            return "Sorry! This command is not authorized here."
        else:
            self.send_raw("NAMES #TPmods")
            message = self.get_message_from_server()
            ircmsg = message.strip('\n\r')
            if ircmsg.find('@{}'.format(source)) != -1:
                self.send_raw("PRIVMSG Chanserv :deop #TPmods {}".format(source))
                return "You are no longer an operator, {}.".format(source)
            elif ircmsg.find('{}'.format(source)) != -1:
                return "You are not an operator, {}.".format(source)
            else:
                return "You are not in #TPmods, {}!".format(source)

    @command
    def ticket(self, hostmask, source, reply_to, tickett=None, *args):
        """Link to the appeal site, optionally to a specific ticket."""
        if tickett is None:
            return "http://support.koalabeast.com/#/appeal"
        else:
            return "http://support.koalabeast.com/#/appeal/{}".format(tickett)

    @command
    def ip(self, hostmask, source, reply_to, *args):
        """Convert a dash-separated IP (as seen in hostmasks) to dotted form."""
        ipaddress = ' '.join(args)
        if re.match('^[-0-9.]*$', ipaddress):
            return ipaddress.replace("-", ".")
        else:
            return "Sorry, that's not an IP address!"

    @command
    def warn(self, hostmask, source, reply_to, *args):
        """Post the off-topic reminder notice in #TPmods (rate limited)."""
        if reply_to != "#TagProMods":
            return "Sorry! This command is not authorized here."
        if time.time() - self.last_warn < self.warn_interval:
            return "You're using that too much."
        self.send_raw("NOTICE #TPmods :Please take off-topic discussion to #tagpro")
        self.last_warn = time.time()

    @owner_command
    def quit(self, hostmask, source, reply_to, *args):
        """Owner-only: disconnect cleanly and exit the process."""
        self.logger.warn("Shutting down by request of {}", source)
        self.send_raw("QUIT :{}'s out!".format(self.nick))
        self.socket.shutdown(1)
        self.socket.close()
        sys.exit()

    @owner_command
    def join(self, hostmask, source, reply_to, channel=None, *args):
        """Owner-only: join the given channel."""
        if channel is None:
            return "Please specify a channel you wish me to join."
        else:
            self.logger.info("Joining {} by request of {}".format(channel, source))
            self.join_channel(channel)

    @owner_command
    def part(self, hostmask, source, reply_to, channel=None, *args):
        """Owner-only: leave the given channel (or the current one)."""
        if reply_to == source and channel is None:
            # If this was a private message, we have no channel to leave.
            return "Sorry, you must run this command in a channel or provide a channel as an argument."
        elif channel is not None:
            if channel in self.channels:
                self.leave_channel(channel)
                return "Left channel {}!".format(channel)
            else:
                return "I don't believe I'm in that channel!"
        else:
            # It was a channel message, so let's leave.
            self.leave_channel(reply_to)

    @owner_command
    def say(self, hostmask, source, reply_to, channel=None, *args):
        """Owner-only: speak a message into a channel (use via PM)."""
        if reply_to != source:
            return "{} {}".format(channel, ' '.join(args))
        elif channel is not None:
            if channel in self.channels:
                self.send_message(channel, ' '.join(args))
            else:
                return "Whoops! I'm not in the channel {}".format(channel)
        else:
            return "The format is: |say <channel> <message>"
class ActionHandle:
    """
    Tracks the lifecycle of a single robot action.

    Callers can block until the action completes (wait), the robot side can
    signal completion (done), and registered callbacks are invoked once the
    action finishes. A timeout-based timer can stand in for actions that
    provide no completion trigger of their own.
    """

    def __init__(self, timeout: float = None, log_level=logbook.INFO):
        """
        Create an action handle.

        :param timeout: seconds after which the handle marks itself done
            automatically; used for robot actions that cannot signal
            completion themselves. None disables the fallback timer.
        :param log_level: logbook level for this handle's logger.
        """
        StreamHandler(sys.stdout).push_application()
        self._log = Logger('Robot')
        self._log.level = log_level
        self.id = generate_id()
        self.callbacks = []
        self.event_ = threading.Event()
        self.timeout = timeout
        # Only create the fallback timer when a timeout was requested; it
        # fires done() once started via start_timer().
        if self.timeout is not None:
            self.timer_ = threading.Timer(self.timeout, self.done)

    def add_callback(self, done_cb: Callable[[], None] = None) -> None:
        """
        Register *done_cb* to run when the action finishes.

        :param done_cb: zero-argument callable; ignored when None.
        :return: None.
        """
        if done_cb is None:
            return
        self.callbacks.append(done_cb)

    def start_timer(self) -> None:
        """
        Arm the fallback timer that completes this handle after 'timeout'
        seconds. Only meaningful when a timeout was passed to the constructor.

        :return: None.
        """
        if self.timeout is None:
            self._log.warn(
                "ActionHandle.start_timer: the 'timeout' parameter of ActionHandle is not set, "
                "it needs to be set before starting the action handle in timer mode"
            )
            return
        self.timer_.start()

    def wait(self) -> None:
        """
        Block the caller until done() has been invoked.

        :return: None.
        """
        self.event_.wait()

    def done(self) -> None:
        """
        Mark the action as finished: release all waiters and fire every
        registered callback. Invoked by the fallback timer or externally
        (e.g. from the robot's state-update path).

        :return: None.
        """
        self.event_.set()
        for callback in self.callbacks:
            callback()
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""Small logbook demo: log one failing and one succeeding call to f()."""
from logbook import Logger, StreamHandler, FileHandler
import sys
import os


def f(x, y):
    """Return x + y (raises TypeError for incompatible operand types)."""
    return x + y


if __name__ == '__main__':
    # Echo log records to stdout and mirror them into test.log.
    StreamHandler(sys.stdout).push_application()
    logger = Logger('Test Logbook')
    # BUG FIX: the original used 'logbook.FileHandler', but the module
    # 'logbook' itself was never imported (only names from it), so this
    # line raised NameError. Use the imported FileHandler directly.
    log = FileHandler('test.log')
    log.push_application()
    try:
        f(1, '2')  # int + str is invalid and raises TypeError
        logger.info('called ' + f.__name__)
    except TypeError:  # was a bare 'except:' that hid every possible error
        logger.warn('failed on')
    try:
        f(1, 2)
        logger.info('called ' + f.__name__)
    except TypeError:
        logger.warn('choked on, ')
class PmLogHandler(log.CementLogHandler):
    """
    PmLogHandler - override CementLogHandler to use logbook as the logging
    backend.

    This class uses the same configuration options as
    :ref:`LoggingLogHandler <cement.ext.ext_logging>`
    """

    class Meta:
        # Handler metadata consumed by the Cement framework during setup.

        interface = log.ILog
        """The interface that this class implements."""

        label = 'pmlog'
        """The string identifier of this handler."""

        namespace = "pm"
        """
        The logging namespace.

        Note: Although Meta.namespace defaults to None, Cement will set
        this to the application label (CementApp.Meta.label) if not set
        during setup.
        """

        file_format = "{record.time} ({record.level_name}) {record.channel} : {record.message}"
        """The logging format for the file logger."""

        console_format = "{record.time:%Y-%m-%d %H:%M} ({record.level_name}): {record.message}"
        """The logging format for the console logger."""

        debug_format = "{record.time} ({record.level_name}) {record.channel} : {record.message}"
        """The logging format for both file and console if ``debug==True``."""

        log_setup = None
        """Nested log setup placeholder (populated in _setup)."""

        level = 0
        """Global level for handlers."""

        clear_loggers = True
        """Whether or not to clear previous loggers first."""

        # These are the default config values, overridden by any '[log]'
        # section in parsed config files.
        config_section = 'log'
        """
        The section of the application configuration that holds this
        handler's configuration.
        """

        config_defaults = dict(
            file=None,
            level='INFO',
            to_console=True,
            rotate=False,
            max_bytes=512000,
            max_files=4,
        )
        """
        The default configuration dictionary to populate the ``log`` section.
        """

    # Accepted level names for set_level(); anything else falls back to INFO.
    levels = ['INFO', 'WARN', 'ERROR', 'DEBUG', 'FATAL']

    def __init__(self, *args, **kw):
        """Initialize the handler; ``self.app`` is attached later by _setup()."""
        super(PmLogHandler, self).__init__(*args, **kw)
        self.app = None

    def _setup(self, app_obj):
        """
        Framework hook: wire up the logbook backend once the application
        object exists, attaching console/file handlers per configuration.
        """
        super(PmLogHandler, self)._setup(app_obj)
        if self._meta.namespace is None:
            # Default the namespace to the application label.
            self._meta.namespace = self.app._meta.label
        self.backend = Logger(self._meta.namespace)
        # hack for application debugging
        if is_true(self.app._meta.debug):
            self.app.config.set('log', 'level', 'DEBUG')
        # Mainly for backwards compatibility since Logger level should
        # be NOTSET (level 0). Output level is controlled by handlers
        self.set_level(self.app.config.get('log', 'level'))
        # clear loggers?
        if is_true(self._meta.clear_loggers):
            self.clear_loggers()
        # console
        if is_true(self.app.config.get('log', 'to_console')):
            self._setup_console_log()
        # file
        if self.app.config.get('log', 'file'):
            self._setup_file_log()
        # nested setup: a non-bubbling NullHandler terminates the chain so
        # records don't propagate past our configured handlers.
        self.backend.handlers.append(logbook.NullHandler(bubble=False))
        self.log_setup = logbook.NestedSetup(self.backend.handlers)
        with self._console_handler.applicationbound():
            self.debug("logging initialized for '%s' using PmLogHandler" % \
                       self._meta.namespace)

    def set_level(self, level):
        """
        Set the log level.  Must be one of the log levels configured in
        self.levels which are ``['INFO', 'WARN', 'ERROR', 'DEBUG', 'FATAL']``;
        anything else falls back to 'INFO'.

        :param level: The log level to set.
        """
        level = level.upper()
        if level not in self.levels:
            level = 'INFO'
        # NOTE(review): level is already upper-cased above; the second
        # .upper() here is a harmless no-op.
        level = logbook.lookup_level(level.upper())
        self.level = level

    def get_level(self):
        """Return a string representation of the current log level."""
        return logbook.get_level_name(self.level)

    def _setup_console_log(self):
        """Add a console (stderr) log handler to the backend."""
        # Use the verbose debug format when the effective level is DEBUG.
        if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
            fmt_string = self._meta.debug_format
        else:
            fmt_string = self._meta.console_format
        console_handler = logbook.StderrHandler(format_string=fmt_string,
                                                level=logbook.lookup_level(
                                                    self.get_level()),
                                                bubble=True)
        self._console_handler = console_handler
        self.backend.handlers.append(console_handler)

    def _setup_file_log(self):
        """Add a file log handler (rotating if configured) to the backend."""
        file_path = os.path.expandvars(
            fs.abspath(self.app.config.get('log', 'file')))
        log_dir = os.path.dirname(file_path)
        # Create the log directory on first use.
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        if logbook.lookup_level(self.get_level()) == logbook.DEBUG:
            fmt_string = self._meta.debug_format
        else:
            fmt_string = self._meta.file_format
        if self.app.config.get('log', 'rotate'):
            from logbook import RotatingFileHandler
            file_handler = RotatingFileHandler(
                file_path,
                max_size=int(self.app.config.get('log', 'max_bytes')),
                backup_count=int(self.app.config.get('log', 'max_files')),
                format_string=fmt_string,
                level=logbook.lookup_level(self.get_level()),
                bubble=True,
            )
        else:
            from logbook import FileHandler
            file_handler = FileHandler(
                file_path,
                format_string=fmt_string,
                level=logbook.lookup_level(self.get_level()),
                bubble=True,
            )
        self._file_handler = file_handler
        self.backend.handlers.append(file_handler)

    def _get_logging_kwargs(self, namespace, **kw):
        """
        Ensure ``kw['extra']['namespace']`` is populated (without clobbering
        a caller-supplied value) before passing kwargs to the backend.
        """
        if namespace is None:
            namespace = self._meta.namespace
        if 'extra' in kw.keys() and 'namespace' in kw['extra'].keys():
            pass
        elif 'extra' in kw.keys() and 'namespace' not in kw['extra'].keys():
            kw['extra']['namespace'] = namespace
        else:
            kw['extra'] = dict(namespace=namespace)
        return kw

    def info(self, msg, namespace=None, **kw):
        """
        Log to the INFO facility.

        :param msg: The message to log.
        :param namespace: A log prefix, generally the module ``__name__``
            that the log is coming from.  Defaults to self._meta.namespace
            if None is passed.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.info(msg, **kwargs)

    def debug(self, msg, namespace=None, **kw):
        """
        Log to the DEBUG facility.

        :param msg: The message to log.
        :param namespace: A log prefix, generally the module ``__name__``
            that the log is coming from.  Defaults to self._meta.namespace
            if None is passed.  For debugging, it can be useful to set this
            to ``__file__``, though ``__name__`` is much less verbose.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.debug(msg, **kwargs)

    def warn(self, msg, namespace=None, **kw):
        """
        Log to the WARN facility.

        :param msg: The message to log.
        :param namespace: A log prefix, generally the module ``__name__``
            that the log is coming from.  Defaults to self._meta.namespace
            if None is passed.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.warn(msg, **kwargs)

    def critical(self, msg, namespace=None, **kw):
        """
        Log to the CRITICAL facility.

        :param msg: The message to log.
        :param namespace: A log prefix, generally the module ``__name__``
            that the log is coming from.  Defaults to self._meta.namespace
            if None is passed.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.critical(msg, **kwargs)

    def fatal(self, msg, namespace=None, **kw):
        """
        Log to the FATAL facility.

        :param msg: The message to log.
        :param namespace: A log prefix, generally the module ``__name__``
            that the log is coming from.  Defaults to self._meta.namespace
            if None is passed.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.fatal(msg, **kwargs)

    def error(self, msg, namespace=None, **kw):
        """
        Log to the ERROR facility.

        :param msg: The message to log.
        :param namespace: A log prefix, generally the module ``__name__``
            that the log is coming from.  Defaults to self._meta.namespace
            if None is passed.
        :keyword kw: Keyword arguments are passed on to the backend logging
            system.
        """
        kwargs = self._get_logging_kwargs(namespace, **kw)
        self.backend.error(msg, **kwargs)

    ## NOTE: do we even need this for logbook?
    def clear_loggers(self):
        """Clear any previously configured logging namespaces.

        NOTE(review): handlers are appended to ``self.backend.handlers``
        elsewhere in this class, but this method resets ``self.handlers``
        on the handler object itself — confirm against CementLogHandler
        whether this actually detaches anything.
        """
        if not self._meta.namespace:
            # _setup() probably wasn't run
            return
        self.handlers = []
"""Demo of two logbook handlers: a stdout StreamHandler followed by a
StderrHandler with a custom format string."""
import sys

# FIX: PEP 8 — all imports belong at the top of the module; StderrHandler
# was originally imported mid-script between two logging sections.
from logbook import Logger, StreamHandler, StderrHandler

log = Logger('Stream handler logger')

# Stage 1: plain stream handler writing records to stdout.
StreamHandler(sys.stdout).push_application()
log.warn('warning')
log.error("error")

# Stage 2: stderr handler with a custom record format, pushed on top of the
# previous handler (NOTE(review): pushed later — confirm the intended
# handler-stack precedence against the logbook docs).
handler = StderrHandler()
handler.format_string = '{record.channel}: {record.message}'
handler.push_application()
log.warn('warning')
log.error("error")
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Demo: route logbook output to stdout and relabel the WARNING level.

@author = 'wyx'
@time = 2017/4/11 21:50
@annotation = ''
"""
import sys

import logbook
from logbook import Logger, StreamHandler

stdout_handler = StreamHandler(sys.stdout)
stdout_handler.push_application()

# Patch logbook's level-name table so WARNING records display as 'WARN'.
logbook.base._level_names[logbook.base.WARNING] = 'WARN'

log = Logger('My Awesome Logger')
log.warn('This is too cool for stdlib')
#save_config(os.path.join(cfg.general.common.save_path, "ran_cfg"), cfg) # Now actually run the driving_benchmark #import pdb; pdb.set_trace() run_driving_benchmark(agent, benchmark_agent, cfg.simulator.town, cfg.simulator.carla_log_name, cfg.simulator.continue_experiment, cfg.simulator.host, cfg.simulator.port) simulator.kill_process() if __name__ == '__main__': # Initialize logger properties StreamHandler(sys.stdout).push_application() log.info("[MODE] Train") log.warn('Logbook is too awesome for most applications') # -- Parse config file & generate procs_no, arg_list = generate_configs() log.info("Starting simulation...") redirect_logging() if len(arg_list) > 1: # Run batch of experiments: pool = mp.Pool(procs_no) pool.map(run_once, arg_list) else: run_once(arg_list[0])
complete.put(100) #log.info(len([(word, letters, results) for word in words])) #blah = pool.map_async(evaluator, [(word, letters, results) for word in words]) #log.info('Done: {}'.format([word for word in blah.get() if word is not None])) pool.close() done = False try: while not done: if len(best_words) > 1 and results.empty() and complete.empty(): done = True time.sleep(0.01) except KeyboardInterrupt: log.warn('Exiting application') pool.terminate() best_words = reversed(best_words) if preferred is not None: preferred = list(preferred) log.info('Sorting for words containing {}'.format(', '.join(preferred))) best_words = sorted(best_words, key=lambda word: 1 in [l in preferred for l in word], reverse=True) log.info('Best words: {}'.format(', '.join(best_words))) end = datetime.now() log.info('Took {}s to locate {} words from {} possible'.format(end-start, len(best_words), len(words)))
import time from logbook import Logger import multiprocessing from catalyst.exchange.exchange_bundle import ExchangeBundle from rq import Connection, Worker import pandas as pd import redis from kryptos import logger_group from kryptos.settings import REDIS_HOST, REDIS_PORT CONN = redis.Redis(host=REDIS_HOST, port=REDIS_PORT) log = Logger("INGESTER") logger_group.add_logger(log) log.warn(f"Using Redis connection {REDIS_HOST}:{REDIS_PORT}") def ingest_exchange(exchange, symbol=None, start=None, end=None): exchange_bundle = ExchangeBundle(exchange) if symbol is None: log.warn(f"Queuing ingest {exchange} for all symbols") else: log.warn(f"Queuing ingest {exchange} for {symbol}") log.warn(f"Will ingest timeframe {start} - {end}") log.info(f"Ingesting {exchange} daily data") exchange_bundle.ingest( "daily", start=pd.to_datetime(start, utc=True),
class Config(object, metaclass=Singleton):
    """Singleton holding the full command-line configuration of the parser.

    Builds an ``ArgParser`` (presumably configargparse, given ``is_config_file``
    -- TODO confirm) with all parsing/training/output/DyNet options, then keeps
    the parsed ``self.args`` namespace up to date as per-format and per-iteration
    hyperparameters are switched at runtime via set_format()/update_iteration().
    ``copy()`` produces child configs tracked in ``sub_configs`` so updates can
    be propagated recursively via descendants().
    """

    def __init__(self, *args):
        """Create the argument parser, parse ``args`` (or sys.argv) and derive defaults."""
        self.arg_parser = ap = ArgParser(description="Transition-based parser for UCCA.",
                                         formatter_class=ArgumentDefaultsHelpFormatter)
        ap.add_argument("passages", nargs="*", help="passage files/directories to test on/parse")
        ap.add_argument("--version", action="version", version="")
        ap.add_argument("-C", "--config", is_config_file=True, help="configuration file to get arguments from")
        ap.add_argument("-m", "--models", nargs="+", help="model file basename(s) to load/save, ensemble if >1 "
                                                          "(default: <format>_<model_type>")
        ap.add_argument("-c", "--classifier", choices=CLASSIFIERS, default=BIRNN, help="model type")
        ap.add_argument("-B", "--beam", type=int, choices=(1,), default=1, help="beam size for beam search")
        add_boolean_option(ap, "evaluate", "evaluation of parsed passages", short="e")
        add_verbose_arg(ap, help="detailed parse output")
        constructions.add_argument(ap)
        add_boolean_option(ap, "sentences", "split to sentences")
        add_boolean_option(ap, "paragraphs", "split to paragraphs")
        ap.add_argument("--timeout", type=float, help="max number of seconds to wait for a single passage")
        # -- Training parameters --
        group = ap.add_argument_group(title="Training parameters")
        group.add_argument("-t", "--train", nargs="+", default=(), help="passage files/directories to train on")
        group.add_argument("-d", "--dev", nargs="+", default=(), help="passage files/directories to tune on")
        group.add_argument("-I", "--iterations", nargs="+", type=Iterations,
                           default=(Iterations(50), Iterations("100 --optimizer=" + EXTRA_TRAINER)),
                           help="number of training iterations along with optional hyperparameters per part")
        group.add_argument("--folds", type=int, choices=(3, 5, 10), help="#folds for cross validation")
        group.add_argument("--seed", type=int, default=1, help="random number generator seed")
        add_boolean_option(group, "early-update", "early update procedure (finish example on first error)")
        group.add_argument("--save-every", type=int, help="every this many passages, evaluate on dev and save model")
        add_boolean_option(group, "eval-test", "evaluate on test whenever evaluating on dev, but keep results hidden")
        add_boolean_option(group, "ignore-case", "pre-convert all input files to lower-case in training and test")
        # -- Output files --
        group = ap.add_argument_group(title="Output files")
        group.add_argument("-o", "--outdir", default=".", help="output directory for parsed files")
        group.add_argument("-p", "--prefix", default="", help="output filename prefix")
        add_boolean_option(group, "write", "writing parsed output to files", default=True, short_no="W")
        group.add_argument("-j", "--join", help="if output format is textual, write all to one file with this basename")
        group.add_argument("-l", "--log", help="output log file (default: model filename + .log)")
        group.add_argument("--devscores", help="output CSV file for dev scores (default: model filename + .dev.csv)")
        group.add_argument("--testscores", help="output CSV file for test scores (default: model filename + .test.csv)")
        group.add_argument("--action-stats", help="output CSV file for action statistics")
        add_boolean_option(group, "normalize", "apply normalizations to output in case format is UCCA", default=False)
        ap.add_argument("-f", "--formats", nargs="+", choices=FILE_FORMATS, default=(),
                        help="input formats for creating all parameters before training starts "
                             "(otherwise created dynamically based on filename suffix), "
                             "and output formats for written files (each will be written; default: UCCA XML)")
        ap.add_argument("-u", "--unlabeled", nargs="*", choices=FORMATS, help="to ignore labels in")
        ap.add_argument("--lang", default="en", help="two-letter language code to use as the default language")
        add_boolean_option(ap, "multilingual", "separate model parameters per language (passage.attrib['lang'])")
        # -- Sanity checks --
        group = ap.add_argument_group(title="Sanity checks")
        add_boolean_option(group, "check-loops", "check for parser state loop")
        add_boolean_option(group, "verify", "check for oracle reproducing original passage")
        add_boolean_option(group, "validate-oracle", "require oracle output to respect constraints", default=True)
        add_param_arguments(ap)
        # -- DyNet parameters (names recorded in DYNET_ARG_NAMES below) --
        group = ap.add_argument_group(title="DyNet parameters")
        group.add_argument("--dynet-mem", help="memory for dynet")
        group.add_argument("--dynet-weight-decay", type=float, default=1e-5, help="weight decay for parameters")
        add_boolean_option(group, "dynet-apply-weight-decay-on-load", "workaround for clab/dynet#1206", default=False)
        add_boolean_option(group, "dynet-gpu", "GPU for training")
        group.add_argument("--dynet-gpus", type=int, default=1, help="how many GPUs you want to use")
        add_boolean_option(group, "dynet-autobatch", "auto-batching of training examples")
        DYNET_ARG_NAMES.update(get_group_arg_names(group))
        ap.add_argument("-H", "--hyperparams", type=HyperparamsInitializer.action, nargs="*",
                        help="shared hyperparameters or hyperparameters for specific formats, "
                             'e.g., "shared --lstm-layer-dim=100 --lstm-layers=1" "ucca --word-dim=300"',
                        default=[HyperparamsInitializer.action("shared --lstm-layers 2")])
        ap.add_argument("--copy-shared", nargs="*", choices=FORMATS, help="formats whose parameters shall be "
                                                                          "copied from loaded shared parameters")
        self.args = FallbackNamespace(ap.parse_args(args if args else None))
        if self.args.config:
            print("Loading configuration from '%s'." % self.args.config)
        if self.args.passages and self.args.write:
            os.makedirs(self.args.outdir, exist_ok=True)
        # Derive default log/score filenames from the first model basename.
        if self.args.models:
            if not self.args.log:
                self.args.log = self.args.models[0] + ".log"
            if self.args.dev and not self.args.devscores:
                self.args.devscores = self.args.models[0] + ".dev.csv"
            if self.args.passages and not self.args.testscores:
                self.args.testscores = self.args.models[0] + ".test.csv"
        elif not self.args.log:
            self.args.log = "parse.log"
        self.sub_configs = []  # Copies to be stored in Models so that they do not interfere with each other
        self._logger = self.format = self.hyperparams = self.iteration_hyperparams = None
        self._vocab = {}
        self.original_values = {}
        self.random = np.random
        self.update()

    def create_original_values(self, args=None):
        """Snapshot the RESTORED_ARGS values (from ``self.args`` or the given dict)."""
        return {attr: getattr(self.args, attr) if args is None else args[attr] for attr in RESTORED_ARGS
                if args is None or attr in args}

    def set_format(self, f=None, update=False, recursive=True):
        """Switch the current output format and re-apply format-specific hyperparameters."""
        if f in (None, "text") and not self.format:  # In update or parsing UCCA (with no extra["format"]) or plain text
            f = "ucca"  # Default output format is UCCA
        if update or self.format != f:
            if f not in (None, "text"):
                self.format = f
            self.update_by_hyperparams()
        if recursive:
            for config in self.descendants():
                config.set_format(f=f, update=update, recursive=False)

    def descendants(self):
        """Return all transitive sub-configs created by copy(), breadth-first."""
        ret = []
        configs = [self]
        while configs:
            c = configs.pop(0)
            ret += c.sub_configs
            configs += c.sub_configs
        return ret

    def is_unlabeled(self, f=None):
        """Whether edge labels are ignored for format ``f`` (default: current format)."""
        # If just -u or --unlabeled is given then its value is [], and we want to treat that as "all formats"
        # If not given at all it is None, and we want to treat that as "no format"
        return self.args.unlabeled == [] or (f or self.format) in (self.args.unlabeled or ())

    def max_actions_unlabeled(self):
        """Count the distinct unlabeled transition actions enabled by the current flags."""
        return 6 + (  # Shift Node Reduce LeftEdge RightEdge Finish
            3 if self.args.remote else 0) + (  # RemoteNode LeftRemote RightRemote
            1 if self.args.swap == REGULAR else (self.args.max_swap if self.args.swap == COMPOUND else 0)) + (  # Swap
            1 if self.args.implicit else 0) + (  # Implicit
            2 if self.args.node_labels and not self.args.use_gold_node_labels else 0)  # Label x 2

    def set_dynet_arguments(self):
        """Seed RNGs and push memory/GPU/autobatch settings into dynet_config."""
        self.random.seed(self.args.seed)
        kwargs = dict(random_seed=self.args.seed)
        if self.args.dynet_mem:
            kwargs.update(mem=self.args.dynet_mem)
        if self.args.dynet_weight_decay:
            kwargs.update(weight_decay=self.args.dynet_weight_decay)
        if self.args.dynet_gpus and self.args.dynet_gpus != 1:
            kwargs.update(requested_gpus=self.args.dynet_gpus)
        if self.args.dynet_autobatch:
            kwargs.update(autobatch=True)
        dynet_config.set(**kwargs)
        if self.args.dynet_gpu:
            dynet_config.set_gpu()

    def update(self, params=None):
        """Apply ``params`` onto ``self.args`` and rebuild derived hyperparameters."""
        if params:
            for name, value in params.items():
                setattr(self.args, name, value)
        self.original_values.update(self.create_original_values(params))
        self.hyperparams = self.create_hyperparams()
        for f, num in EDGE_LABELS_NUM.items():
            self.hyperparams.specific[f].max_edge_labels = num
        # AMR gets node-label defaults unless already set to a truthy value.
        amr_hyperparams = self.hyperparams.specific["amr"]
        for k, v in dict(node_label_dim=20, max_node_labels=1000, node_category_dim=5, max_node_categories=25).items():
            if k not in amr_hyperparams and not getattr(amr_hyperparams, k, None):
                setattr(amr_hyperparams, k, v)
        self.set_format(update=True)
        self.set_dynet_arguments()

    def create_hyperparams(self):
        """Build a Hyperparams object from the parsed --hyperparams initializers."""
        return Hyperparams(parent=self.args, **{h.name: h.args for h in self.args.hyperparams or ()})

    def update_hyperparams(self, **kwargs):
        """Convenience wrapper: update() with hyperparams built from keyword dicts."""
        self.update(dict(hyperparams=[HyperparamsInitializer(k, **v) for k, v in kwargs.items()]))

    def update_iteration(self, iteration, print_message=True, recursive=True):
        """Apply an Iterations entry's hyperparameters (and propagate to descendants)."""
        if iteration.hyperparams:
            if print_message:
                print("Updating: %s" % iteration.hyperparams)
            self.iteration_hyperparams = iteration.hyperparams.args
            self.update_by_hyperparams()
            if recursive:
                for config in self.descendants():
                    config.update_iteration(iteration, print_message=False, recursive=False)

    def update_by_hyperparams(self):
        """Overlay iteration- and format-specific hyperparameters onto ``self.args``."""
        format_values = dict(self.original_values)
        for hyperparams in (self.iteration_hyperparams, self.hyperparams.specific[self.format]):
            if hyperparams:
                format_values.update({k: v for k, v in hyperparams.items() if not k.startswith("_")})
        for attr, value in sorted(format_values.items()):
            self.print("Setting %s=%s" % (attr, value))
            setattr(self.args, attr, value)
        # Node labels only exist for AMR; zero them out for all other formats.
        if self.format != "amr":
            self.args.node_labels = False
            self.args.node_label_dim = self.args.max_node_labels = \
                self.args.node_category_dim = self.args.max_node_categories = 0
        if self.is_unlabeled():
            self.args.max_edge_labels = self.args.edge_label_dim = 0
            self.args.max_action_labels = self.max_actions_unlabeled()
        else:
            self.args.max_action_labels = max(self.args.max_action_labels, 6 * self.args.max_edge_labels)

    @property
    def line_end(self):
        return "\n" if self.args.verbose > 2 else " "  # show all in one line unless verbose

    @property
    def passage_word(self):
        # Name of the text unit being parsed, per the split flags.
        return "sentence" if self.args.sentences else "paragraph" if self.args.paragraphs else "passage"

    @property
    def passages_word(self):
        return " %ss" % self.passage_word

    def log(self, message):
        """Log ``message`` via logbook, lazily installing file/stderr handlers."""
        try:
            if self._logger is None:
                FileHandler(self.args.log,
                            format_string="{record.time:%Y-%m-%d %H:%M:%S} {record.message}").push_application()
                if self.args.verbose > 1:
                    StderrHandler(bubble=True).push_application()
                self._logger = Logger("tupa")
            self._logger.warn(message)
        except OSError:
            pass  # best-effort logging: never let a logging failure kill the run

    def vocab(self, filename=None, lang=None):
        """Load (and cache per filename) a vocabulary enum; None if no file configured."""
        if filename is None:
            args = self.hyperparams.specific[lang] if lang else self.args
            filename = args.vocab
        if not filename:
            return None
        vocab = self._vocab.get(filename)
        if vocab:
            return vocab
        vocab = load_enum(filename)
        self._vocab[filename] = vocab
        return vocab

    def print(self, message, level=3):
        """Print ``message`` (or call it if callable) when verbosity >= ``level``."""
        if self.args.verbose >= level:
            try:
                print(message() if hasattr(message, "__call__") else message, flush=True)
            except UnicodeEncodeError:
                pass  # ignore console encodings that cannot render the message

    def save(self, filename):
        """Write the current configuration to ``filename + ".yml"`` in YAML-ish form."""
        out_file = filename + ".yml"
        print("Saving configuration to '%s'." % out_file)
        with open(out_file, "w") as f:
            name = None
            values = []
            # str(self) yields the equivalent command line; regroup it into name: value pairs.
            # The trailing "--" sentinel flushes the final option.
            for arg in shlex.split(str(self), "--") + ["--"]:
                if arg.startswith("--"):
                    if name and name not in ("train", "dev"):
                        if len(values) > 1:
                            values[0] = "[" + values[0]
                            values[-1] += "]"
                        elif name.startswith("no-"):
                            name = name[3:]
                            values = ["false"]
                        print("%s: %s" % (name, ", ".join(values) or "true"), file=f)
                    name = arg[2:]
                    values = []
                else:
                    values.append(arg)

    def copy(self):
        """Create a child config (deep-copied args/hyperparams) registered in sub_configs."""
        cls = self.__class__
        ret = cls.__new__(cls)
        ret.arg_parser = self.arg_parser
        ret.args = deepcopy(self.args)
        ret.original_values = deepcopy(self.original_values)
        ret.hyperparams = deepcopy(self.hyperparams)
        ret.iteration_hyperparams = deepcopy(self.iteration_hyperparams)
        ret.format = self.format
        ret.random = self.random
        ret._logger = self._logger
        ret._vocab = dict(self._vocab)
        ret.sub_configs = []
        self.sub_configs.append(ret)
        return ret

    def args_str(self, args):
        """Render ``args`` as command-line tokens, skipping defaults and irrelevant options."""
        return ["--" + ("no-" if v is False else "") + k.replace("_", "-") +
                ("" if v is False or v is True else
                 (" " + str(" ".join(map(str, v)) if hasattr(v, "__iter__") and not isinstance(v, str) else v)))
                for (k, v) in sorted(args.items()) if
                v not in (None, (), "", self.arg_parser.get_default(k))
                and not k.startswith("_")
                and (args.node_labels or ("node_label" not in k and "node_categor" not in k))
                and (args.swap or "swap_" not in k)
                and (args.swap == COMPOUND or k != "max_swap")
                and (not args.require_connected or k != "orphan_label")
                and (args.classifier == SPARSE or k not in SPARSE_ARG_NAMES)
                and (args.classifier in NN_CLASSIFIERS or k not in NN_ARG_NAMES | DYNET_ARG_NAMES)
                and k != "passages"]

    def __str__(self):
        # NOTE: rebuilds self.args.hyperparams from the current Hyperparams as a side effect.
        self.args.hyperparams = [HyperparamsInitializer(name, **args.vars()) for name, args in self.hyperparams.items()]
        return " ".join(list(self.args.passages) + self.args_str(self.args))
class VisionServer(object):
    """Async VK chat bot that resolves links found in chat messages.

    For every incoming message containing a link, it posts a progress message,
    resolves the link through a (blocking) web client in a worker thread, edits
    the progress message with the result, takes a screenshot of the resolved
    page and attaches it to the message.
    """

    # Kept for backward compatibility with any external reads of the class
    # attributes; shadowed by per-instance lists in __init__ so that state is
    # never shared between instances (mutable class attributes were a bug risk).
    _server_task_pool = []
    _aux_tasks = []

    def __init__(self, workers: int, token: str, chat_name: str,
                 binary_path: str, driver_path: str, image_path: str):
        """Set up the event loop, VK API handle and web client.

        Args:
            workers: number of concurrent _process() server coroutines
            token: VK API token
            chat_name: chat to listen on
            binary_path: browser binary path for the web client
            driver_path: webdriver path for the web client
            image_path: directory for screenshots (trailing slash added if missing)
        """
        self._workers = workers
        # Ensure a trailing slash on the screenshot directory.
        # BUGFIX: the original test was `image_path[:-1] != '/'`, which compares
        # everything *except* the last character to '/', so the slash was
        # appended even when already present (and never only when missing).
        if not image_path.endswith('/'):
            image_path += '/'
        # Per-instance task bookkeeping (see class-attribute note above).
        self._server_task_pool = []
        self._aux_tasks = []
        self._log = Logger('VServer')
        self._aioloop = asyncio.get_event_loop()
        self._vkapi = VKAPIHandle(self._aioloop, token, chatname=chat_name)
        self._web = WebClient(binary_path=binary_path,
                              driver_path=driver_path,
                              image_path=image_path,
                              timeout=WEBCLIENT_TIMEOUT)
        self._webclient_lock = asyncio.Lock()

    async def _execute_blocking(self, func, *args, **kwargs) -> Coroutine:
        """ Execute blocking function in separate thread

        Args:
            func: function object to be called
            *args: positional arguments
            **kwargs: keyword arguments
        Returns:
            Result of the function call, once the executor thread completes
        """
        self._log.debug(
            f"Scheduling function {func.__name__}{args} call to separate thread"
        )
        # Default (None) executor = the loop's thread pool.
        return await self._aioloop.run_in_executor(executor=None,
                                                   func=functools.partial(func, *args, **kwargs))

    def _queue_task(self, func, *args, **kwargs) -> Coroutine:
        """ Queue async task to be executed in the loop

        Args:
            func: function to be called
            *args: positional arguments
            **kwargs: keyword arguments
        Returns:
            asyncio task corresponding to the created coroutine
        """
        self._log.debug(
            f"Scheduling function {func.__name__}{args} to loop queue")
        task = asyncio.ensure_future(func(*args, **kwargs))
        # Tracked so start() can await leftovers on shutdown.
        self._aux_tasks.append(task)
        return task

    async def _process(self):
        """Main server coroutine: consume messages, resolve links, post results."""
        try:
            async for message in self._vkapi.wait_for_messages():
                link = find_link_br(message)
                if not link:
                    continue  # Nothing to do here
                self._log.info(f"Found link in message: {link}")
                # Post a progress placeholder while we resolve the link.
                message_task = self._queue_task(
                    self._vkapi.send_msg,
                    text=f"{EMOJI['process']} {link}")
                # The web client is not safe for concurrent use; serialize access.
                with (await self._webclient_lock):
                    resolved_link = await self._execute_blocking(
                        self._web.resolve, link)
                resolved_hash = hash_link(resolved_link)
                self._log.info(f"{link} -> {resolved_link} ({resolved_hash})")
                try:
                    message_id = await message_task
                except VkCaptchaNeeded:
                    # Captcha is unrecoverable here; escalate to stop the server.
                    self._log.error('Captcha kicked in, unable to proceed')
                    raise RuntimeError('Captcha')
                if resolved_link is None:
                    self._log.warn('Skipping link')
                    self._queue_task(self._vkapi.edit_msg,
                                     msg_id=message_id,
                                     text=f"{EMOJI['timeout']} {link}")
                    continue
                # Only show the redirect arrow when the link actually changed.
                if hash_link(link) != resolved_hash:
                    message_text = f"{EMOJI['processed']} {link} -> {resolved_link}"
                    self._queue_task(self._vkapi.edit_msg,
                                     msg_id=message_id,
                                     text=message_text)
                else:
                    message_text = f"{EMOJI['processed']} {link}"
                    self._queue_task(self._vkapi.edit_msg,
                                     msg_id=message_id,
                                     text=message_text)
                with (await self._webclient_lock):
                    screen_path = await self._execute_blocking(
                        self._web.snap, resolved_link)
                if screen_path:
                    photo_id = await self._vkapi.upload_photo(screen_path)
                    self._queue_task(self._vkapi.edit_msg,
                                     msg_id=message_id,
                                     text=message_text,
                                     attachment=photo_id)
        except asyncio.CancelledError:
            self._log.info('Longpoll task cancelled')
            return

    def start(self):
        """Run the server until interrupted, then drain tasks and shut down cleanly."""
        self._aioloop.run_until_complete(
            self._vkapi.register())  # API init subroutine
        try:
            self._server_task_pool = [
                asyncio.ensure_future(self._process())
                for _ in range(self._workers)
            ]
            self._aioloop.run_until_complete(
                asyncio.gather(*self._server_task_pool))
        except KeyboardInterrupt:
            self._log.warn('Being shut down by keyboard interrupt or SIGINT')
        except RuntimeError:
            self._log.warn('Being shut down by server error')
        finally:
            # Send cancel exception to all server tasks
            for task in self._server_task_pool:
                task.cancel()
            # Collect pending coroutines and wait for them to finish
            pending_tasks = [
                task for task in self._aux_tasks if not task.done()
            ]
            if len(pending_tasks) > 0:
                self._log.info(
                    f"Waiting for {len(pending_tasks)} pending tasks to finish"
                )
                self._aioloop.run_until_complete(
                    asyncio.gather(*pending_tasks))
                self._log.info('Pending tasks finished')
            # Wait for server tasks to wrap up
            self._aioloop.run_until_complete(
                asyncio.gather(*self._server_task_pool))
            self._log.info('Stopping auxiliary services')
            self._vkapi.stop()
            self._web.stop()
            self._aioloop.close()
            self._log.warn('Server has been shut down!')