def _server_process(self):
    """Run the web server in the (child) process: build the WSGI app,
    publish the bound address through self._port_queue, then serve forever.

    Never returns normally; terminated externally (runs under
    multiprocessing - see self._process on the owning object).
    """
    def _handler(environ, start_response):
        # WSGI application closure; 'self' here is the copy living in the
        # server process.
        self.log_d('_handler: id={} self={} _fail_next_request={}'.format(
            id(self), self, self._fail_next_request))
        self.headers = self._get_headers(environ)
        self.log_d('headers={}'.format(self.headers))
        auth = self._decode_auth(environ)
        if not self._validate_auth(auth):
            return self.response_error(start_response, 403)
        # Test hook: a non-zero shared value means "fail the next request
        # with this status code", one-shot (reset to 0 after use).
        if self._fail_next_request.value != 0:
            code = self._fail_next_request.value
            self._fail_next_request.value = 0
            return self.response_error(start_response, code)
        result = self.handle_request(environ, start_response)
        return result
    log.add_logging(self, 'web_server')
    # Port 0 lets the OS pick a free port; the real port is read back from
    # the socket below.
    httpd = simple_server.make_server('', self._requested_port or 0, _handler,
                                      handler_class=self.handler)
    httpd.allow_reuse_address = True
    address = httpd.socket.getsockname()
    self.log_i('notifying that port is known')
    # Hand the actual (host, port) back to the parent process.
    self._port_queue.put(address)
    self.log_i('calling serve_forever()')
    httpd.serve_forever()
def __init__(self, log_tag, options):
    """Configure the string lexer from its option bit flags and build its states.

    log_tag: tag used for the attached logging methods.
    options: bitwise OR of string_lexer_options flags, or falsy for defaults.
    """
    log.add_logging(self, tag=log_tag)
    opts = options or string_lexer_options.DEFAULT_OPTIONS
    self._options = opts
    # Decode the individual option bits into booleans.
    self._keep_quotes = bool(opts & string_lexer_options.KEEP_QUOTES)
    self._escape_quotes = bool(opts & string_lexer_options.ESCAPE_QUOTES)
    self._ignore_comments = bool(opts & string_lexer_options.IGNORE_COMMENTS)
    # Per-run scratch state.
    self._buffer = None
    self._is_escaping = False
    self._last_char = None
    # One instance per lexer state; the state machine starts in BEGIN.
    self.STATE_BEGIN = string_lexer_state_begin(self)
    self.STATE_DONE = string_lexer_state_done(self)
    self.STATE_STRING = string_lexer_state_string(self)
    self.STATE_SPACE = string_lexer_state_space(self)
    self.STATE_SINGLE_QUOTED_STRING = string_lexer_state_single_quoted_string(self)
    self.STATE_DOUBLE_QUOTED_STRING = string_lexer_state_double_quoted_string(self)
    self.STATE_COMMENT = string_lexer_state_comment(self)
    self.state = self.STATE_BEGIN
def __init__(self, width, height):
    """Create an ascii art canvas with the given dimensions."""
    log.add_logging(self, 'canvas')
    self.width = width
    self.height = height
    # Flat, row-major buffer of width*height cells, all initially blank.
    self._matrix = list(' ' * (width * height))
def __init__(self, tag, level='debug', disabled=False):
    """Named timing helper.

    tag: logging tag for the attached log methods.
    level: log level name parsed via log.parse_level.
    disabled: when True, skip all remaining setup (object is inert).
    """
    log.add_logging(self, tag)
    self._disabled = disabled
    if disabled:
        return
    self._level = log.parse_level(level)
    # Record the creation time under the well-known 'start' key.
    self.starts = OrderedDict([('start', time.time())])
def __init__(self):
    """Set up the merger's state machine, starting in EXPECTING_LINE."""
    log.add_logging(self, tag = 'line_continuation_merger')
    # Scratch buffers used while accumulating continued/blank lines.
    self._buffer = None
    self._blank_buffer = None
    # One instance per state.
    self.STATE_CONTINUATION = _state_continuation(self)
    self.STATE_DONE = _state_done(self)
    self.STATE_EXPECTING_LINE = _state_expecting_line(self)
    self.state = self.STATE_EXPECTING_LINE
def __init__(self, port=None, log_tag=None, users=None):
    """Prepare the web server state; the server process is NOT started here.

    port: requested listen port, or None/0 to let the OS choose.
    log_tag: logging tag; defaults to 'web_server'.
    users: mapping of credentials for auth, or falsy for none.
    """
    log.add_logging(self, tag=log_tag or 'web_server')
    self.log_i('web_server(id={} self={}, port={})'.format(
        id(self), self, port))
    self._requested_port = port
    self._users = users or {}
    self.address = None
    self._process = None
    # Used by the child process to report the actual bound address.
    self._port_queue = multiprocessing.Queue()
    # Shared C int ('i'); non-zero requests a one-shot failure - presumably
    # the HTTP status code the next request should fail with (see the server
    # process handler).
    self._fail_next_request = multiprocessing.Value('i', 0)
def __init__(self, handler):
    """Create the async handler daemon thread (not started here).

    handler: callable invoked for queued work; must be truthy.
    """
    assert handler
    super(AsyncHandler, self).__init__()
    log.add_logging(self, 'async_handler')
    self._handler = handler
    # Thread.setName() is deprecated (camelCase API, removed-path since
    # Python 3.10); assign the 'name' property instead - identical behavior.
    self.name = self.bes_log_tag__
    self._running = False
    self._running_lock = Lock()
    self._queue = Queue()
    # Daemon so the thread never blocks interpreter shutdown.
    self.daemon = True
def __init__(self, fd, callback=None):
    """Create the reader daemon thread for file descriptor fd (not started here).

    fd: non-negative file descriptor to select/read on.
    callback: optional callable notified with results - TODO confirm exact
    contract from the run() implementation.
    """
    assert fd >= 0
    super(ReaderThread, self).__init__()
    log.add_logging(self, 'reader_thread')
    # Thread.setName() is deprecated (camelCase API, removed-path since
    # Python 3.10); assign the 'name' property instead - identical behavior.
    self.name = self.bes_log_tag__
    self._select = InterruptibleSelect(fd)
    self.response_queue = Queue()
    self._running = False
    self._running_lock = Lock()
    # Daemon so the thread never blocks interpreter shutdown.
    self.daemon = True
    self._callback = callback
def __init__(self, location, niceness_level, timeout, deleter):
    """Prepare the trash process state; the worker process is NOT started here.

    location: existing directory used as the trash location.
    niceness_level / timeout: forwarded settings for the worker.
    deleter: NOTE(review) - accepted but never stored in this constructor;
    confirm it is used elsewhere or intentionally ignored.
    """
    log.add_logging(self, 'trash_process')
    self.log_i(
        'trash_process init with location=%s niceness_level=%s timeout=%s' %
        (location, niceness_level, timeout))
    assert path.isdir(location)
    self._location = location
    self._niceness_level = niceness_level
    self._timeout = timeout
    # Process bookkeeping.
    self._location_lock = Lock()
    self._process = None
    self._queue = Queue()
def __init__(self, log_tag):
    """Build the lexer's states and start the state machine in BEGIN."""
    log.add_logging(self, tag=log_tag)
    self._buffer = None
    # One instance per state.
    self.STATE_DONE = _state_done(self)
    self.STATE_NUMBER = _state_number(self)
    self.STATE_PUNCTUATION = _state_punctuation(self)
    self.STATE_TEXT = _state_text(self)
    # BEGIN is both a named state and the current state.
    self.state = self.STATE_BEGIN = _state_begin(self)
def __init__(self, log_tag):
    """Build the parser's states and start the state machine in BEGIN."""
    log.add_logging(self, tag = log_tag)
    self._buffer = None
    # One instance per state.
    self.STATE_DONE = _state_done(self)
    self.STATE_PART = _state_part(self)
    self.STATE_PART_DELIMITER = _state_part_delimiter(self)
    self.STATE_PUNCTUATION = _state_punctuation(self)
    self.STATE_TEXT = _state_text(self)
    # BEGIN is both a named state and the current state.
    self.state = self.STATE_BEGIN = _state_begin(self)
def __init__(self, location, niceness_level=None, timeout=None, deleter=None):
    """File trash rooted at 'location', serviced by a trash_process.

    location: trash directory; created if missing.
    niceness_level: worker niceness; None means DEFAULT_NICENESS.
    timeout: worker timeout; None means DEFAULT_TIMEOUT.
    deleter: deletion strategy; None means a fast_deleter.
    """
    log.add_logging(self, 'file_trash')
    # Use 'is None' rather than 'or' so that explicit falsy values
    # (e.g. niceness_level=0 or timeout=0) are honored instead of being
    # silently replaced with the defaults.
    if niceness_level is None:
        niceness_level = self.DEFAULT_NICENESS
    if timeout is None:
        timeout = self.DEFAULT_TIMEOUT
    if deleter is None:
        deleter = fast_deleter()
    file_util.mkdir(location)
    assert path.isdir(location)
    self._location = location
    # Device id is used elsewhere to decide rename-vs-copy semantics -
    # presumably; TODO confirm against the move implementation.
    self._location_device_id = file_util.device_id(self._location)
    self.trash_process = trash_process(self._location, niceness_level,
                                       timeout, deleter)
def __init__(self, filename, log_tag = None, factory = None):
    """Open (or create) the sqlite database at 'filename' and prepare a cursor.

    filename: path to the database file, or ':memory:' for an in-memory db.
    log_tag: logging tag; defaults to 'sqlite'.
    factory: sqlite3 connection factory; defaults to sqlite3.Connection.
    """
    factory = factory or sqlite3.Connection
    log.add_logging(self, tag = log_tag or 'sqlite')
    self.log_i('sqlite(filename=%s)' % (filename))
    self._filename = filename
    # Only a real on-disk database needs its parent directory created.
    if self._filename != ':memory:':
        file_util.ensure_file_dir(self._filename)
    self._filename_log_label = path.basename(self._filename)
    self._connection = sqlite3.connect(self._filename,
                                       isolation_level = 'IMMEDIATE',
                                       factory = factory,
                                       detect_types = sqlite3.PARSE_DECLTYPES)
    self._cursor = self._connection.cursor()
def __init__(self, name):
    """Build the CLI: create the argparse parser, collect command groups and
    plain commands, fail fast on duplicate handlers, synthesize a handler
    superclass, and register every subcommand.

    name: the CLI's name (also used as logging/blurb tag and in the
    synthesized handler class name).
    """
    check.check_string(name)
    self.name = name
    log.add_logging(self, self.name)
    blurb.add_blurb(self, self.name)
    self.parser = argparse.ArgumentParser()
    # Imported here rather than at module top - presumably to avoid a
    # circular import; TODO confirm.
    from bes.cli.cli_help_cli_args import cli_help_cli_args
    cli_help_cli_args.parser = self.parser
    self.commands_subparser = self.parser.add_subparsers(
        help='commands', dest='__bes_command_group__')
    # Sort groups and commands by name for stable, alphabetized help output.
    command_groups = cli_command_list(self.command_group_list())
    command_groups.sort(key=lambda item: item.name)
    commands = cli_command_list(self.command_list())
    commands.sort(key=lambda item: item.name)
    all_handlers = cli_command_list()
    all_handlers.extend(command_groups)
    all_handlers.extend(commands)
    # A handler appearing more than once is a configuration error - report
    # the full duplicate list rather than the first offender.
    dups = all_handlers.duplicate_handlers()
    if dups:
        raise RuntimeError('duplicate handlers found:\n{}\n'.format(
            pprint.pformat(dups)))
    # One synthesized superclass combines all handlers; a single instance
    # dispatches every command.
    handler_class_name = '{}_handler_superclass'.format(name)
    handler_class = all_handlers.make_handler_superclass(
        handler_class_name)
    self.handler_object = handler_class()
    for command_group in command_groups:
        self._add_command_group(self.commands_subparser, command_group.name,
                                command_group.add_args_function,
                                command_group.description)
    for command in commands:
        self._add_command(self.commands_subparser, command.name,
                          command.add_args_function, command.description)
def __init__(self, what, delimiter=line_break.DEFAULT_LINE_BREAK,
             starting_line_number=None):
    """Parse 'what' into a sequence of text lines.

    what: another text_line_parser (copy), a sequence of text_line objects,
    a sequence of tuples (used as text_line constructor args), or a string.
    delimiter: NOTE(review) - accepted but never read in this constructor;
    confirm whether it is intentional.
    starting_line_number: optional first line number.
    """
    log.add_logging(self, 'text_line_parser')
    self._line_break = line_break.DEFAULT_LINE_BREAK
    if isinstance(what, text_line_parser):
        # Copy construction: reuse the other parser's line objects.
        self._assign_text_line_seq(what._lines, starting_line_number)
        self._ends_with_line_break = False
    elif check.is_text_line_seq(what):
        # NOTE(review): _ends_with_line_break is not set on this path (nor
        # the tuple path below) - presumably a class-level default exists;
        # confirm.
        self._assign_text_line_seq(what, starting_line_number)
    elif check.is_seq(what, tuple):
        lines = [text_line(*line) for line in what]
        self._assign_text_line_seq(lines, starting_line_number)
    else:
        check.check_string(what)
        self._line_break = line_break.guess_line_break(
            what) or line_break.DEFAULT_LINE_BREAK
        self._lines = self._parse(what, starting_line_number)
        # NOTE(review): for an empty string this stores '' rather than
        # False; harmless for truthiness-only callers - confirm.
        self._ends_with_line_break = what and line_break.ends_with_line_break(
            what)
def __init__(self, parser):
    """Parser-state base: derive the state name from the class name
    (leading underscore stripped) and attach tagged logging."""
    self.name = self.__class__.__name__[1:]
    tag = '%s.%s' % (parser.__class__.__name__, self.name)
    log.add_logging(self, tag = tag)
    self.parser = parser
def __init__(self):
    """Start with no requirement descriptors and no requirements marked as tools."""
    log.add_logging(self, 'requirement_manager')
    self._descriptor_map = {}
    self._is_tool_set = set()
def __init__(self):
    """Fixture: attach 'foo'-tagged logging and switch to the very_brief format."""
    log.add_logging(self, 'foo')
    log.configure('format=very_brief')
def __init__(self, lexer):
    """Lexer-state base: derive the state name from the class name
    (leading underscore stripped) and attach tagged logging."""
    self.name = self.__class__.__name__[1:]
    tag = '%s.%s' % (lexer.__class__.__name__, self.name)
    log.add_logging(self, tag=tag)
    self.lexer = lexer
def __init__(self):
    """Fixture: attach 'foo'-tagged logging."""
    log.add_logging(self, 'foo')

def log(self):
    """No-op; deliberately shares its name with the module-level 'log' -
    presumably exercising name-shadowing behavior in tests; confirm."""
    pass
root_dir = root_dir or path.expanduser('~/') return path.join(root_dir, clazz.FILENAME) @classmethod def load(clazz, root_dir=None): p = clazz._make_filepath(root_dir=root_dir) return clazz.read_file(p) @classmethod def has_gradle_properties(clazz, root_dir=None): p = clazz._make_filepath(root_dir=root_dir) return path.isfile(p) # @classmethod # def credentials(clazz, username_key, root_dir = None): # props = clazz.load(root_dir = root_dir) # username = props.get('systemProp.gradle.wrapperUser', None) # password = props.get('systemProp.gradle.wrapperPassword', None) # return self._credentials(username, password) # @classmethod # def username(clazz, root_dir = None): # return self.credentials(root_dir = root_dir).username # @classmethod # def password(clazz, root_dir = None): # return self.credentials(root_dir = root_dir).password log.add_logging(gradle_properties, 'gradle_properties')
def __init__(self, sleep_time):
    """Deleter that throttles itself; stores sleep_time for later use."""
    log.add_logging(self, 'slow_deleter')
    self._sleep_time = sleep_time
def __init__(self):
    # NOTE(review): the 'log = 666' assignment below makes 'log' a LOCAL
    # variable for this whole function scope, so the call on the next line
    # raises UnboundLocalError at runtime. This looks like a deliberate
    # shadowing fixture for the logging tests - confirm before "fixing".
    log.add_logging(self, 'bar')
    log = 666
# Delete files that went away clazz.log_d('SRC: %s' % (src_dir)) for x in src_files: clazz.log_d(' %s' % (x)) clazz.log_d('DST: %s' % (dst_dir)) for x in dst_files: clazz.log_d(' %s' % (x)) for dst_file in dst_files: if not dst_file in src_files: file_util.remove(path.join(dst_dir, dst_file)) # Either copy new files or check a checksum and update them. # The reason for the checksum is we want to leave the mtime alone of the content didnt change for src_file in src_files: src_file_path = path.join(src_dir, src_file) dst_file_path = path.join(dst_dir, src_file) should_copy = False if path.isfile(dst_file_path): should_copy = file_util.checksum('sha1', src_file_path) != file_util.checksum('sha1', dst_file_path) should_copy_mode = file_util.mode(src_file_path) != file_util.mode(dst_file_path) else: should_copy = True should_copy_mode = True if should_copy: file_util.copy(src_file_path, dst_file_path) if should_copy_mode: file_util.copy_mode(src_file_path, dst_file_path) log.add_logging(file_sync, 'file_sync')
def __init__(self):
    """Attach 'fast_deleter'-tagged logging; no other state."""
    log.add_logging(self, 'fast_deleter')
success = False clazz._lock.acquire() if not clazz._pool: clazz._num_threads = num_threads success = True clazz._lock.release() if not success: raise RuntimeError('Global thread pool is already running. Call set_num_threads() before add_task()') @classmethod def add_task(clazz, func, *args, **kargs): 'Add a task to the global thread pool.' clazz._lock.acquire() if not clazz._pool: clazz._pool = clazz.__start_global_thread_pool_i(clazz._num_threads) clazz._lock.release() clazz._pool.add_task(func, *args, **kargs) @classmethod def __start_global_thread_pool_i(clazz, num_threads): clazz.log_d('Starting global thread pool with %d threads.' % (num_threads)) gtp = thread_pool(num_threads = num_threads) def __global_thread_pool_atexit_cleanup(thread_pool): thread_pool.log_d('__global_thread_pool_atexit_cleanup(%s) waiting...' % (thread_pool)) thread_pool.wait_completion() thread_pool.log_d('__global_thread_pool_atexit_cleanup(%s) done waiting...' % (thread_pool)) atexit.register(__global_thread_pool_atexit_cleanup, gtp) return gtp log.add_logging(global_thread_pool, 'global_thread_pool')
def __init__(self, request, client_address, server):
    """WSGI request handler with 'web_server'-tagged logging attached."""
    log.add_logging(self, 'web_server')
    # Explicit base-class call (not super()): per the socketserver
    # BaseRequestHandler protocol, the request is handled during __init__.
    simple_server.WSGIRequestHandler.__init__(self, request, client_address,
                                              server)
def __init__(self, server_class):
    """Controller for instances of server_class; no server is started here."""
    log.add_logging(self, 'web_server_controller')
    self._server_class = server_class
    self._server = None
    self.address = None
def __init__(self, server_class):
    """Wrap server_class in a web_server_controller that manages its lifetime."""
    log.add_logging(self, 'web_server')
    self._controller = web_server_controller(server_class)
result = clazz.find(root_dir, relative = relative, min_depth = min_depth, max_depth = max_depth, file_type = file_type) if not patterns: return result return file_match.match_fnmatch(result, patterns, match_type) @classmethod def find_re(clazz, root_dir, expressions, match_type, relative = True, min_depth = None, max_depth = None, file_type = FILE): assert expressions assert match_type assert key result = clazz.find(root_dir, relative = relative, min_depth = min_depth, max_depth = max_depth, file_type = file_type) if not expressions: return result return file_match.match_re(result, expressions, match_type) @classmethod def find_dirs(clazz, root_dir, relative = True, min_depth = None, max_depth = None): return clazz.find(root_dir, relative = relative, min_depth = min_depth, max_depth = max_depth, file_type = clazz.DIR) log.add_logging(file_find2, 'file_find2')