def __init__(self, width, height):
  'Create an ascii art canvas with the given dimensions.'
  log.add_logging(self, 'canvas')
  #Log.set_tag_level('canvas', log.DEBUG)
  self.width = width
  self.height = height
  # One flat list cell per (x, y) position, all starting out blank.
  num_cells = width * height
  self._matrix = [ ' ' for _ in range(num_cells) ]
def __init__(self, root_dir, artifact_manager, log_tag='package_manager'):
  # Package manager rooted at root_dir: packages are installed under
  # root_dir/stuff and tracked in a sqlite db under root_dir/db.
  log.add_logging(self, log_tag)
  check.check_artifact_manager(artifact_manager)
  self._root_dir = root_dir
  self._artifact_manager = artifact_manager
  # Database of installed packages; the connection is opened lazily.
  self._database_path = path.join(self._root_dir, 'db/packages.db')
  self._db = None
  # Layout of the installation tree under root_dir.
  self._installation_dir = path.join(self._root_dir, 'stuff')
  self._env_dir = path.join(self._root_dir, 'env')
  self._lib_dir = path.join(self._installation_dir, 'lib')
  self._bin_dir = path.join(self._installation_dir, 'bin')
  self._include_dir = path.join(self._installation_dir, 'include')
  self._python_lib_dir = path.join(self._installation_dir, 'lib/python')
  self._share_dir = path.join(self._installation_dir, 'share')
  self._compile_instructions_dir = path.join(self._installation_dir, 'lib/rebuild_instructions')
  self._shell_framework_dir = path.join(self._env_dir, 'framework')
  # Environment exported to shells so installed binaries/libs/modules resolve.
  self._shell_env = { os_env.LD_LIBRARY_PATH_VAR_NAME: self._lib_dir,
                      'PATH': self._bin_dir,
                      'PYTHONPATH': self._python_lib_dir,
                      'PKG_CONFIG_PATH': pkg_config.make_pkg_config_path_for_unix_env( self._installation_dir) }
def __init__(self, options = 0):
  'Parser for string lists; a tiny two-state state machine.'
  log.add_logging(self, tag = 'string_list_parser')
  self._options = options
  # Build both state objects once, then start out expecting a string.
  self.STATE_DONE = _state_done(self)
  self.STATE_EXPECTING_STRING = _state_expecting_string(self)
  self.state = self.STATE_EXPECTING_STRING
def __init__(self, filename, log_tag = None):
  'Open (or create) a sqlite database at filename.'
  tag = log_tag or 'sqlite'
  log.add_logging(self, tag = tag)
  self.log_i('sqlite(filename=%s)' % (filename))
  self._filename = filename
  on_disk = self._filename != ':memory:'
  if on_disk:
    # A real file needs its parent directory to exist; ':memory:' does not.
    file_util.ensure_file_dir(self._filename)
  self._connection = sqlite3.connect(self._filename)
  self._cursor = self._connection.cursor()
def __init__(self, tag, level = 'debug', disabled = False):
  'Timer that records named start checkpoints; a no-op when disabled.'
  log.add_logging(self, tag)
  self._disabled = disabled
  if disabled:
    # A disabled timer needs no further state.
    return
  self._level = log.parse_level(level)
  self.starts = OrderedDict()
  self.starts['start'] = time.time()
def __init__(self):
  'Merges physical lines joined by continuations into logical lines.'
  log.add_logging(self, tag = 'line_continuation_merger')
  # Construct every state up front, then start out expecting a fresh line.
  self.STATE_DONE = _state_done(self)
  self.STATE_EXPECTING_LINE = _state_expecting_line(self)
  self.STATE_CONTINUATION = _state_continuation(self)
  self.state = self.STATE_EXPECTING_LINE
  self._buffer = None
  self._blank_buffer = None
def __init__(self, handler):
  'Worker thread that forwards queued work to handler.'
  assert handler
  super(AsyncHandler, self).__init__()
  log.add_logging(self, 'async_handler')
  self._handler = handler
  # Name the thread after the log tag so it is identifiable in thread dumps.
  self.setName(self.bes_log_tag__)
  self._running_lock = Lock()
  self._running = False
  self._queue = Queue()
  # Daemon thread: never block interpreter exit.
  self.daemon = True
def __init__(self, location, niceness_level = None, timeout = None, deleter = None):
  'Trash can rooted at location; a trash_process does the deleting.'
  log.add_logging(self, 'file_trash')
  # Fall back to class defaults for anything the caller did not supply.
  niceness_level = niceness_level or self.DEFAULT_NICENESS
  timeout = timeout or self.DEFAULT_TIMEOUT
  deleter = deleter or fast_deleter()
  # The trash directory must exist before we can stat it.
  file_util.mkdir(location)
  assert path.isdir(location)
  self._location = location
  self._location_device_id = file_util.device_id(self._location)
  self.trash_process = trash_process(self._location, niceness_level, timeout, deleter)
def __init__(self, location, niceness_level, timeout, deleter):
  'Background deleter bound to the trash directory at location.'
  log.add_logging(self, 'trash_process')
  self.log_i('trash_process init with location=%s niceness_level=%s timeout=%s' % (location, niceness_level, timeout))
  assert path.isdir(location)
  self._location = location
  self._niceness_level = niceness_level
  self._timeout = timeout
  # The queue feeds work to the process; the lock guards its lifecycle.
  self._queue = Queue()
  self._process = None
  self._location_lock = Lock()
def __init__(self, fd, callback = None):
  'Thread that select()s on fd and posts what it reads to response_queue.'
  assert fd >= 0
  super(ReaderThread, self).__init__()
  log.add_logging(self, 'reader_thread')
  self.setName(self.bes_log_tag__)
  self._callback = callback
  self._select = InterruptibleSelect(fd)
  self.response_queue = Queue()
  self._running_lock = Lock()
  self._running = False
  # Daemon thread so a blocked read never prevents interpreter exit.
  self.daemon = True
def __init__(self, log_tag, options):
  'Base string lexer: a per-character state machine.'
  log.add_logging(self, tag = log_tag)
  self._options = options or self.DEFAULT_OPTIONS
  # Decode the option bitmask into individual feature flags.
  opts = self._options
  self._keep_quotes = bool(opts & self.KEEP_QUOTES)
  self._escape_quotes = bool(opts & self.ESCAPE_QUOTES)
  self._ignore_comments = bool(opts & self.IGNORE_COMMENTS)
  self._buffer = None
  self._is_escaping = False
  self._last_char = None
  # One instance of every lexer state; transitions assign self.state.
  self.STATE_BEGIN = string_lexer_state_begin(self)
  self.STATE_DONE = string_lexer_state_done(self)
  self.STATE_STRING = string_lexer_state_string(self)
  self.STATE_SPACE = string_lexer_state_space(self)
  self.STATE_SINGLE_QUOTED_STRING = string_lexer_state_single_quoted_string(self)
  self.STATE_DOUBLE_QUOTED_STRING = string_lexer_state_double_quoted_string(self)
  self.STATE_COMMENT = string_lexer_state_comment(self)
  self.state = self.STATE_BEGIN
def __init__(self):
  'App wrapper that owns a controller for the sample web server.'
  log.add_logging(self, 'app')
  self._controller = web_server_controller(sample_web_server)
  # --- tail of a method whose "def" line is outside this chunk; appears to
  # set the global pool's thread count, failing if the pool already started.
  success = False
  clazz._lock.acquire()
  if not clazz._pool:
    clazz._num_threads = num_threads
    success = True
  clazz._lock.release()
  if not success:
    raise RuntimeError('Global thread pool is already running. Call set_num_threads() before add_task()')

@classmethod
def add_task(clazz, func, *args, **kargs):
  'Add a task to the global thread pool.'
  # Lazily start the pool under the lock; the add_task call itself happens
  # outside the lock, which is safe because _pool is never reset once set.
  clazz._lock.acquire()
  if not clazz._pool:
    clazz._pool = clazz.__start_global_thread_pool_i(clazz._num_threads)
  clazz._lock.release()
  clazz._pool.add_task(func, *args, **kargs)

@classmethod
def __start_global_thread_pool_i(clazz, num_threads):
  # Create the pool and register an atexit hook so queued tasks are drained
  # before interpreter shutdown.
  clazz.log_d('Starting global thread pool with %d threads.' % (num_threads))
  gtp = thread_pool(num_threads = num_threads)
  def __global_thread_pool_atexit_cleanup(thread_pool):
    thread_pool.log_d('__global_thread_pool_atexit_cleanup(%s) waiting...' % (thread_pool))
    thread_pool.wait_completion()
    thread_pool.log_d('__global_thread_pool_atexit_cleanup(%s) done waiting...' % (thread_pool))
  atexit.register(__global_thread_pool_atexit_cleanup, gtp)
  return gtp

log.add_logging(global_thread_pool, 'global_thread_pool')
def __init__(self, sleep_time):
  'Deleter configured with a sleep_time delay.'
  log.add_logging(self, 'slow_deleter')
  self._sleep_time = sleep_time
def __init__(self):
  'Tracks requirement descriptors and the set of names flagged as tools.'
  log.add_logging(self, 'requirement_manager')
  self._descriptor_map = {}
  self._is_tool_set = set()
result = clazz.find(root_dir, relative = relative, min_depth = min_depth, max_depth = max_depth, file_type = file_type) if not patterns: return result return file_match.match_fnmatch(result, patterns, match_type) @classmethod def find_re(clazz, root_dir, expressions, match_type, relative = True, min_depth = None, max_depth = None, file_type = FILE): assert expressions assert match_type assert key result = clazz.find(root_dir, relative = relative, min_depth = min_depth, max_depth = max_depth, file_type = file_type) if not expressions: return result return file_match.match_re(result, expressions, match_type) @classmethod def find_dirs(clazz, root_dir, relative = True, min_depth = None, max_depth = None): return clazz.find(root_dir, relative = relative, min_depth = min_depth, max_depth = max_depth, file_type = clazz.DIR) log.add_logging(find, 'find')
def __init__(self):
  'Deleter with no configuration; only sets up logging.'
  log.add_logging(self, 'fast_deleter')
def __init__(self):
  'Scheduler thread; logs under the "scheduler" tag.'
  super(Scheduler, self).__init__()
  log.add_logging(self, 'scheduler')
def __init__(self, tag):
  'Holds an ordered list of steps; logs and blurbs under tag.'
  log.add_logging(self, tag)
  build_blurb.add_blurb(self, tag)
  self._tag = tag
  self._steps = []
  self._stop_step = None
def __init__(self, parser):
  'Base for parser states; log tag is "<parser_class>.<state_name>".'
  # Drop the class name's leading underscore (e.g. _state_done -> state_done).
  self.name = self.__class__.__name__[1:]
  log.add_logging(self, tag = '%s.%s' % (parser.__class__.__name__, self.name))
  self.parser = parser
def __init__(self, recipe, build_target, env):
  # Build driver for one recipe/build_target pair: lays out the per-build
  # working directory tree and the substitution variables used by steps.
  log.add_logging(self, 'rebuild')
  build_blurb.add_blurb(self, 'rebuild')
  check.check_recipe(recipe)
  check.check_build_target(build_target)
  self.env = env
  self.timer = self.env.config.timer
  self.recipe = recipe
  self.build_target = build_target
  # A recipe can be conditionally enabled per target via an expression.
  self.enabled = self.build_target.parse_expression(recipe.enabled)
  # NOTE(review): self.filename and self.descriptor are read but never
  # assigned here - presumably properties defined elsewhere on this class;
  # confirm.
  self.recipe_dir = path.dirname(self.filename)
  self._step_manager = step_manager('rebuild')
  self.working_dir = self._make_working_dir(self.env.config.builds_dir(self.build_target), self.descriptor.full_name, self.env.config.timestamp)
  # Per-build directory layout under working_dir.
  self.source_unpacked_dir = path.join(self.working_dir, 'source')
  self.build_dir = path.join(self.working_dir, 'build')
  self.stage_dir = path.join(self.working_dir, 'stage')
  self.staged_files_dir = path.join(self.stage_dir, 'files')
  self.staged_files_lib_dir = path.join(self.staged_files_dir, 'lib')
  self.staged_files_bin_dir = path.join(self.staged_files_dir, 'bin')
  self.staged_files_instructions_dir = path.join(self.staged_files_lib_dir, 'rebuild_instructions')
  # NOTE(review): "stagged" looks like a typo for "staged", but renaming the
  # attribute would break outside readers - left as is.
  self.stagged_env_dir = path.join(self.stage_dir, 'env')
  self.artifact_dir = path.join(self.working_dir, 'artifact')
  self.logs_dir = path.join(self.working_dir, 'logs')
  self.test_dir = path.join(self.working_dir, 'test')
  self.check_dir = path.join(self.working_dir, 'check')
  self.temp_dir = path.join(self.working_dir, 'temp')
  self.python_lib_dir = path.join(self.staged_files_dir, 'lib/python')
  # Private package manager used to install this recipe's requirements.
  self.requirements_manager = package_manager(path.join(self.working_dir, 'requirements'), env.artifact_manager)
  # Variables substituted into recipe step values.
  self.substitutions = {
    'REBUILD_BUILD_DIR': self.build_dir,
    'REBUILD_PACKAGE_DESCRIPTION': self.descriptor.name,
    'REBUILD_PACKAGE_FULL_NAME': self.descriptor.full_name,
    'REBUILD_PACKAGE_FULL_VERSION': str(self.descriptor.version),
    'REBUILD_PACKAGE_NAME': self.descriptor.name,
    'REBUILD_PACKAGE_UPSTREAM_VERSION': self.descriptor.version.upstream_version,
    'REBUILD_PYTHON_PLATFORM_NAME': self.build_target.system,
    'REBUILD_RECIPE_DIR': path.abspath(self.recipe_dir),
    'REBUILD_REQUIREMENTS_BIN_DIR': self.requirements_manager.bin_dir,
    'REBUILD_REQUIREMENTS_DIR': self.requirements_manager.installation_dir,
    'REBUILD_REQUIREMENTS_INCLUDE_DIR': self.requirements_manager.include_dir,
    'REBUILD_REQUIREMENTS_LIB_DIR': self.requirements_manager.lib_dir,
    'REBUILD_REQUIREMENTS_SHARE_DIR': self.requirements_manager.share_dir,
    'REBUILD_SOURCE_UNPACKED_DIR': self.source_unpacked_dir,
    'REBUILD_STAGE_FRAMEWORKS_DIR': path.join(self.staged_files_dir, 'frameworks'),
    'REBUILD_STAGE_PREFIX_DIR': self.staged_files_dir,
    'REBUILD_STAGE_PYTHON_LIB_DIR': self.python_lib_dir,
    'REBUILD_TEMP_DIR': self.temp_dir,
    'REBUILD_TEST_DIR': self.test_dir,
  }
  self._add_steps()
def tag(self, tag):
  'Re-tag both the logger and the blurb, then notify via on_tag_changed().'
  log.add_logging(self, tag)
  build_blurb.add_blurb(self, tag)
  self.on_tag_changed()
  # --- tail of a method whose "def" line is outside this chunk; appears to
  # finish by ejecting the mounted image and returning its file list.
  clazz._eject(mnt.mount_point)
  return files

@classmethod
def extract(clazz, dmg, dst_dir):
  # Mount the dmg at a temp dir, copy its whole tree into dst_dir, eject.
  file_check.check_file(dmg)
  file_util.mkdir(dst_dir)
  mnt = clazz._mount_at_temp_dir(dmg)
  tar_util.copy_tree_with_tar(mnt.mount_point, dst_dir)
  clazz._eject(mnt.mount_point)

@classmethod
def _mount_at_temp_dir(clazz, dmg):
  # Attach the image with hdiutil and parse its -plist output for the
  # mounted system entities.
  file_check.check_file(dmg)
  tmp_dir = temp_file.make_temp_dir()
  rv = clazz._execute_cmd('hdiutil', 'attach', '-mountpoint', tmp_dir, '-plist', dmg)
  entries = plistlib_loads(rv.stdout.encode('utf-8'))
  return clazz.mount_info(dmg, tmp_dir, entries.get('system-entities', []))

@classmethod
def _eject(clazz, mount_point):
  clazz._execute_cmd('hdiutil', 'eject', mount_point)

@classmethod
def _execute_cmd(clazz, *args):
  # NOTE(review): args are joined with spaces, so any argument containing
  # whitespace would be split again by execute - confirm callers never do.
  cmd = ' '.join(args)
  clazz.log_i('executing: "%s"' % (cmd))
  return execute.execute(cmd)

log.add_logging(dmg, 'dmg')
def __init__(self, lexer):
  'Base for lexer states; log tag is "<lexer_class>.<state_name>".'
  # Strip the class name's leading underscore for a readable state name.
  self.name = self.__class__.__name__[1:]
  log.add_logging(self, tag = '%s.%s' % (lexer.__class__.__name__, self.name))
  self.lexer = lexer
@classmethod
def device_id(clazz, filename):
  'Return the id of the device that filename lives on.'
  return os.stat(filename).st_dev

# https://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size
@classmethod
def sizeof_fmt(clazz, num, suffix='B'):
  'Format num bytes as a human readable string, e.g. 1536 -> "1.5KiB".'
  for unit in ['','Ki','Mi','Gi','Ti','Pi','Ei','Zi']:
    if abs(num) < 1024.0:
      return "%3.1f%s%s" % (num, unit, suffix)
    num /= 1024.0
  return "%.1f%s%s" % (num, 'Yi', suffix)

# https://stackoverflow.com/questions/1131220/get-md5-hash-of-big-files-in-python
@classmethod
def checksum(clazz, function_name, filename, chunk_size = 1024 * 1024):
  'Return the hex digest of filename using the named hash function, reading in chunks.'
  # BUG FIX: the default chunk_size was 1024 * 1204 - an obvious typo for
  # 1024 * 1024.  The digest is unchanged; only the read granularity was odd.
  hasher = hashlib.new(function_name)
  with open(filename, 'rb') as fin:
    for chunk in iter(lambda: fin.read(chunk_size), b''):
      hasher.update(chunk)
  return hasher.hexdigest()

@classmethod
def relocate_file(clazz, filename, dst_dir):
  'Move filename into dst_dir keeping its basename; return the new path.'
  new_filename = path.join(dst_dir, path.basename(filename))
  file_util.rename(filename, new_filename)
  return new_filename

log.add_logging(file_util, 'file_util')
result = clazz.find(root_dir, relative = relative, min_depth = min_depth, max_depth = max_depth, file_type = file_type) if not patterns: return result return file_match.match_fnmatch(result, patterns, match_type) @classmethod def find_re(clazz, root_dir, expressions, match_type, relative = True, min_depth = None, max_depth = None, file_type = FILE): assert expressions assert match_type assert key result = clazz.find(root_dir, relative = relative, min_depth = min_depth, max_depth = max_depth, file_type = file_type) if not expressions: return result return file_match.match_re(result, expressions, match_type) @classmethod def find_dirs(clazz, root_dir, relative = True, min_depth = None, max_depth = None): return clazz.find(root_dir, relative = relative, min_depth = min_depth, max_depth = max_depth, file_type = clazz.DIR) log.add_logging(file_find2, 'file_find2')
  # --- tail of a temp-dir factory whose "def" line is outside this chunk ---
  prefix = prefix or clazz._DEFAULT_PREFIX
  suffix = suffix or clazz._DEFAULT_DIR_SUFFIX
  if dir and not path.isdir(dir):
    file_util.mkdir(dir)
  tmp_dir = tempfile.mkdtemp(prefix = prefix, suffix = suffix, dir = dir)
  assert path.isdir(tmp_dir)
  if items:
    clazz.write_temp_files(tmp_dir, items)
  if delete:
    clazz.atexit_delete(tmp_dir)
  return tmp_dir

@classmethod
def atexit_delete(clazz, filename):
  'Delete filename atexit time.'
  def _delete_file(*args, **kargs):
    filename = args[0]
    clazz.log_d('Removing %s atexit time.' % (filename))
    file_util.remove(filename)
  # NOTE(review): the callback receives [ filename ] (a one element list),
  # so file_util.remove is handed a list - presumably it accepts sequences;
  # confirm, otherwise this should pass filename directly.
  atexit.register(_delete_file, [ filename ])

@classmethod
def write_temp_files(clazz, root_dir, items):
  'Write a sequence of temp files specified by items.'
  for item in items:
    item.write(root_dir)

log.add_logging(temp_file, 'temp_file')
#log.configure('temp_file=debug')
  # --- tail of thread_pool_worker.__init__ whose "def" line is outside this
  # chunk: store the shared queue and start the daemon worker thread.
  self.tasks = tasks
  self.daemon = True
  self.start()
  self.log_d('Started worker %s for thread pool' % (self))

def run(self):
  # Worker loop: pull (func, args, kargs) tuples forever; exceptions are
  # logged and swallowed so one bad task never kills the worker.
  while True:
    func, args, kargs = self.tasks.get()
    self.log_d('Executing task %s(%s, %s)' % (func, str(args), str(kargs)))
    try:
      func(*args, **kargs)
    except Exception as ex:
      self.log_exception(ex)
    # Always mark the task done so Queue.join()-style waits can finish.
    self.tasks.task_done()
    self.log_d('Done executing task %s' % (func))

log.add_logging(thread_pool_worker, 'worker')

class thread_pool(object):
  """Pool of threads consuming tasks from a queue"""

  def __init__(self, num_threads):
    super(thread_pool, self).__init__()
    self.log_d('Creating thread_pool with %d threads.' % (num_threads))
    # Bounded queue: add_task blocks once num_threads tasks are pending.
    self.tasks = Queue(num_threads)
    for _ in range(num_threads):
      thread_pool_worker(self.tasks)

  def add_task(self, func, *args, **kargs):
    """Add a task to the queue"""
    self.log_d('add_task(%s)' % (func))
    self.tasks.put((func, args, kargs))
  # --- tail of a method whose "def" line is outside this chunk ---
  return (name, description)

@classmethod
def _call_pkg_config(clazz, args, PKG_CONFIG_LIBDIR=[], PKG_CONFIG_PATH=[]):
  # Run pkg-config with an explicit environment built from the given search
  # paths.  NOTE(review): the mutable [] defaults are never mutated here, so
  # they are harmless, but None defaults would be more conventional.
  check.check_string_seq(PKG_CONFIG_PATH)
  cmd = [clazz._PKG_CONFIG_EXE] + object_util.listify(args)
  env = {
    'PKG_CONFIG_DEBUG_SPEW': '1',
    'PKG_CONFIG_LIBDIR': ':'.join(PKG_CONFIG_LIBDIR),
    'PKG_CONFIG_PATH': ':'.join(PKG_CONFIG_PATH),
    # 'PATH': os_env_var('PATH').value,
  }
  # Ensure every search dir exists before invoking pkg-config.
  for p in PKG_CONFIG_PATH:
    file_util.mkdir(p)
  #build_blurb.blurb_verbose('pkg_config', '_call_pkg_config() cmd=%s' % (str(cmd)))
  #print('pkg_config', '_call_pkg_config() cmd=%s; env=%s' % (str(cmd), str(env)))
  #print('pkg_config', '_call_pkg_config() cmd=%s' % (str(cmd)))
  rv = execute.execute(cmd, env=env)
  return rv

@classmethod
def _parse_flags(clazz, s):
  # Split a flag string on whitespace and de-duplicate, keeping order.
  flags = string_util.split_by_white_space(s)
  return algorithm.unique([flag.strip() for flag in flags])

log.add_logging(pkg_config, 'pkg_config')
def __init__(self):
  log.add_logging(self, 'foo')

# NOTE(review): this method shadows the module-level "log" on instances -
# after construction, self.log resolves to this no-op method.  It looks
# deliberate (likely a fixture exercising log.add_logging name collisions);
# confirm before changing.
def log(self):
  pass
def __init__(self):
  log.add_logging(self, 'bar')

# NOTE(review): the collapsed source is ambiguous about whether this sits at
# class scope (a class attribute shadowing the log module on instances) or
# inside __init__ (which would make "log" a local and raise
# UnboundLocalError on the call above).  Class scope matches the sibling
# fixture with "def log", so it is formatted that way here - confirm.
log = 666
def __init__(self):
  'Artifact manager: resets requirement managers and sets up a timer.'
  log.add_logging(self, 'artifact_manager')
  build_blurb.add_blurb(self, 'artifact_manager')
  self._reset_requirement_managers()
  self._read_only = False
  # Timer is constructed disabled; flip "disabled" to profile this class.
  self._timer = debug_timer('am', 'error', disabled = True)
def __init__(self):
  # Build the whole "remanage" command line interface: top level commands
  # (tools, packages, package, config, test) each get a sub-parser tree.
  log.add_logging(self, 'remanage')
  self.parser = argparse.ArgumentParser()
  commands_subparser = self.parser.add_subparsers(help='commands', dest='command')
  # A tool command
  self.tools_parser = commands_subparser.add_parser('tools', help='Tools')
  self.tools_subparsers = self.tools_parser.add_subparsers(help='tools_commands', dest='subcommand')
  self.tools_subparsers.add_parser('update', help='Update installed tools')
  self.tools_subparsers.add_parser('print', help='Print installed tools')
  install_parser = self.tools_subparsers.add_parser('install', help='Install tools')
  install_parser.add_argument('dest_dir', action='store', default=None, help='Destination directory [ cwd ]')
  # packages
  self.packages_parser = commands_subparser.add_parser('packages', help='Packages')
  self.packages_subparsers = self.packages_parser.add_subparsers(help='packages_commands', dest='subcommand')
  # NOTE(review): 'update' and 'print' are registered again further below
  # with full argument sets; the later registrations replace these.  Confirm
  # these two bare registrations are intentional.
  self.packages_subparsers.add_parser('update', help='Update installed packages')
  self.packages_subparsers.add_parser('print', help='Print installed packages')
  # packages.install
  install_parser = self.packages_subparsers.add_parser('install', help='Install packages')
  self._packages_add_common_args(install_parser)
  install_parser.add_argument('--wipe', '-w', action='store_true', default=False, help='Wipe the stuff dir before installing [ False ]')
  install_parser.add_argument('dest_dir', action='store', default=None, help='Destination directory [ None ]')
  install_parser.add_argument('project_name', action='store', default=None, help='Name of project [ None ]')
  install_parser.add_argument('packages', action='store', default=None, nargs='+', help='Packages to install [ None ]')
  # packages.uninstall
  uninstall_parser = self.packages_subparsers.add_parser('uninstall', help='Uninstall packages')
  self._packages_add_common_args(uninstall_parser)
  uninstall_parser.add_argument('dest_dir', action='store', default=None, help='Destination directory [ None ]')
  uninstall_parser.add_argument('project_name', action='store', default=None, help='Name of project [ None ]')
  uninstall_parser.add_argument('packages', action='store', default=None, nargs='+', help='Packages to uninstall [ None ]')
  # packages.update
  update_parser = self.packages_subparsers.add_parser('update', help='Update packages')
  self._packages_add_common_args(update_parser)
  update_parser.add_argument('--wipe', '-w', action='store_true', default=False, help='Wipe the installation dir before updating [ False ]')
  update_parser.add_argument('--downgrade', action='store_true', default=False, help='Allow downgrade of packages [ False ]')
  update_parser.add_argument('--force', action='store_true', default=False, help='Force update of packages even if the version is the same [ False ]')
  update_parser.add_argument('--dont-touch-update-script', action='store_true', default=False, help='Dont touch the update.sh script when done updating packages. [ False ]')
  update_parser.add_argument('project_name', action='store', default=None, nargs='?', help='Project name [ None ]')
  # packages.print
  print_parser = self.packages_subparsers.add_parser('print', help='Print packages')
  self._packages_add_common_args(print_parser)
  print_parser.add_argument('project_name', action='store', default=None, nargs='?', help='Project name [ None ]')
  # package
  self.package_parser = commands_subparser.add_parser('package', help='package')
  self.package_subparsers = self.package_parser.add_subparsers(help='package_commands', dest='subcommand')
  # package.files
  package_files_parser = self.package_subparsers.add_parser('files', help='List files in package')
  package_files_parser.add_argument('package', action='store', help='package to list files for')
  # package.info
  package_info_parser = self.package_subparsers.add_parser('info', help='List info in package')
  package_info_parser.add_argument('package', action='store', help='package to list info for')
  # package.metadata
  package_metadata_parser = self.package_subparsers.add_parser('metadata', help='List metadata in package')
  package_metadata_parser.add_argument('package', action='store', help='package to list metadata for')
  # config
  self.config_parser = commands_subparser.add_parser('config', help='Config')
  self.config_subparsers = self.config_parser.add_subparsers(help='config_commands', dest='subcommand')
  # config:packages
  packages_parser = self.config_subparsers.add_parser('packages', help='Print information about config packages.')
  self._packages_add_common_args(packages_parser)
  packages_parser.add_argument('project_name', action='store', default=None, help='Name of project [ None ]')
  # config:projects
  projects_parser = self.config_subparsers.add_parser('projects', help='Print information about config projects.')
  self._packages_add_common_args(projects_parser)
  # test
  self.test_parser = commands_subparser.add_parser('test', help='Test')
  self.test_parser.add_argument('-v', '--verbose', action='store_true')
  self.test_parser.add_argument('-l', '--level', action='store', type=str, default='release', help='Build level. One of (%s) [ release ]' % (','.join(build_level.LEVELS)))
  self.test_parser.add_argument('--tmp-dir', action='store', default=None, help='Temporary directory to use or a random one if not given. [ None ]')
  self.test_parser.add_argument('artifacts_dir', action='store', default=None, type=str, help='The place to locate artifacts [ ~/artifacts ]')
  self.test_parser.add_argument('tools_dir', action='store', default=None, type=str, help='The place to locate tools [ ~/tools ]')
  self.test_parser.add_argument('package_tarball', action='store', default=None, type=str, help='The tarball of the package to test [ None ]')
  self.test_parser.add_argument('test', action='store', type=str, help='The test(s) to run [ None ]')
def __init__(self, server_class):
  'Holds a web server class plus the (not yet created) server and address.'
  log.add_logging(self, 'web_server_controller')
  self._server_class = server_class
  self._server = None
  self.address = None