def test_register_class(self):
  'register_class() generates a check_foo() that accepts foo instances and rejects anything else.'
  class foo(object):
    pass
  C.register_class(foo, 'foo')
  # An instance of the registered class passes the generated checker.
  C.check_foo(foo())
  # Any other type raises TypeError.  The context object was previously
  # captured but never inspected, so it is no longer bound.
  with self.assertRaises(TypeError):
    C.check_foo(6)
def __new__(meta, name, bases, class_dict):
  'Create an enum class: load its definition, register it with check and attach CONSTANTS.'
  clazz = type.__new__(meta, name, bases, class_dict)
  # If _ENUM is already present it was inherited from a concrete enum base;
  # subclassing a concrete enum is explicitly disallowed.
  if hasattr(clazz, '_ENUM'):
    raise RuntimeError('subclassing %s not allowed.' % (bases[-1]))
  e = enum_loader.load(clazz)
  if e:
    clazz._ENUM = e
  # Register the class so check.check_<name>() / check.is_<name>() exist.
  check.register_class(clazz, name = name, cast_func = _enum_meta_class._check_cast_func)
  # Expose every name/value pair of the enum as attributes of a CONSTANTS holder.
  class constants(object):
    pass
  # NOTE(review): if enum_loader.load() returned a falsy value, _ENUM was never
  # assigned above and this access raises AttributeError - presumably load()
  # always succeeds for real enum classes; confirm.
  for n in clazz._ENUM.name_values:
    setattr(constants, n.name, n.value)
  clazz.CONSTANTS = constants
  return clazz
class temp_item(namedtuple('temp_item', 'filename, content, mode')): 'Description of an temp item.' def __new__(clazz, filename, content = None, mode = None): return clazz.__bases__[0].__new__(clazz, filename, content, mode) def write(self, root_dir): p = path.join(root_dir, self.filename) if path.isfile(self.content): content = file_util.read(self.content) else: content = self.content file_util.save(p, content = content, mode = self.mode) check.register_class(temp_item) class temp_file(object): _DEFAULT_PREFIX = file_util.remove_extension(path.basename(sys.argv[0])) + '-tmp-' _DEFAULT_SUFFIX = '' _DEFAULT_DIR_SUFFIX = '.dir' @classmethod def make_temp_file(clazz, content = None, prefix = None, suffix = None, dir = None, mode = 'w+b', delete = True): 'Write content to a temporary file. Returns the file object.' prefix = prefix or clazz._DEFAULT_PREFIX suffix = suffix or clazz._DEFAULT_SUFFIX if dir and not path.isdir(dir): file_util.mkdir(dir) tmp = tempfile.NamedTemporaryFile(prefix = prefix,
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-

from collections import namedtuple

from bes.common import check

class value_origin(namedtuple('value_origin', 'filename, line_number, text')):
  'Where a value came from (file, line and raw text); used mostly for error reporting.'

  def __new__(clazz, filename, line_number, text):
    # Validate each field before the immutable tuple is constructed.
    check.check_string(filename)
    check.check_int(line_number)
    check.check_string(text)
    return clazz.__bases__[0].__new__(clazz, filename, line_number, text)

  def __str__(self):
    return '{}:{}'.format(self.filename, self.line_number)

  def __repr__(self):
    return self.__str__()

check.register_class(value_origin)
else: data_str = str(self.data) buf.write(data_str) buf.write('\n') for child in self.children: buf.write(child.to_string(depth + indent, data_func = data_func)) return buf.getvalue() flat_result = namedtuple('flat_result', 'path, node') @classmethod def _flatten(clazz, result, stack, n): stack.append(n) if not n.children: p = [ n.data for n in stack] result.append(clazz.flat_result(p, n)) stack.pop() else: for child in n.children: clazz._flatten(result, stack, child) stack.pop() def flat_paths(self): 'Return a list of ( path, node ) tuples for all leaf nodes.' stack = [] result = [] self._flatten(result, stack, self) return sorted(result, key = lambda x: ( x.path.count('/'), x.path )) check.register_class(node)
from abc import abstractmethod, ABCMeta

from bes.system.compat import with_metaclass

from bes.common import check

class dependency_provider(with_metaclass(ABCMeta, object)):
  'Interface for objects that can enumerate the dependencies they provide.'

  def __init__(self):
    pass

  @abstractmethod
  def provided(self):
    'Return a list of dependencies provided by this provider.'

  @classmethod
  def determine_provided(clazz, o):
    'Return the dependencies provided by o: a single provider, a sequence of providers, or neither.'
    if check.is_dependency_provider(o):
      return o.provided()
    if check.is_dependency_provider_seq(o):
      # Flatten the contribution of every provider in the sequence.
      return [ dep for item in o for dep in item.provided() ]
    return []

check.register_class(dependency_provider)
assert len(self.values) == 1 assert self.values[0].mask == None spaces = depth * indent * ' ' buf = StringIO() buf.write(spaces) buf.write(self.key) buf.write(':') buf.write(' ') buf.write(self.values[0].value_to_string()) return buf.getvalue() def _to_string_multi_line(self, depth, indent): assert len(self.values) > 0 spaces = depth * indent * ' ' buf = StringIO() buf.write(spaces) buf.write(self.key) buf.write('\n') for value in self.values: buf.write(spaces) buf.write(indent * ' ') buf.write(str(value)) buf.write('\n') return buf.getvalue().strip() def resolve(self, system, class_name): return self.values.resolve(system, class_name) check.register_class(recipe_value, include_seq=False)
def test_check_seq(self):
  'Registering a class generates a check_<name>_seq() that accepts a homogeneous sequence.'
  class orange(object):
    pass
  C.register_class(orange, 'orange')
  seq = [ orange(), orange() ]
  C.check_orange_seq(seq)
#!/usr/bin/env python
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-

from bes.common import check
from .step_registry import step_registry
from collections import namedtuple

class step_description(namedtuple('step_description', 'step_class,args')):
  'Describes a step: the step class plus the keyword args used to instantiate it.'

  def __new__(clazz, step_class, args = None):
    args = args or {}
    check.check_dict(args)
    return clazz.__bases__[0].__new__(clazz, step_class, args)

  def __str__(self):
    return '%s:%s' % (str(self.step_class), str(self.args))

  def __eq__(self, other):
    # Compare field values.  The previous implementation compared __dict__,
    # which is always empty on namedtuple-derived instances, so any two
    # descriptions (and many unrelated objects) compared equal.
    if not isinstance(other, step_description):
      return NotImplemented
    return tuple(self) == tuple(other)

  # Defining __eq__ clears the inherited hash; restore tuple hashing so
  # instances remain usable as dict keys and set members.
  __hash__ = tuple.__hash__

  @classmethod
  def parse_description(clazz, text):
    'Parse text (a registered step class name) into a step_description; raises RuntimeError if unknown.'
    # First parameter renamed from the misleading "self" - this is a classmethod.
    check.check_string(text)
    step_class = step_registry.get(text)
    if not step_class:
      raise RuntimeError('no such step class: %s' % (text))
    return clazz(step_class)

check.register_class(step_description)
def __str__(self):
  return self.to_string()

def to_string(self, delimiter = '=', quote_value = False):
  'Render as "<key><delimiter><value>", optionally quoting the value.'
  value = str(self.value)
  if quote_value:
    value = string_util.quote_if_needed(value)
  return str(self.key) + delimiter + value

def is_instance(self, key_type, value_type):
  'True if key and value are instances of key_type and value_type respectively.'
  return isinstance(self.key, key_type) and isinstance(self.value, value_type)

@classmethod
def parse(clazz, text, delimiter = '='):
  'Parse "key<delimiter>value" into a key_value; raises ValueError if the delimiter is absent.'
  key, found_delimiter, value = text.partition(delimiter)
  if found_delimiter != delimiter:
    raise ValueError('invalid key value: "%s"' % (text))
  return clazz(key.strip(), value.strip())

check.register_class(key_value, include_seq = False)
self._requirement_managers = {} def _make_requirement_manager(self, build_target): self.log_i('_make_requirement_manager(build_target=%s)' % (build_target.build_path)) self._timer.start('_make_requirement_manager() for %s' % (str(build_target))) rm = requirement_manager() latest_versions = self.list_all_by_package_descriptor(build_target).latest_versions() for pkg_desc in latest_versions: rm.add_package(pkg_desc) self._timer.stop() return rm def get_requirement_manager(self, build_target): if not build_target.build_path in self._requirement_managers: self._requirement_managers[build_target.build_path] = self._make_requirement_manager(build_target) return self._requirement_managers[build_target.build_path] def resolve_deps(self, names, build_target, hardness, include_names): self.log_i('resolve_deps(names=%s, build_target=%s, hardness=%s, include_names=%s)' % (' '.join(names), build_target.build_path, ' '.join(hardness), include_names)) rm = self.get_requirement_manager(build_target) self.log_i('resolve_deps() requirements_manager dep_map=%s' % (rm.descriptor_map_to_string())) return rm.resolve_deps(names, build_target.system, hardness, include_names) def list_latest_versions(self, build_target): return self.list_all_by_descriptor(build_target).latest_versions() check.register_class(artifact_manager_base, name = 'artifact_manager', include_seq = False)
db_b = other set_a = set(db_a.checksum_dict()) set_b = set(db_b.checksum_dict()) in_a_only = set_a - set_b in_b_only = set_b - set_a in_both = set_a | set_b #delta_result = namedtuple('delta_result', 'common, conflicts, in_a_only, in_b_only') print('in_a_only: %s' % (str(in_a_only))) print('in_b_only: %s' % (str(in_b_only))) print('in_both: %s' % (str(in_both))) return None def dump(self): tt = text_table(data=self.entries()) print(str(tt)) def to_json(self): return json.dumps(self._db, indent=2) def find_by_checksum(self, checksum): for entry in self._db.itervalues(): if entry.checksum == checksum: return entry return None check.register_class(source_finder_db_base)
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*- from bes.common import check from .value_file import value_file class value_dir(value_file): def __init__(self, origin=None, filename='', properties=None): 'Class to manage a recipe dir.' super(value_dir, self).__init__(origin=origin, filename=filename, properties=properties) check.register_class(value_dir, include_seq=True)
'revision': self.revision, 'epoch': self.epoch, 'system': self.system, 'level': self.level, 'arch': self.arch, 'distro': self.distro, 'distro_version': self.distro_version, 'requirements': util.requirements_to_string_list(self.requirements), 'properties': self.properties, 'files': self.files.to_simple_dict(), } def clone_with_filename(self, filename): l = list(self) l[1] = filename # remove format version which __init__() does not take l.pop(0) return self.__class__(*l) @cached_property def full_name(self): return self.make_full_name_str(self.name, self.build_version) @classmethod def make_full_name_str(clazz, name, version): return '%s%s%s' % (name, '-', str(version)) check.register_class(package_metadata, include_seq=False)
@classmethod
#@abstractmethod
def default_value(clazz, class_name):
  'A source dir has no default value.'
  return None

@classmethod
#@abstractmethod
def resolve(clazz, values, class_name):
  'Resolve multiple values: last one wins, earlier values are discarded.'
  # FIXME
  return values[-1]

def _update_tarball_path(self):
  'Compute and cache the tarball path and resolved source dir from the current substitutions.'
  assert self.substitutions
  temp_dir = self.substitute('${REBUILD_TEMP_DIR}')
  self._full_name = self.substitute('${REBUILD_PACKAGE_FULL_NAME}')
  self._resolved_where = self.substitute(self.where)
  tarball_filename = '%s.tar.gz' % (self._full_name)
  self._tarball = path.join(temp_dir, tarball_filename)

def tarball(self):
  'Return the tarball for the resolved source dir, creating it on first use.'
  # Lazily create the archive only when it does not exist yet.
  if not path.isfile(self._tarball):
    archiver.create(self._tarball, self._resolved_where, base_dir=self._full_name)
  assert path.isfile(self._tarball)
  return self._tarball

check.register_class(value_source_dir, include_seq=False)
@classmethod
def _first_delimiter(clazz, s):
  'Return whichever delimiter (clazz.COLON or clazz.EQUAL) appears first in s, or None if neither does.'
  colon_index = s.find(clazz.COLON)
  equal_index = s.find(clazz.EQUAL)
  if colon_index < 0 and equal_index < 0:
    return None
  if colon_index < 0:
    return clazz.EQUAL
  if equal_index < 0:
    return clazz.COLON
  return clazz.COLON if colon_index < equal_index else clazz.EQUAL

@classmethod
def _parse_value(clazz, line, delimiter):
  'Split the comment-free text of line at delimiter and return a stripped key_value.'
  before, sep, after = line.text_no_comments.partition(delimiter)
  assert sep == delimiter
  return key_value(before.strip(), after.strip())

check.register_class(caca_entry, name='caca_pkg_config_entry')
for value in self: result.extend(value.sources(recipe_env)) return result @classmethod #@abstractmethod def resolve(clazz, values, class_name): 'Resolve a list of list values by flattening it down to a single list.' if not check.is_seq(values, clazz): raise TypeError('%s: should be a sequence of %s instead of %s - %s' % (values[0].origin, clazz, str(values), type(values))) value_type = clazz.value_type() result = clazz() for value in values: check.check(value, clazz) result.extend(value._values) result.remove_dups() return result @classmethod def _split_values_and_properties(clazz, l): values = [] properties = [] for v in l: if '=' in v: properties.append(v) else: values.append(v) return ( values, ' '.join(properties) ) check.register_class(value_list_base, include_seq = False)
'Return the names for all the descriptors.' return [ pd.name for pd in self ] def latest_versions(self): 'Return a list of only the lastest version of any package with multiple versions.' latest = {} for pd in self: if not pd.name in latest: latest[pd.name] = pd else: if pd.version > latest[pd.name].version: latest[pd.name] = pd result = package_descriptor_list(latest.values()) result.sort() return result @classmethod def resolve(clazz, what): if check.is_string(what): return clazz([ package_descriptor.parse(pkg_descs) ]) elif check.is_string_seq(what): return clazz([ package_descriptor.parse(p) for p in what ]) elif check.is_package_descriptor(what): return clazz([ what ]) elif check.is_package_descriptor_seq(what): return what else: raise TypeError('Cannot resolve to package descriptor list: %s - %s' % (str(what), type(what))) check.register_class(package_descriptor_list, include_seq = False)
version = self.version if author_name is not None: check.check_string(author_name) else: author_name = self.author_name if author_email is not None: check.check_string(author_email) else: author_email = self.author_email if address is not None: check.check_string(address) else: address = self.address if tag is not None: check.check_string(tag) else: tag = self.tag if timestamp is not None: check.check_string(timestamp) else: timestamp = self.timestamp return self.__class__(version, author_name, author_email, address, tag, timestamp) @classmethod def version_info_for_module(clazz, mod): check.check_module(mod) return clazz(mod.__version__, mod.__bes_author_name__, mod.__author__, mod.__bes_address__, mod.__bes_tag__, mod.__bes_timestamp__) check.register_class(version_info)
#-*- coding:utf-8; mode:python; indent-tabs-mode: nil; c-basic-offset: 2; tab-width: 2 -*-

from bes.common import check

class build_level(object):
  'The valid build levels plus helpers to validate and parse them.'

  DEBUG = 'debug'
  RELEASE = 'release'
  LEVELS = [DEBUG, RELEASE]

  @classmethod
  def level_is_valid(clazz, build_level):
    'True when build_level is one of LEVELS.'
    return build_level in clazz.LEVELS

  @classmethod
  def parse_level(clazz, s):
    'Parse a case-insensitive level string; raises ValueError for unknown levels.'
    slower = s.lower()
    if slower not in clazz.LEVELS:
      raise ValueError('Invalid build_level: %s' % (s))
    return slower

check.register_class(build_level)
def __init__(self):
  self._finders = []

def __str__(self):
  return ', '.join(str(f) for f in self._finders)

def __len__(self):
  return len(self._finders)

def add_finder(self, finder):
  'Append a finder to the chain; it must be a source_finder and not already present.'
  check.check(finder, source_finder)
  assert finder not in self._finders
  self._finders.append(finder)

def find_tarball(self, filename):
  'Ask each finder in order; return the first truthy result or None.'
  for finder in self._finders:
    found = finder.find_tarball(filename)
    if found:
      return found
  return None

def ensure_source(self, filename):
  'Ask each finder in order until one ensures the source; True on success.'
  assert path.isabs(filename)
  # any() short-circuits exactly like the original early-return loop.
  return any(finder.ensure_source(filename) for finder in self._finders)

check.register_class(source_finder_chain, include_seq=False)
current.reload(root_dir = root_dir) return self == current def has_filename(self, filename): current = self[:] current.reload(root_dir = root_dir) return self == current def has_filename(self, filename): current = self[:] current.reload(root_dir = root_dir) return self == current def checksum(self): 'Return a checksum of the files and file checksums themselves.' buf = StringIO() for value in self: buf.write(value.filename) buf.write(value.checksum) return hashlib.sha256(buf.getvalue().encode('utf-8')).hexdigest() def to_dict(self): 'Return a dictionary of filenames to checksums.' result = {} for value in self: result[value.filename] = value.checksum return result check.register_class(file_checksum, include_seq = False) check.register_class(file_checksum_list, include_seq = False)
def __eq__(self, other):
  # Equal to another recipe_step_list (compare the wrapped lists) or to a
  # plain list of steps; any other type raises loudly instead of returning
  # NotImplemented.  NOTE(review): defining __eq__ without __hash__ makes
  # instances unhashable in python 3 - presumably never used as dict keys;
  # confirm before changing.
  if isinstance(other, self.__class__):
    return self._steps == other._steps
  elif isinstance(other, list):
    return self._steps == other
  else:
    raise TypeError(
      'other should be of recipe_step_list type instead of %s' % (type(other)))

def __str__(self):
  # One step per line, without a trailing newline.
  buf = StringIO()
  for step in self._steps:
    buf.write(str(step))
    buf.write('\n')
  return buf.getvalue().strip()

def append(self, step):
  # Validate before storing; only recipe_step instances are accepted.
  check.check_recipe_step(step)
  self._steps.append(step)

def extend(self, steps):
  # Delegate to append() so every element is validated.
  for step in steps:
    self.append(step)

def __len__(self):
  return len(self._steps)

check.register_class(recipe_step_list, include_seq=False)
#@abstractmethod
def get_source_finder(self):
  'Return the source finder from the builder environment.'
  return self._builder_env.source_finder

class testing_recipe_load_env(recipe_load_env_base):
  'A recipe_load_env_base for unit tests; each collaborator may be omitted.'

  def __init__(self, build_target = None, downloads_manager = None, source_finder = None):
    build_target = build_target or BT.make_host_build_target()
    check.check_build_target(build_target)
    self._build_target = build_target
    # Always initialize both attributes.  Previously they were assigned only
    # inside the "if" blocks, so the getters raised AttributeError whenever a
    # collaborator was not supplied.
    self._downloads_manager = None
    if downloads_manager:
      check.check_git_download_cache(downloads_manager)
      self._downloads_manager = downloads_manager
    self._source_finder = None
    if source_finder:
      check.check_source_finder_chain(source_finder)
      self._source_finder = source_finder

  #@abstractmethod
  def get_build_target(self):
    return self._build_target

  #@abstractmethod
  def get_downloads_manager(self):
    return self._downloads_manager

  #@abstractmethod
  def get_source_finder(self):
    return self._source_finder

check.register_class(recipe_load_env_base, include_seq = False)
if self.mask: return self._to_string_with_mask(depth, indent, quote) else: return self._to_string_no_mask(depth, indent, quote) def value_to_string(self, quote=True): return self.value.value_to_string(quote) def _to_string_no_mask(self, depth, indent, quote): spaces = depth * indent * ' ' buf = StringIO() buf.write(spaces) buf.write(self.value_to_string(quote=quote)) return buf.getvalue() def _to_string_with_mask(self, depth, indent, quote): spaces = depth * indent * ' ' buf = StringIO() buf.write(spaces) buf.write(self.mask) buf.write(value_parsing.MASK_DELIMITER) buf.write(' ') buf.write(self.value_to_string(quote=quote)) return buf.getvalue() def mask_matches(self, system): return build_system.mask_matches(self.mask or 'all', system) check.register_class(masked_value, include_seq=False)
if not text: values = string_list() else: values = string_list.parse(text, options = string_list.KEEP_QUOTES) return clazz(origin = origin, values = values) @classmethod #@abstractmethod def default_value(clazz, class_name): return string_list() @classmethod #@abstractmethod def resolve(clazz, values, class_name): 'Resolve a list of values if this type into a nice dictionary.' if class_name != value_type.STRING_LIST: values_string = [ str(x) for x in values ] print('WARNING: %s: class_name should be %s instead of %s for %s' % (values[0].origin, value_type.value_to_name(value_type.STRING_LIST), value_type.value_to_name(class_name), values_string)) assert False return clazz.default_value(class_name) #raise TypeError('class_name should be %s instead of %d' % (value_type.STRING_LIST, class_name)) result = string_list() for value in values: check.check_value_string_list(value) result.extend(value.values) result.remove_dups() return result check.register_class(value_string_list, include_seq = False)
return None except Exception as ex: raise @cached_property def git_tracked(self): 'Return True if the file is tracked by the git repo.' if not self.git_root: return False return git.is_tracked(self.git_root, self.filename) @cached_property def inspection(self): 'Return the git root for this file or None if not within a git repo.' try: return unit_test_inspect.inspect_file(self.filename) except SyntaxError as ex: #printer.writeln('Failed to inspect: %s - %s' % (f, str(ex))) print('syntax error inspecting: %s - %s' % (self.filename, str(ex))) raise except Exception as ex: #printer.writeln('Failed to inspect: %s - %s:%s' % (f, type(ex), str(ex))) print('2 Failed to inspect: %s - %s:%s' % (self.filename, type(ex), str(ex))) raise @property def is_broken_link(self): return file_util.is_broken_link(self.filename) check.register_class(file_info, include_seq = False)
return path.exists(tarball_path) def get_tarball(self, address, revision): 'Return the local filesystem path to the tarball with address and revision.' local_address_path = self.path_for_address(address) tarball_filename = '%s.tar.gz' % (revision) tarball_path = path.join(local_address_path, tarball_filename) if path.exists(tarball_path): return tarball_path tmp_dir = temp_file.make_temp_dir() if path.isdir(address): name = path.basename(address) else: name = git_util.name_from_address(address) tmp_full_path = path.join(tmp_dir, tarball_filename) git.download_tarball(name, revision, address, tmp_full_path) file_util.rename(tmp_full_path, tarball_path) return tarball_path def path_for_address(self, address): 'Return path for local tarball.' return path.join(self.root_dir, git_util.sanitize_address(address)) def tarball_path(self, address, revision): 'Return True if the tarball with address and revision is in the cache.' local_address_path = self.path_for_address(address) tarball_filename = '%s.tar.gz' % (revision) return path.join(local_address_path, tarball_filename) check.register_class(git_download_cache, include_seq = False)
l[4] = hardness return self.__class__(*l) def clone_replace_system_mask(self, system_mask): l = list(self) l[3] = system_mask return self.__class__(*l) def system_mask_matches(self, system): 'Resolve requirements for the given system.' if not build_system.system_is_valid(system): raise ValueError('invalid system: %s - %s' % (str(system), type(system))) self_system_mask = self.system_mask or build_system.ALL return build_system.mask_matches(self_system_mask, system) def hardness_matches(self, hardness): 'Return True if hardness matches.' hardness = object_util.listify(hardness) if not requirement_hardness.is_valid_seq(hardness): raise ValueError('invalid hardness: %s - %s' % (str(hardness), type(hardness))) self_hardness = self.hardness or requirement_hardness.DEFAULT for h in hardness: if self_hardness == requirement_hardness(h): return True return False check.register_class(requirement)
arch = parts[1] level = parts[2] system, distro, distro_version = clazz._parse_system(system) arch = build_arch.split(arch, delimiter='-') return clazz(system, distro, distro_version, arch, level) @classmethod def _parse_system(clazz, s): parts = s.split('-') if len(parts) < 1: raise ValueError('Invalid system: %s' % (s)) distro = '' distro_version = '' system = parts.pop(0) if len(parts) == 1: distro_version = parts[0] elif len(parts) == 2: distro = parts[0] distro_version = parts[1] elif len(parts) > 2: raise ValueError('Invalid system: %s' % (s)) return system, distro, distro_version @classmethod def make_host_build_target(clazz, level=build_level.RELEASE): return clazz(host.SYSTEM, host.DISTRO, host.VERSION, (host.ARCH, ), level) check.register_class(build_target)
if config: if check.is_masked_value_list(config): resolved = config.resolve(system, value_type.STRING_LIST) else: raise RuntimeError('not a valid masked_value_list: %s' % (config)) else: resolved = [] requirements = script.descriptor.requirements.filter_by_hardness(['RUN', 'BUILD']).filter_by_system(system) deps_names = requirements.names() export_names = resolved if export_names == dependency_resolver.ALL_DEPS: export_names = deps_names delta = (set(export_names) - set(deps_names)) if delta: raise RuntimeError('Trying to export deps that are not specified by %s: %s' % (script.descriptor.name, ' '.join(delta))) return export_names @classmethod def _env_find_roque_dollar_sign(clazz, env): for key in sorted(env.keys()): if variable.has_rogue_dollar_signs(env[key]): return key return None @classmethod def _env_substitite(clazz, env): for key in sorted(env.keys()): env[key] = variable.substitute(str(env[key]), env) check.register_class(step, include_seq = False)
def test_register_class_duplicate(self):
  'Registering the same class name twice raises RuntimeError.'
  class bar(object):
    pass
  C.register_class(bar, 'bar')
  # The context object was previously captured but never inspected,
  # so it is no longer bound.
  with self.assertRaises(RuntimeError):
    C.register_class(bar, 'bar')
return clazz.__bases__[0].__new__(clazz, files, env_files, files_checksum, env_files_checksum) @classmethod def parse_dict(clazz, o): return clazz(file_checksum_list.from_simple_list(o['files']), file_checksum_list.from_simple_list(o['env_files']), o['files_checksum'], o['env_files_checksum']) def to_simple_dict(self): 'Return a simplified dict suitable for json encoding.' return { 'files': self.files.to_simple_list(), 'env_files': self.env_files.to_simple_list(), 'files_checksum': self.files_checksum, 'env_files_checksum': self.env_files_checksum, } def to_sql_dict(self): 'Return a dict suitable to use directly with sqlite insert commands' d = { 'files_checksum': util.sql_encode_string(self.files_checksum), 'env_files_checksum': util.sql_encode_string(self.env_files_checksum), } return d check.register_class(package_files, include_seq=False)
def test_is_seq(self):
  'is_<name>_seq() is true only when every element is an instance of the registered class.'
  class kiwi(object):
    pass
  C.register_class(kiwi, 'kiwi')
  good = [ kiwi(), kiwi() ]
  mixed = [ kiwi(), 6 ]
  self.assertTrue( C.is_kiwi_seq(good) )
  self.assertFalse( C.is_kiwi_seq(mixed) )
@cached_property
def full_name(self):
  'Cached "<name>-<version>" full name for this descriptor.'
  return self.make_full_name_str(self.name, self.build_version)

@classmethod
def make_full_name_str(clazz, name, version):
  'Join name and version with a dash.'
  return '%s%s%s' % (name, '-', str(version))

def clone_with_mutation(self, field, value):
  'Return a copy with the single named field replaced by value.'
  # Builds via __class__(*l) rather than namedtuple._replace() - presumably so
  # a custom __new__ (validation) still runs on the mutated values; confirm.
  i = self._fields.index(field)
  l = list(self)
  l[i] = value
  return self.__class__(*l)

def clone_with_mutations(self, mutations):
  'Return a copy with every { field: value } pair in mutations applied.'
  l = list(self)
  for field, value in mutations.items():
    i = self._fields.index(field)
    l[i] = value
  return self.__class__(*l)

@classmethod
def parse(clazz, s):
  'Parse a semicolon-delimited descriptor string; exactly 9 fields are required.'
  parts = s.split(';')
  if len(parts) != 9:
    raise ValueError('Invalid artifact descriptor: %s' % (s))
  return clazz(*parts)

check.register_class(artifact_descriptor, include_seq=False)
@property
def stripped_text(self):
  'self.text with surrounding whitespace removed.'
  return self.text.strip()

@classmethod
def merge(clazz, lines):
  'Merge a sequence of lines into one. Continuation flags are cleared'
  merged = ''.join(string_util.remove_tail(line.text, clazz.CONTINUATION_CHAR)
                   for line in lines)
  # The merged token keeps the line number of the first line.
  return clazz(lines[0].line_number, merged)

def get_text(self, strip_comments = False, strip_text = False):
  'Return the line text, optionally comment-free and/or stripped.'
  text = self.text_no_comments if strip_comments else self.text
  return text.strip() if strip_text else text

def text_is_empty(self):
  'True when the stripped text is empty.'
  return self.get_text(strip_text = True) == ''

def clone_stripped(self):
  'Return a new line_token with the same line number and stripped text.'
  return line_token(self.line_number, self.text.strip())
def test_is(self):
  'is_<name>() is true only for instances of the registered class.'
  class baz(object):
    pass
  C.register_class(baz, 'baz')
  instance = baz()
  self.assertTrue( C.is_baz(instance) )
  self.assertFalse( C.is_baz(6) )
check.check_string(filename) if fixture is not None: check.check_string(fixture) if function is not None: check.check_string(function) return clazz.__bases__[0].__new__(clazz, filename, fixture, function) @classmethod def parse(clazz, s): 'Parse a unit test description in the form filename:fixutre.function' filename, _, right = s.partition(':') if '.' in right: fixture, _, function = right.partition('.') else: fixture, function = ( None, right ) return clazz(filename or None, fixture or None, function or None) def __str__(self): v = [] if self.filename: v.append(self.filename) v.append('.') if self.fixture: v.append(self.fixture) v.append(':') if self.function: v.append(self.function) return ''.join(v) check.register_class(unit_test_description)
def test_is_seq_not_registered(self):
  'With include_seq = False no is_<name>_seq() checker is generated.'
  class apple(object):
    pass
  C.register_class(apple, 'apple', include_seq = False)
  # The context object was previously captured but never inspected,
  # so it is no longer bound.
  with self.assertRaises(AttributeError):
    self.assertTrue( C.is_apple_seq([ apple(), apple() ]) )
return clazz(o['name'], o['version'], o['revision'], o['epoch'], util.requirements_from_string_list(o['requirements']), o['properties'], package_files.parse_dict(o['files'])) @classmethod def _parse_requirements(clazz, l): check.check_string_seq(l) reqs = requirement_list() for n in l: reqs.extend(requirement_list.parse(n)) return reqs def to_simple_dict(self): 'Return a simplified dict suitable for json encoding.' return { '_format_version': self.format_version, 'name': self.name, 'version': self.version, 'revision': self.revision, 'epoch': self.epoch, 'requirements': util.requirements_to_string_list(self.requirements), 'properties': self.properties, 'files': self.files.to_simple_dict(), } check.register_class(package_db_entry, include_seq = False)
def test_check_seq(self):
  'With include_seq = False no check_<name>_seq() checker is generated.'
  class potato(object):
    pass
  C.register_class(potato, 'potato', include_seq = False)
  # The context object was previously captured but never inspected,
  # so it is no longer bound.
  with self.assertRaises(AttributeError):
    C.check_potato_seq([ potato(), potato() ])
@classmethod
def _resolve_file_list(clazz, values):
  'Validate every value and collapse them into a de-duplicated value_file_list.'
  for value in values:
    check.check_value_file(value)
  result = value_file_list(values = list(values))
  result.remove_dups()
  return result

@classmethod
def _resolve_file(clazz, values):
  'Last value wins; None when there are no values.'
  return values[-1] if values else None

check.register_class(value_file, include_seq=True)

class value_file_list(value_list_base):

  __value_type__ = value_file

  def __init__(self, origin=None, values=None):
    super(value_file_list, self).__init__(origin=origin, values=values)

check.register_class(value_file_list, include_seq=False)
content_type = None content_hash = None if 'contents' in d: contents = [ clazz.parse_dict(item) for item in d['contents'] ] else: contents = None assert 'folderid' in d pcloud_id = d['folderid'] else: size = d['size'] category = d['category'] content_type = d['contenttype'] content_hash = d['hash'] contents = None assert 'fileid' in d pcloud_id = d['fileid'] return clazz(name, pcloud_id, is_folder, size, category, content_type, content_hash, contents, None) def mutate_checksum(self, checksum): return self.__class__(self.name, self.pcloud_id, self.is_folder, self.size, self.category, self.content_type, self.content_hash, self.contents, checksum) def mutate_contents(self, contents): return self.__class__(self.name, self.pcloud_id, self.is_folder, self.size, self.category, self.content_type, self.content_hash, contents, self.checksum) check.register_class(pcloud_metadata)
'Return a list of sources this caca provides or None if no sources.' assert False @classmethod @abstractmethod def parse(clazz, origin, text, node): 'Parse a value.' assert False @classmethod @abstractmethod def default_value(clazz, class_name): 'Return the default value to use for this class.' assert False @classmethod @abstractmethod def resolve(clazz, values, class_name): 'Resolve a list of values if this type into a nice dictionary.' assert False def _append_properties_string(self, buf, include_properties): if include_properties: ps = self.properties_to_string() if ps: buf.write(' ') buf.write(ps) check.register_class(value_base)
#@abstractmethod def resolve(clazz, values, class_name): check.check_value_hook_seq(values) assert class_name == value_type.HOOK_LIST result_hooks = [] for value in values: check.check_value_hook(value) result_hooks.append(value) result = value_hook_list(values=result_hooks) result.remove_dups() return result @abstractmethod def execute(self, script, env): 'Execute the hook. Same semantics as step.execute.' pass check.register_class(value_hook, include_seq=True) class value_hook_list(value_list_base): __value_type__ = value_hook def __init__(self, origin=None, values=None): super(value_hook_list, self).__init__(origin=origin, values=values) check.register_class(value_hook_list, include_seq=False)
check.check_node(node) values = key_value_list.parse(value, options = key_value_list.KEEP_QUOTES) return clazz(origin = origin, values = values) @classmethod #@abstractmethod def default_value(clazz, class_name): return key_value_list() @classmethod #@abstractmethod def resolve(clazz, values, class_name): 'Resolve a list of values if this type into a nice dictionary.' result = key_value_list() seen = {} for value in values: check.check_value_key_values(value) check.check_key_value_list(value.values) for next_kv in value.values: check.check_key_value(next_kv) i = len(result) seen_i = seen.get(next_kv.key, None) if seen_i is not None: result[seen_i] = next_kv else: result.append(next_kv) seen[next_kv.key] = i return result check.register_class(value_key_values, include_seq = False)
def filter_by_hardness(self, hardness):
  'Return only the requirements whose hardness matches.'
  return requirement_list([ req for req in self if req.hardness_matches(hardness) ])

def filter_by_system(self, system):
  'Return only the requirements whose system mask matches.'
  return requirement_list([ req for req in self if req.system_mask_matches(system) ])

def filter_by(self, hardness, system):
  'Return only the requirements matching both hardness and system.'
  matching = [ req for req in self
               if req.hardness_matches(hardness) and req.system_mask_matches(system) ]
  return requirement_list(matching)

def names(self):
  'Return the name of every requirement, in order.'
  return [ req.name for req in self ]

@staticmethod
def _check_cast_func(clazz, obj):
  # Cast any iterable of requirements into a requirement_list for check.
  return clazz(list(obj))

check.register_class(requirement_list, include_seq=False, cast_func=requirement_list._check_cast_func)
#@abstractmethod def sources(self, recipe_env): 'Return a list of sources this caca provides or None if no sources.' return [] #@abstractmethod def substitutions_changed(self): self.value = self.substitute(self.value) @classmethod #@abstractmethod def parse(clazz, origin, text, node): if origin: check.check_value_origin(origin) check.check_node(node) return clazz(origin = origin, value = text) @classmethod #@abstractmethod def default_value(clazz, class_name): return None @classmethod #@abstractmethod def resolve(clazz, values, class_name): 'Resolve a list of values if this type into a nice dictionary.' assert class_name == value_type.STRING return values[-1].value check.register_class(value_string, include_seq = True)