def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--manifest', help='Path to the SDK\'s manifest file', required=True)
    parser.add_argument(
        '--meta', help='Path to output metadata file', required=True)
    parser.add_argument(
        '--target-arch',
        help='Architecture of precompiled target atoms',
        required=True)
    parser.add_argument(
        '--host-arch', help='Architecture of host tools', required=True)
    args = parser.parse_args()

    with open(args.manifest, 'r') as manifest_file:
        manifest = json.load(manifest_file)
    atoms = [Atom(a) for a in manifest['atoms']]

    meta = {
        'arch': {
            'host': args.host_arch,
            'target': [
                args.target_arch,
            ],
        },
        'parts': sorted(filter(lambda m: m, [a.metadata for a in atoms])),
    }

    with open(args.meta, 'w') as meta_file:
        json.dump(meta, meta_file, indent=2, sort_keys=True)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--manifest', help='Path to the SDK\'s manifest file', required=True)
    parser.add_argument(
        '--mapping',
        help='Extra files to add to the archive',
        action=MappingAction)
    parser.add_argument(
        '--output', help='Path to the output file manifest', required=True)
    args = parser.parse_args()

    with open(args.manifest, 'r') as manifest_file:
        manifest = json.load(manifest_file)

    all_files = {}

    def add(dest, src):
        if dest in all_files:
            print('Error: multiple entries for %s' % dest)
            print(' - %s' % all_files[dest])
            print(' - %s' % src)
            return 1
        all_files[dest] = src

    for atom in [Atom(a) for a in manifest['atoms']]:
        for file in atom.files:
            add(file.destination, file.source)

    for dest, source in args.mappings.items():
        add(dest, source)

    with open(args.output, 'w') as output_file:
        for mapping in sorted(all_files.items()):
            output_file.write('%s=%s\n' % mapping)
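# `MappingAction` above is not defined in this excerpt. A minimal sketch of
# such a custom argparse action, assuming `--mapping` takes repeated
# `destination=source` values and collects them into a `mappings` dict on the
# namespace (the flag format and attribute name are assumptions, not the
# actual implementation, which presumably also initializes the dict when the
# flag is never passed):
import argparse


class MappingAction(argparse.Action):
    '''Collects repeated `--mapping destination=source` flags into a dict.'''

    def __call__(self, parser, namespace, values, option_string=None):
        mappings = getattr(namespace, 'mappings', None) or {}
        destination, source = values.split('=', 1)
        mappings[destination] = source
        setattr(namespace, 'mappings', mappings)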
def _process_manifest_data(manifest, builder):
    '''For testing.'''
    atoms = [Atom(a) for a in manifest['atoms']]
    builder.metadata = Metadata(manifest['meta'])

    # Verify that the manifest only contains supported domains.
    extra_domains = set(
        filter(lambda d: d not in builder.domains,
               [a.id.domain for a in atoms]))
    extra_domains = extra_domains - set(builder.ignored_domains)
    if extra_domains:
        print('The following domains are not currently supported: %s' %
              ', '.join(extra_domains))
        return False

    builder.prepare()

    # Pass the various atoms through the builder.
    for atom in atoms:
        domain = atom.id.domain
        if domain in builder.domains:
            getattr(builder, 'install_%s_atom' % domain)(atom)

    # Wrap things up.
    builder.finalize()
    return True
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--id', help='The atom\'s identifier', required=True)
    parser.add_argument('--out', help='Path to the output file', required=True)
    parser.add_argument(
        '--deps', help='List of manifest paths for dependencies', nargs='*')
    parser.add_argument(
        '--file',
        help='A (destination <-- source) mapping',
        action='append',
        nargs=2)
    parser.add_argument(
        '--gn-label', help='GN label of the atom', required=True)
    parser.add_argument('--category', help='Publication level', required=True)
    parser.add_argument(
        '--meta',
        help="Path to the atom's metadata file in the SDK",
        default='',
        required=False)
    args = parser.parse_args()

    # Gather the definitions of other atoms this atom depends on.
    (deps, atoms) = gather_dependencies(args.deps)

    # Build the list of files making up this atom.
    files = []
    for destination, source in args.file:
        files.append({
            'source': source,
            'destination': destination,
        })

    atoms.update([
        Atom({
            'id': args.id,
            'meta': args.meta,
            'gn-label': args.gn_label,
            'category': args.category,
            'deps': sorted(list(deps)),
            'files': files,
        })
    ])

    if detect_collisions(atoms):
        print('Name collisions detected!')
        return 1

    if detect_category_violations(args.category, atoms):
        print('Publication level violations detected!')
        return 1

    manifest = {
        'ids': [args.id],
        'atoms': map(lambda a: a.json, sorted(list(atoms))),
    }

    with open(os.path.abspath(args.out), 'w') as out:
        json.dump(manifest, out, indent=2, sort_keys=True)
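# `gather_dependencies`, `detect_collisions`, and `detect_category_violations`
# come from elsewhere in the build tooling and are not part of this excerpt.
# As an illustration only, a sketch of what `gather_dependencies` could look
# like, assuming each dependency manifest is a JSON file with the same 'ids'
# and 'atoms' keys written by this script, and reusing the `Atom` wrapper used
# above (the manifest shape is an assumption):
import json


def gather_dependencies(manifests):
    '''Returns (ids, atoms): the identifiers listed directly by the given
    dependency manifests and the full set of Atom objects they declare.'''
    direct_deps = set()
    atoms = set()
    if manifests is None:
        return (direct_deps, atoms)
    for manifest_path in manifests:
        with open(manifest_path, 'r') as manifest_file:
            manifest = json.load(manifest_file)
        direct_deps.update(manifest['ids'])
        atoms.update([Atom(a) for a in manifest['atoms']])
    return (direct_deps, atoms)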
def _atom(name, category):
    return Atom({
        'id': {
            'domain': 'foo',
            'name': name,
        },
        'category': category,
        'gn-label': '//hello',
        'deps': [],
        'package-deps': [],
        'files': [],
        'tags': [],
    })
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--manifest', help='Path to the SDK\'s manifest file', required=True)
    parser.add_argument(
        '--meta', help='Path to output metadata file', required=True)
    parser.add_argument(
        '--target-arch',
        help='Architecture of precompiled target atoms',
        required=True)
    parser.add_argument(
        '--host-arch', help='Architecture of host tools', required=True)
    parser.add_argument(
        '--id', help='Opaque identifier for the SDK', default='')
    parser.add_argument(
        '--schema-version',
        help='Opaque identifier for the metadata schemas',
        required=True)
    args = parser.parse_args()

    with open(args.manifest, 'r') as manifest_file:
        manifest = json.load(manifest_file)
    atoms = [Atom(a) for a in manifest['atoms']]

    meta = {
        'arch': {
            'host': args.host_arch,
            'target': [
                args.target_arch,
            ],
        },
        'id': args.id,
        'parts': sorted([{
            'meta': a.metadata,
            'type': a.type,
        } for a in atoms]),
        'schema_version': args.schema_version,
    }

    with open(args.meta, 'w') as meta_file:
        json.dump(
            meta, meta_file, indent=2, sort_keys=True, separators=(',', ': '))
def _atom(name, category):
    return Atom({
        'id': {
            'domain': 'foo',
            'name': name,
        },
        'meta': {
            'source': 'foo',
            'dest': 'bar',
        },
        'category': category,
        'gn-label': '//hello',
        'deps': [],
        'package-deps': [],
        'files': [],
        'tags': [],
        'type': 'schema.json',
    })
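# The `Atom` and `AtomId` wrappers constructed by the test helpers above are
# defined elsewhere in the tooling. Based only on how they are used in this
# excerpt (hashable, sortable, exposing the raw JSON), a rough sketch might
# look like the following; the details are assumptions, not the real classes:
import functools


@functools.total_ordering
class AtomId(object):
    '''Wraps an atom id of the form {"domain": ..., "name": ...}.'''

    def __init__(self, json):
        self.json = json
        self.domain = json['domain']
        self.name = json['name']

    def __hash__(self):
        return hash((self.domain, self.name))

    def __eq__(self, other):
        return (self.domain, self.name) == (other.domain, other.name)

    def __lt__(self, other):
        return (self.domain, self.name) < (other.domain, other.name)


@functools.total_ordering
class Atom(object):
    '''Wraps the JSON definition of an atom.'''

    def __init__(self, json):
        self.json = json
        self.metadata = json.get('meta')
        self.type = json.get('type')

    def __hash__(self):
        return hash(str(self.json['id']))

    def __eq__(self, other):
        return self.json['id'] == other.json['id']

    def __lt__(self, other):
        return str(self.json['id']) < str(other.json['id'])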
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--manifest', help='Path to the SDK\'s manifest file', required=True)
    parser.add_argument(
        '--meta', help='Path to SDK metadata file', required=True)
    parser.add_argument(
        '--output', help='Path to the output file manifest', required=True)
    args = parser.parse_args()

    with open(args.manifest, 'r') as manifest_file:
        manifest = json.load(manifest_file)

    all_files = {}

    def add(dest, src):
        if dest in all_files:
            print('Error: multiple entries for %s' % dest)
            print(' - %s' % all_files[dest])
            print(' - %s' % src)
            return 1
        all_files[dest] = src

    for atom in [Atom(a) for a in manifest['atoms']]:
        # TODO(DX-340): remove this once destination paths are made relative
        # to the SDK root.
        parsed_id = urlparse(atom.identifier)
        base = parsed_id.netloc + parsed_id.path
        for file in atom.files:
            add(os.path.join(base, file.destination), file.source)

    add('meta/manifest.json', args.meta)

    with open(args.output, 'w') as output_file:
        for mapping in sorted(all_files.iteritems()):
            output_file.write('%s=%s\n' % mapping)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--id', help='The atom\'s identifier', required=True)
    parser.add_argument('--out', help='Path to the output file', required=True)
    parser.add_argument('--depfile', help='Path to the depfile', required=True)
    parser.add_argument(
        '--deps', help='List of manifest paths for dependencies', nargs='*')
    parser.add_argument(
        '--file',
        help='A (destination <-- source) mapping',
        action='append',
        nargs=2)
    parser.add_argument(
        '--file-list', help='A file containing destination=source mappings')
    parser.add_argument(
        '--gn-label', help='GN label of the atom', required=True)
    parser.add_argument('--category', help='Publication level', required=True)
    parser.add_argument(
        '--meta',
        help='Path to the atom\'s metadata file in the SDK',
        required=True)
    parser.add_argument('--type', help='Type of the atom', required=True)
    args = parser.parse_args()

    # Gather the definitions of other atoms this atom depends on.
    (deps, atoms) = gather_dependencies(args.deps)

    # Build the list of files making up this atom.
    extra_files = []
    if args.file_list:
        with open(args.file_list, 'r') as file_list_file:
            extra_files = [
                line.strip().split('=', 1)
                for line in file_list_file.readlines()
            ]
    files = dict(itertools.chain(args.file, extra_files))

    atoms.update(
        [
            Atom(
                {
                    'id': args.id,
                    'meta': args.meta,
                    'gn-label': args.gn_label,
                    'category': args.category,
                    'deps': sorted(list(deps)),
                    'files': [
                        {
                            'source': source,
                            'destination': destination
                        } for destination, source in files.items()
                    ],
                    'type': args.type,
                })
        ])

    if detect_collisions(atoms):
        print('Name collisions detected!')
        return 1

    if detect_category_violations(args.category, atoms):
        print('Publication level violations detected!')
        return 1

    manifest = {
        'ids': [args.id],
        'atoms': [a.json for a in sorted(list(atoms))],
    }

    with open(os.path.abspath(args.out), 'w') as out:
        json.dump(
            manifest, out, indent=2, sort_keys=True, separators=(',', ': '))

    with open(args.depfile, 'w') as dep_file:
        dep_file.write(args.out + ': ')
        for destination, source in extra_files:
            dep_file.write(source + ' ')
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--id', help='The atom\'s identifier', required=True)
    parser.add_argument(
        '--domain',
        help='Name of the domain the element belongs to',
        required=True)
    name_group = parser.add_mutually_exclusive_group(required=True)
    name_group.add_argument('--name', help='Name of the element')
    name_group.add_argument(
        '--name-file',
        help='Path to the file containing the name of the element')
    parser.add_argument('--out', help='Path to the output file', required=True)
    parser.add_argument(
        '--base',
        help='Path to the element\'s source directory',
        required=True)
    parser.add_argument(
        '--deps', help='List of manifest paths for dependencies', nargs='*')
    parser.add_argument(
        '--package-deps',
        help='List of manifest paths for runtime dependencies',
        nargs='*')
    parser.add_argument(
        '--files', help='A source=destination mapping', nargs='*')
    parser.add_argument(
        '--tags', help='List of tags for the included elements', nargs='*')
    parser.add_argument(
        '--tags-file', help='A file containing tags', required=False)
    parser.add_argument(
        '--gn-label', help='GN label of the atom', required=True)
    parser.add_argument('--category', help='Publication level', required=True)
    parser.add_argument(
        '--meta',
        help="Path to the atom's metadata file in the SDK",
        default='',
        required=False)
    args = parser.parse_args()

    if args.name:
        name = args.name
    else:
        with open(args.name_file, 'r') as name_file:
            name = name_file.read()

    # Gather the definitions of other atoms this atom depends on.
    (deps, atoms) = gather_dependencies(args.deps)
    (_, package_atoms) = gather_dependencies(args.package_deps)
    all_atoms = atoms
    all_atoms.update(package_atoms)

    # Build the list of files making up this atom.
    files = []
    has_packaged_files = False
    base = os.path.realpath(args.base)
    for mapping in args.files:
        mode, pair = mapping.split(':', 1)
        is_packaged = (mode == 'packaged')
        destination, source = pair.split('=', 1)
        real_source = os.path.realpath(source)
        if not os.path.exists(real_source):
            raise Exception('Missing source file: %s' % real_source)
        if destination:
            if destination.find('..') != -1:
                raise Exception('Destination for %s cannot contain "..": %s.' %
                                (source, destination))
        else:
            if not real_source.startswith(base):
                raise Exception('Destination for %s must be given as it is not'
                                ' under source directory %s' % (source, base))
            destination = os.path.relpath(real_source, base)
        if os.path.isabs(destination):
            raise Exception('Destination cannot be absolute: %s' % destination)
        files.append({
            'source': real_source,
            'destination': destination,
            'packaged': is_packaged,
        })
        has_packaged_files = has_packaged_files or is_packaged

    id = {
        'domain': args.domain,
        'name': name,
    }

    all_package_deps = set()
    if has_packaged_files:
        all_package_deps.add(AtomId(id))
    for atom in all_atoms:
        all_package_deps.update(atom.package_deps)

    tags = dict(map(lambda t: t.split(':', 1), args.tags))
    if args.tags_file:
        with open(args.tags_file, 'r') as tags_file:
            data = json.load(tags_file)
            assert isinstance(data, dict)
            tags.update(data)
    tags['domain'] = args.domain

    all_atoms.update([Atom({
        'id': id,
        # TODO(DX-340): rename this to "id" once domain/name are gone.
        'identifier': args.id,
        'meta': args.meta,
        'gn-label': args.gn_label,
        'category': args.category,
        'tags': tags,
        'deps': map(lambda i: i.json, sorted(list(deps))),
        'package-deps': map(lambda i: i.json, sorted(list(all_package_deps))),
        'files': files,
    })])

    if detect_collisions(all_atoms):
        print('Name collisions detected!')
        return 1

    if detect_category_violations(args.category, all_atoms):
        print('Publication level violations detected!')
        return 1

    manifest = {
        'ids': [id],
        'atoms': map(lambda a: a.json, sorted(list(all_atoms))),
    }

    with open(os.path.abspath(args.out), 'w') as out:
        json.dump(manifest, out, indent=2, sort_keys=True)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('--id', help='The atom\'s identifier', required=True)
    parser.add_argument('--out', help='Path to the output file', required=True)
    parser.add_argument('--depfile', help='Path to the depfile', required=True)
    parser.add_argument(
        '--deps', help='List of manifest paths for dependencies', nargs='*')
    parser.add_argument(
        '--file',
        help='A (destination <-- source) mapping',
        action='append',
        nargs=2)
    parser.add_argument(
        '--file-list', help='A file containing destination=source mappings')
    parser.add_argument(
        '--gn-label', help='GN label of the atom', required=True)
    parser.add_argument('--category', help='Publication level', required=True)
    parser.add_argument(
        '--meta',
        help='Path to the atom\'s metadata file in the SDK. Required by '
        'default unless --noop-atom is set to True.')
    parser.add_argument(
        '--noop-atom',
        action='store_true',
        help='Whether the atom is a sdk_noop_atom. Sets the atom\'s meta to '
        'be empty. Defaults to False.')
    parser.add_argument('--type', help='Type of the atom', required=True)
    parser.add_argument('--plasa', help='Path to the plasa metadata, optional')
    args = parser.parse_args()

    if args.meta is None and not args.noop_atom:
        parser.error("--meta is required.")

    # Gather the definitions of other atoms this atom depends on.
    (deps, atoms) = gather_dependencies(args.deps)

    # Build the list of files making up this atom.
    extra_files = []
    if args.file_list:
        with open(args.file_list, 'r') as file_list_file:
            extra_files = [
                line.strip().split('=', 1)
                for line in file_list_file.readlines()
            ]
    files = dict(itertools.chain(
        args.file, extra_files)) if args.file else dict(extra_files)

    plasa_fragments = []
    if args.plasa:
        with open(args.plasa, 'r') as plasa_file:
            data = json.load(plasa_file)
            for d in data:
                files[d['dest']] = d['path']
                plasa_fragments += [d['dest']]

    atoms.update(
        [
            Atom(
                {
                    'id': args.id,
                    'meta': args.meta or '',
                    'gn-label': args.gn_label,
                    'category': args.category,
                    'deps': sorted(list(deps)),
                    'files': [
                        {
                            'source': os.path.normpath(source),
                            'destination': os.path.normpath(destination)
                        } for destination, source in files.items()
                    ],
                    'type': args.type,
                    'plasa': plasa_fragments,
                })
        ])

    if detect_collisions(atoms):
        print('Name collisions detected!')
        return 1

    if detect_category_violations(args.category, atoms):
        print('Publication level violations detected!')
        return 1

    manifest = {
        'ids': [args.id],
        'atoms': [a.json for a in sorted(list(atoms))],
    }

    with open(os.path.abspath(args.out), 'w') as out:
        json.dump(
            manifest, out, indent=2, sort_keys=True, separators=(',', ': '))

    with open(args.depfile, 'w') as dep_file:
        dep_file.write(
            '{}: {}\n'.format(
                args.out,
                # Always write relative paths to depfiles. See more information
                # from https://fxbug.dev/75451.
                ' '.join(os.path.relpath(source) for _, source in extra_files),
            ))
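# `detect_collisions` and `detect_category_violations` are shared helpers that
# are not included in this excerpt. Sketches of the checks they plausibly
# perform, inferred from how their results are reported above; the category
# ordering and messages are assumptions, not the actual implementation:
def detect_collisions(atoms):
    '''Returns True if two atoms in the set share an identifier.'''
    ids = set()
    has_collisions = False
    for atom in atoms:
        atom_id = str(atom.json['id'])
        if atom_id in ids:
            print('Multiple definitions for %s' % atom_id)
            has_collisions = True
        ids.add(atom_id)
    return has_collisions


# Assumed publication levels, ordered from most restricted to most public.
CATEGORIES = ['excluded', 'experimental', 'internal', 'partner', 'public']


def detect_category_violations(category, atoms):
    '''Returns True if any atom has a more restricted publication level than
    `category`, i.e. could not be published alongside the new atom.'''
    has_violations = False
    level = CATEGORIES.index(category)
    for atom in atoms:
        if CATEGORIES.index(atom.json['category']) < level:
            print(
                '%s has category %s, incompatible with %s' %
                (atom.json['id'], atom.json['category'], category))
            has_violations = True
    return has_violations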